def test_positive_add_subscription_by_id(self):
    """Test that subscription can be added to activation key

    @Feature: Activation key - Subscription

    @Steps:

    1. Create Activation key
    2. Upload manifest and add subscription
    3. Associate the activation key to subscription

    @Assert: Subscription successfully added to activation key
    """
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    org_id = make_org()['id']
    # The activation key must be created in the same organization the
    # manifest is uploaded to; otherwise the subscription listed below
    # belongs to a different org than the key and cannot be attached.
    ackey_id = self._make_activation_key({'organization-id': org_id})['id']
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org_id,
    })
    subs_id = Subscription.list(
        {'organization-id': org_id},
        per_page=False
    )
    result = ActivationKey.add_subscription({
        u'id': ackey_id,
        u'subscription-id': subs_id[0]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
def test_negative_visual_indicator_with_restricted_subscription(self):
    """Access AK details subscription tab and assert a visual indicator
    is NOT present if organization has no Golden Ticket Enabled.

    :id: ce5f3017-a449-45e6-8709-7d4f7b5f7a4d

    :steps:

        1. Change to a restricted organization (with no GT enabled).
        2. Access Ak details Subscription tab.

    :CaseAutomation: automated

    :expectedresults:
        1. Assert GoldenTicket visual alert is NOT present.

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    self.upload_manifest(org.id, manifests.clone())
    activation_key = entities.ActivationKey(organization=org).create()
    with Session(self) as session:
        # Force the UI context to the restricted organization so the
        # activation key created above is visible.
        set_context(session, org=org.name, force_context=True)
        session.activationkey.search_and_click(activation_key.name)
        session.activationkey.click(tab_locators['ak.subscriptions'])
        # The org/environment info element serves as the Golden Ticket
        # indicator; it must be absent for this restricted org.
        self.assertIsNone(
            session.subscriptions.wait_until_element(
                common_locators['org_environment_info'])
        )
def test_positive_add_rh_content(session):
    """Add Red Hat content to a content view

    :id: c370fd79-0c0d-4685-99cb-848556c786c1

    :setup: Sync RH content

    :expectedresults: RH Content can be seen in a view

    :CaseLevel: Integration
    """
    # A dedicated org is needed so the manifest can be imported into it.
    org = entities.Organization().create()
    view_name = gen_string('alpha')
    repo_settings = {
        'name': REPOS['rhst7']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhst7'],
        'basearch': 'x86_64',
        'releasever': None,
    }
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    enable_sync_redhat_repo(repo_settings, org.id)
    with session:
        session.organization.select(org.name)
        # Create the content view and attach the RH repository to it.
        session.contentview.create({'name': view_name})
        search_results = session.contentview.search(view_name)
        assert search_results[0]['Name'] == view_name
        session.contentview.add_yum_repo(view_name, repo_settings['name'])
        view_details = session.contentview.read(view_name)
        assigned_repos = view_details['repositories']['resources']['assigned']
        assert assigned_repos[0]['Name'] == repo_settings['name']
def organization_with_tr_data_manifests(cls, options=None):
    """Import Organizations (from spacewalk-report users) with manifests.

    :returns: A tuple of SSHCommandResult and a Dictionary containing
        the transition data of the Import
    """
    # Stage one cloned manifest per organization in a remote temp
    # directory so the import command can pick them up from there.
    manifest_list = []
    csv_records = cls.csv_to_dataset([options['csv-file']])
    man_dir = ssh.command(u'mktemp -d').stdout[0]
    org_names = {rec['organization'] for rec in csv_records}
    for org in org_names:
        # Replace separators so the org name is usable as a file name.
        for char in [' ', '.', '#']:
            org = org.replace(char, '_')
        remote_path = u'{0}/{1}.zip'.format(man_dir, org)
        man_file = manifests.clone()
        ssh.upload_file(man_file, remote_path)
        manifest_list.append(remote_path)
        # The local clone is no longer needed once uploaded.
        os.remove(man_file)
    options.update({'upload-manifests-from': man_dir})
    result = cls.organization(options)
    # Remove the remote staging directory after the import ran.
    ssh.command(u'rm -rf {0}'.format(man_dir))
    transition_data = cls.read_transition_csv(
        ssh.command(
            u'ls -v ${HOME}/.transition_data/organizations*'
        ).stdout[:-1]
    )
    return (result, transition_data)
def test_positive_create_after_refresh(self):
    """Upload a manifest, refresh it, then upload a new manifest to
    another organization.

    :id: 1869bbb6-c31b-49a9-bc92-402a90071a11

    :customerscenario: true

    :expectedresults: the manifest is uploaded successfully to other org

    :BZ: 1393442

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    new_org = entities.Organization().create()
    org_sub = entities.Subscription(organization=org)
    new_org_sub = entities.Subscription(organization=new_org)
    self.upload_manifest(org.id, manifests.original_manifest())
    try:
        # Refreshing the first org's manifest must not prevent a
        # subsequent upload into a different organization (BZ 1393442).
        org_sub.refresh_manifest(data={'organization_id': org.id})
        self.assertGreater(len(org_sub.search()), 0)
        self.upload_manifest(new_org.id, manifests.clone())
        self.assertGreater(len(new_org_sub.search()), 0)
    finally:
        # Always release the first manifest so it can be reused.
        org_sub.delete_manifest(data={'organization_id': org.id})
def organization_with_tr_data_manifests(cls, options=None): """Import Organizations (from spacewalk-report users) with manifests. :returns: A tuple of SSHCommandResult and a Dictionary containing the transition data of the Import """ # prepare manifests for every organization manifest_list = [] csv_records = cls.csv_to_dataset([options["csv-file"]]) man_dir = ssh.command(u"mktemp -d").stdout[0] for org in set([rec["organization"] for rec in csv_records]): for char in [" ", ".", "#"]: org = org.replace(char, "_") with manifests.clone() as manifest: ssh.upload_file(manifest.content, u"{0}/{1}.zip".format(man_dir, org)) manifest_list.append(u"{0}/{1}.zip".format(man_dir, org)) options.update({"upload-manifests-from": man_dir}) result = cls.organization(options) ssh.command(u"rm -rf {0}".format(man_dir)) transition_data = cls.read_transition_csv( ssh.command(u"ls -v ${HOME}/.transition_data/organizations*").stdout[:-1] ) return (result, transition_data)
def test_positive_access_with_non_admin_user_with_manifest(self):
    """Access subscription page with user that has only view_subscriptions
    permission and organization that has a manifest uploaded.

    :id: 9184fcf6-36be-42c8-984c-3c5d7834b3b4

    :expectedresults: Subscription page is rendered properly without
        errors and the default subscription is visible

    :BZ: 1417082

    :CaseLevel: Integration

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    self.upload_manifest(org.id, manifests.clone())
    # Role limited to the single view_subscriptions permission.
    role = entities.Role().create()
    create_role_permissions(
        role,
        {'Katello::Subscription': ['view_subscriptions']}
    )
    password = gen_string('alphanumeric')
    user = entities.User(
        admin=False,
        role=[role],
        password=password,
        organization=[org],
        default_organization=org,
    ).create()
    with Session(self, user.login, password):
        self.subscriptions.navigate_to_entity()
        # A redirect to katello/403 would mean access was denied to the
        # restricted user (BZ 1417082).
        self.assertFalse(self.browser.current_url.endswith('katello/403'))
        self.assertIsNotNone(
            self.subscriptions.search(DEFAULT_SUBSCRIPTION_NAME))
def test_positive_add_subscription_by_id(self):
    """Test that subscription can be added to activation key

    :id: b884be1c-b35d-440a-9a9d-c854c83e10a7

    :Steps:

    1. Create Activation key
    2. Upload manifest and add subscription
    3. Associate the activation key to subscription

    :expectedresults: Subscription successfully added to activation key

    :BZ: 1463685

    :CaseLevel: Integration
    """
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    org_id = make_org()['id']
    # Create the key in the same organization the manifest is uploaded
    # to, so the subscription listed below can be attached to it.
    ackey_id = self._make_activation_key({'organization-id': org_id})['id']
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org_id,
    })
    subs_id = Subscription.list(
        {'organization-id': org_id},
        per_page=False
    )
    result = ActivationKey.add_subscription({
        u'id': ackey_id,
        u'subscription-id': subs_id[0]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
def test_verify_bugzilla_1225588(self):
    """Create Organization with valid values and upload manifest.
    Then try to delete that organization.

    @feature: Organization Positive Delete Test.

    @assert: Organization is deleted successfully.
    """
    org_name = gen_string('alphanumeric')
    org = entities.Organization(name=org_name).create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    with Session(self.browser) as session:
        make_lifecycle_environment(session, org_name, name='DEV')
        make_lifecycle_environment(
            session, org_name, name='QE', prior='DEV'
        )
        # Org cannot be deleted when selected,
        # So switching to Default Org and then deleting.
        session.nav.go_to_select_org('Default Organization')
        self.org.delete(org_name)
        # Poll the search several times; deletion may take a while to be
        # reflected in the UI (BZ 1225588).
        for _ in range(10):
            status = self.org.search(org_name)
            if status is None:
                break
        self.assertIsNone(status)
def test_positive_enable_by_label(self):
    """Enable repo from reposet by org label, reposet and product names

    @id: 5230c1cd-fed7-40ac-8445-bac4f9c5ee68

    @Assert: Repository was enabled
    """
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    available = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
    })
    # Pick the flag of the exact arch/release combination enabled above.
    matching_flags = [
        repo['enabled']
        for repo in available
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ]
    self.assertEqual(matching_flags[0], 'true')
def setUpClass(cls):  # noqa
    """Create an org with a manifest and an activation key carrying the
    default subscription; expose their identifiers on the class.
    """
    super(RHAITestCase, cls).setUpClass()
    # Create a new organization with prefix 'insights'
    org = entities.Organization(
        name="insights_{0}".format(gen_string("alpha", 6))).create()

    # Upload manifest
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)

    # Create activation key using default CV and library environment
    activation_key = entities.ActivationKey(
        auto_attach=True,
        content_view=org.default_content_view.id,
        environment=org.library.id,
        name=gen_string("alpha"),
        organization=org,
    ).create()

    # Walk through the list of subscriptions.
    # Find the "Red Hat Employee Subscription" and attach it to the
    # recently-created activation key.
    for subs in entities.Subscription(organization=org).search():
        if subs.read_json()["product_name"] == DEFAULT_SUBSCRIPTION_NAME:
            # 'quantity' must be 1, not subscription['quantity']. Greater
            # values produce this error: "RuntimeError: Error: Only pools
            # with multi-entitlement product subscriptions can be added to
            # the activation key with a quantity greater than one."
            activation_key.add_subscriptions(
                data={"quantity": 1, "subscription_id": subs.id})
            break
    # Identifiers consumed by the tests in this case.
    cls.org_label = org.label
    cls.ak_name = activation_key.name
    cls.org_name = org.name
def test_positive_update_rh_product(session):
    """Update Content View in an Activation key

    :id: 9b0ac209-45de-4cc4-97e8-e191f3f37239

    :Steps:

        1. Create an activation key
        2. Update the content view with another content view which has
           RH products

    :expectedresults: Activation key is updated

    :CaseLevel: Integration
    """
    name = gen_string('alpha')
    env1_name = gen_string('alpha')
    env2_name = gen_string('alpha')
    cv1_name = gen_string('alpha')
    cv2_name = gen_string('alpha')
    rh_repo1 = {
        'name': REPOS['rhva6']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': DEFAULT_ARCHITECTURE,
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    # Same reposet as rh_repo1 but the i386 variant, so the two content
    # views differ while using one manifest.
    rh_repo2 = {
        'name': ('Red Hat Enterprise Virtualization Agents for RHEL 6 '
                 'Server RPMs i386 6Server'),
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': 'i386',
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    # Prepare two promoted content views, each with one RH repository.
    repo1_id = enable_sync_redhat_repo(rh_repo1, org.id)
    cv_publish_promote(cv1_name, env1_name, repo1_id, org.id)
    repo2_id = enable_sync_redhat_repo(rh_repo2, org.id)
    cv_publish_promote(cv2_name, env2_name, repo2_id, org.id)
    with session:
        session.organization.select(org.name)
        session.activationkey.create({
            'name': name,
            'lce': {env1_name: True},
            'content_view': cv1_name,
        })
        assert session.activationkey.search(name)[0]['Name'] == name
        ak = session.activationkey.read(name)
        assert ak['details']['content_view'] == cv1_name
        # Extra search as a workaround while BZ 1597639 is open.
        if bz_bug_is_open(1597639):
            assert session.activationkey.search(name)[0]['Name'] == name
        session.activationkey.update(name, {'details': {
            'lce': {env2_name: True},
            'content_view': cv2_name,
        }})
        ak = session.activationkey.read(name)
        assert ak['details']['content_view'] == cv2_name
def test_positive_enable_by_name(self):
    """Enable repo from reposet by names of reposet, org and product

    @Feature: Repository-set

    @Assert: Repository was enabled
    """
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    repos = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
    })
    # Only the x86_64 / 6Server repository should have been enabled.
    flags = [
        entry['enabled']
        for entry in repos
        if entry['arch'] == 'x86_64' and entry['release'] == '6Server'
    ]
    self.assertEqual(flags[0], 'true')
def test_negative_rct_not_shows_golden_ticket_enabled(self):
    """Assert restricted manifest has no Golden Ticket enabled .

    :id: 754c1be7-468e-4795-bcf9-258a38f3418b

    :steps:

        1. Run `rct cat-manifest /tmp/restricted_manifest.zip`.

    :CaseAutomation: automated

    :expectedresults:
        1. Assert `Content Access Mode: org_environment` is not present.

    :CaseImportance: Critical
    """
    org = make_org()
    # upload organization manifest with org environment access enabled
    manifest = manifests.clone()
    manifests.upload_manifest_locked(
        org['id'], manifest, interface=manifests.INTERFACE_CLI
    )
    # Inspect the manifest file on the server with rct; a restricted
    # manifest must not advertise the org_environment access mode.
    result = ssh.command(
        'rct cat-manifest {0}'.format(manifest.filename))
    self.assertEqual(result.return_code, 0)
    self.assertNotIn(
        'Content Access Mode: org_environment',
        '\n'.join(result.stdout)
    )
def test_repositoryset_enable_by_label(self):
    """@Test: Enable repo from reposet by org label, reposet and product
    names

    @Feature: Repository-set

    @Assert: Repository was enabled
    """
    org = make_org()
    # clone() here yields a local file path (pre-context-manager API);
    # the same path is reused as the remote file name.
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    Subscription.upload({
        u'file': manifest,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    result = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
    })
    # Extract the 'enabled' flag of the arch/release enabled above.
    enabled = [
        repo['enabled']
        for repo in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ][0]
    self.assertEqual(enabled, 'true')
def test_positive_reposet_enable(self):
    """Enable repo from reposet

    :id: dedcecf7-613a-4e85-a3af-92fb57e2b0a1

    :expectedresults: Repository was enabled

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    # Locate the RHEL product and the target repository set by name.
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    reposet = entities.RepositorySet(
        name=REPOSET['rhva6'],
        product=product,
    ).search()[0]
    reposet.enable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    repositories = reposet.available_repositories()['results']
    # The x86_64/6Server entry enabled above must now report enabled.
    matching = [
        repo['enabled']
        for repo in repositories
        if repo['substitutions']['basearch'] == 'x86_64'
        and repo['substitutions']['releasever'] == '6Server'
    ]
    self.assertTrue(matching[0])
def test_positive_reposet_disable(self):
    """Disable repo from reposet

    :id: 60a102df-099e-4325-8924-2a31e5f738ba

    :expectedresults: Repository was disabled

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    reposet = entities.RepositorySet(
        name=REPOSET['rhva6'],
        product=product,
    ).search()[0]
    # Enable first so that disabling actually changes state.
    reposet.enable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    reposet.disable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    repositories = reposet.available_repositories()['results']
    # The x86_64/6Server entry must report enabled == False again.
    self.assertFalse([
        repo['enabled']
        for repo in repositories
        if (repo['substitutions']['basearch'] == 'x86_64' and
            repo['substitutions']['releasever'] == '6Server')
    ][0])
def test_positive_enable_by_name(self):
    """Enable repo from reposet by names of reposet, org and product

    :id: a78537bd-b88d-4f00-8901-e7944e5de729

    :expectedresults: Repository was enabled

    :CaseImportance: Critical
    """
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    result = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
    })
    # Extract the 'enabled' flag of the arch/release enabled above.
    enabled = [
        repo['enabled']
        for repo in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ][0]
    self.assertEqual(enabled, 'true')
def test_positive_sync_rh_ostree_repo(self):
    """Sync CDN based ostree repository .

    @id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5

    @Steps:
    1. Import a valid manifest
    2. Enable the OStree repo and sync it

    @Assert: ostree repo should be synced successfully from CDN

    @CaseLevel: Integration
    """
    repos = self.sync.create_repos_tree(ATOMIC_HOST_TREE)
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(org.name)
        session.nav.go_to_red_hat_repositories()
        # Enable the ostree repositories, then watch the sync status page.
        self.sync.enable_rh_repos(repos, repo_tab=REPO_TAB['ostree'])
        session.nav.go_to_sync_status()
        self.assertTrue(self.sync.sync_noversion_rh_repos(
            PRDS['rhah'], [REPOS['rhaht']['name']]
        ))
def test_positive_current_subscription_totals(self):
    """Check if the Current Subscriptions Totals widget is working in the
    Dashboard UI

    :id: 6d0f56ff-7007-4cdb-96f3-d9e8b6cc1701

    :Steps:

        1. Make sure sat6 has some active subscriptions
        2. Navigate to Monitor -> Dashboard
        3. Review the Current Subscription Total widget

    :expectedresults: The widget displays all the active subscriptions and
        expired subscriptions details

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    with Session(self.browser) as session:
        set_context(session, org=org.name)
        # A freshly uploaded manifest is expected to yield at least one
        # active subscription and no expiring/expired ones.
        self.assertGreaterEqual(self.dashboard.get_cst_subs_count(
            'Active Subscriptions'), 1)
        self.assertEqual(self.dashboard.get_cst_subs_count(
            'Subscriptions Expiring in 120 Days'), 0)
        self.assertEqual(self.dashboard.get_cst_subs_count(
            'Recently Expired Subscriptions'), 0)
def test_positive_copy_subscription(self): """Copy Activation key and verify contents @Feature: Activation key copy @Steps: 1. Create parent key and add content 2. Copy Activation key by passing id of parent 3. Verify content was successfully copied @Assert: Activation key is successfully copied """ # Begin test setup parent_ak = self._make_activation_key() with manifests.clone() as manifest: upload_file(manifest.content, manifest.filename) Subscription.upload({"file": manifest.filename, "organization-id": self.org["id"]}) subscription_result = Subscription.list({"organization-id": self.org["id"]}, per_page=False) ActivationKey.add_subscription({u"id": parent_ak["id"], u"subscription-id": subscription_result[0]["id"]}) # End test setup new_name = gen_string("utf8") result = ActivationKey.copy({u"id": parent_ak["id"], u"new-name": new_name, u"organization-id": self.org["id"]}) self.assertEqual(result[0], u"Activation key copied") result = ActivationKey.subscriptions({u"name": new_name, u"organization-id": self.org["id"]}) # Verify that the subscription copied over self.assertIn(subscription_result[0]["name"], result[3]) # subscription name # subscription list
def test_verify_bugzilla_1225588(self):
    """Create Organization with valid values and upload manifest.
    Then try to delete that organization.

    :id: 851c8557-a406-4a70-9c8b-94bcf0482f8d

    :expectedresults: Organization is deleted successfully.

    :CaseLevel: Integration

    :CaseImportance: Critical
    """
    org_name = gen_string('alphanumeric')
    org = entities.Organization(name=org_name).create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    with Session(self) as session:
        make_lifecycle_environment(session, org_name, name='DEV')
        make_lifecycle_environment(
            session, org_name, name='QE', prior='DEV'
        )
        # Org cannot be deleted when selected,
        # So switching to Default Org and then deleting.
        session.nav.go_to_select_org('Default Organization')
        self.org.delete(org_name, dropdown_present=True)
        # Poll the search several times; deletion may take a while to be
        # reflected in the UI (BZ 1225588).
        for _ in range(10):
            status = self.org.search(org_name)
            if status is None:
                break
        self.assertIsNone(status)
def test_positive_delete_bz1225588(self, org_name):
    """@test: Create Organization with valid values and upload manifest.
    Then try to delete that organization.

    @feature: Organization Positive Delete Test.

    @assert: Organization is deleted successfully.
    """
    org = entities.Organization(name=org_name).create()
    # clone() here yields a local file path which is opened and streamed
    # to the server (pre-context-manager manifests API).
    with open(manifests.clone(), 'rb') as manifest:
        upload_manifest(org.id, manifest)
    with Session(self.browser) as session:
        make_lifecycle_environment(session, org_name, name='DEV')
        make_lifecycle_environment(
            session, org_name, name='QE', prior='DEV'
        )
        # Org cannot be deleted when selected,
        # So switching to Default Org and then deleting.
        session.nav.go_to_select_org('Default Organization')
        self.org.remove(org_name)
        session.nav.go_to_dashboard()
        status = self.org.search(org_name)
        # Check for at least ten times that org is deleted due #1225588
        for _ in range(10):
            status = self.org.search(org_name)
            if status is None:
                break
        self.assertIsNone(status)
def setup_to_create_cv(self, repo_name=None, repo_url=None, repo_type=None,
                       rh_repo=None, org_id=None):
    """Create product/repo and sync it

    When ``rh_repo`` is None, a custom product and repository are
    created (defaults: random name, FAKE_1_YUM_REPO, yum type);
    otherwise a manifest is uploaded to ``org_id`` and the given Red
    Hat repository is enabled. The resulting repository is synced
    either way.
    """
    if not rh_repo:
        repo_name = repo_name or gen_string('alpha')

        # Creates new custom product via API's
        product = entities.Product(
            organization=org_id or self.organization
        ).create()
        # Creates new custom repository via API's
        repo_id = entities.Repository(
            name=repo_name,
            url=(repo_url or FAKE_1_YUM_REPO),
            content_type=(repo_type or REPO_TYPE['yum']),
            product=product,
        ).create().id
    elif rh_repo:
        # Uploads the manifest and returns the result.
        with open(manifests.clone(), 'rb') as manifest:
            upload_manifest(org_id, manifest)
        # Enables the RedHat repo and fetches it's Id.
        repo_id = enable_rhrepo_and_fetchid(
            basearch=rh_repo['basearch'],
            org_id=str(org_id),  # OrgId is passed as data in API hence str
            product=rh_repo['product'],
            repo=rh_repo['name'],
            reposet=rh_repo['reposet'],
            releasever=rh_repo['releasever'],
        )
    # Sync repository
    entities.Repository(id=repo_id).sync()
def test_add_subscription(self):
    """@Test: Test that subscription can be added to activation key

    @Feature: Activation key - Host

    @Steps:

    1. Create Activation key
    2. Upload manifest and add subscription
    3. Associate the activation key to subscription

    @Assert: Subscription successfully added to activation key
    """
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    org_id = make_org()['id']
    # Create the activation key inside the same organization the
    # manifest is uploaded to; otherwise the subscription listed below
    # belongs to a different org than the key and cannot be attached.
    ackey_id = self._make_activation_key({'organization-id': org_id})['id']
    Subscription.upload({
        'file': manifest,
        'organization-id': org_id,
    })
    subs_id = Subscription.list(
        {'organization-id': org_id}, per_page=False)
    result = ActivationKey.add_subscription({
        u'id': ackey_id,
        u'subscription-id': subs_id[0]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
def test_positive_reposet_disable(self):
    """@Test: Disable repo from reposet

    @Feature: Repository-set

    @Assert: Repository was disabled
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    reposet = entities.RepositorySet(
        name=REPOSET['rhva6'],
        product=product,
    ).search()[0]
    # Enable first so that disabling actually changes state.
    reposet.enable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    reposet.disable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    repositories = reposet.available_repositories()['results']
    # The x86_64/6Server entry must report enabled == False again.
    self.assertFalse([
        repo['enabled']
        for repo in repositories
        if (repo['substitutions']['basearch'] == 'x86_64' and
            repo['substitutions']['releasever'] == '6Server')
    ][0])
def module_org():
    """Yield an 'insights_*' organization with a manifest uploaded,
    deleting the organization on teardown.
    """
    org_name = "insights_{0}".format(gen_string("alpha", 6))
    organization = entities.Organization(name=org_name).create()
    with manifests.clone() as manifest:
        up_man(organization.id, manifest.content)
    yield organization
    # Teardown: remove the organization once dependent tests finish.
    organization.delete()
def test_positive_synchronize_rh_product_past_sync_date(self):
    """Create a sync plan with past datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next
    sync occurrence

    :id: 47280ef4-3936-4dbc-8ed0-1076aa8d40df

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 80
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': org['id'],
        # Back-date the plan so its next hourly occurrence lands
        # ``delay / 2`` seconds from now.  The offset must be passed as
        # ``seconds=``: ``timedelta(interval - delay/2)`` would be
        # interpreted as *days* and push the date years into the past.
        'sync-date': (
            datetime.utcnow() - timedelta(seconds=interval - delay / 2)
        ).strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Verify product has not been synced yet
    sleep(delay/4)
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(delay)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    # Start the plan at the current wall-clock hour/minute.
    startdate = datetime.now()
    with Session(self.browser) as session:
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(
            plan_name, add_products=[PRDS['rhel']])
        # Wait half of expected time
        sleep(interval / 2)
        # Verify product has not been synced yet
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
            after_sync=False,
        )
        # Wait the rest of expected time
        sleep(interval / 2)
        # Verify product was synced successfully
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
        )
def test_positive_create_composite(session):
    # Note: puppet repos cannot/should not be used in this test
    # It shouldn't work - and that is tested in a different case.
    # Individual modules from a puppet repo, however, are a valid
    # variation.
    """Create a composite content views

    :id: 550f1970-5cbd-4571-bb7b-17e97639b715

    :setup: sync multiple content source/types (RH, custom, etc.)

    :expectedresults: Composite content views are created

    :CaseLevel: System
    """
    puppet_module = 'httpd'
    cv_name1 = gen_string('alpha')
    cv_name2 = gen_string('alpha')
    composite_name = gen_string('alpha')
    rh_repo = {
        'name': REPOS['rhst7']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhst7'],
        'basearch': 'x86_64',
        'releasever': None,
    }
    # Create new org to import manifest
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    enable_sync_redhat_repo(rh_repo, org.id)
    create_sync_custom_repo(
        org.id,
        repo_url=FAKE_0_PUPPET_REPO,
        repo_type=REPO_TYPE['puppet'])
    with session:
        session.organization.select(org.name)
        # Create content views
        for cv_name in (cv_name1, cv_name2):
            session.contentview.create({'name': cv_name})
            assert session.contentview.search(cv_name)[0]['Name'] == cv_name
        # First view carries one puppet module, second a RH yum repo.
        session.contentview.add_puppet_module(cv_name1, puppet_module)
        cv1 = session.contentview.read(cv_name1)
        assert cv1['puppet_modules']['table'][0]['Name'] == puppet_module
        session.contentview.publish(cv_name1)
        # fixme: drop next line after airgun#63 is solved
        session.contentview.search(cv_name2)
        session.contentview.add_yum_repo(cv_name2, rh_repo['name'])
        session.contentview.publish(cv_name2)
        session.contentview.create({
            'name': composite_name,
            'composite_view': True,
        })
        for cv_name in (cv_name1, cv_name2):
            session.contentview.add_cv(composite_name, cv_name)
        composite_cv = session.contentview.read(composite_name)
        # Both source views must appear in the composite's assigned list.
        assert {cv_name1, cv_name2} == set([
            cv['Name']
            for cv in composite_cv['content_views']['resources']['assigned']
        ])
def test_positive_candlepin_events_processed_by_STOMP(session, rhel7_contenthost, target_sat):
    """Verify that Candlepin events are being read and processed by
    attaching subscriptions, validating host subscriptions status,
    and viewing processed and failed Candlepin events

    :id: 9510fd1c-2efb-4132-8665-9a72273cd1af

    :steps:

        1. Register Content Host without subscriptions attached
        2. Verify subscriptions status is red "invalid"
        3. Import a Manifest
        4. Attach subs to content host
        5. Verify subscription status is green "valid"
        6. Check for processed and failed Candlepin events

    :expectedresults: Candlepin events are being read and processed
        correctly without any failures

    :BZ: 1826515

    :parametrized: yes

    :CaseImportance: High
    """
    org = entities.Organization().create()
    repo = entities.Repository(product=entities.Product(
        organization=org).create()).create()
    repo.sync()
    ak = entities.ActivationKey(
        content_view=org.default_content_view,
        max_hosts=100,
        organization=org,
        environment=entities.LifecycleEnvironment(id=org.library.id),
    ).create()
    rhel7_contenthost.install_katello_ca(target_sat)
    rhel7_contenthost.register_contenthost(org.name, ak.name)
    with session:
        session.organization.select(org_name=org.name)
        # The host was registered before any manifest was imported, so
        # it has no entitlement yet.
        host = session.contenthost.read(
            rhel7_contenthost.hostname, widget_names='details')['details']
        assert 'Unentitled' in host['subscription_status']
        with manifests.clone() as manifest:
            upload_manifest(org.id, manifest.content)
        session.contenthost.add_subscription(
            rhel7_contenthost.hostname, DEFAULT_SUBSCRIPTION_NAME)
        session.browser.refresh()
        updated_sub_status = session.contenthost.read(
            rhel7_contenthost.hostname,
            widget_names='details')['details']['subscription_status']
        assert 'Fully entitled' in updated_sub_status
        # Check processed/failed Candlepin event counters via ping API.
        response = entities.Ping().search_json(
        )['services']['candlepin_events']
        assert response['status'] == 'ok'
        assert '0 Failed' in response['message']
def test_positive_candlepin_events_processed_by_stomp(rhel7_contenthost, function_org, default_sat):
    """Verify that Candlepin events are being read and processed by
    attaching subscriptions, validating host subscriptions status,
    and viewing processed and failed Candlepin events

    :id: efd20ffd-8f98-4536-abb6-d080f9d23169

    :steps:

        1. Add subscriptions to content host
        2. Verify subscription status is invalid at
           <your-satellite-url>/api/v2/hosts
        3. Import a Manifest
        4. Attach subs to content host
        5. Verify subscription status is valid
        6. Check ping api for processed and failed events
           /katello/api/v2/ping

    :expectedresults: Candlepin events are being read and processed
        correctly without any failures

    :BZ: 1826515

    :CaseImportance: High
    """
    repo = entities.Repository(product=entities.Product(
        organization=function_org).create()).create()
    repo.sync()
    ak = entities.ActivationKey(
        content_view=function_org.default_content_view,
        max_hosts=100,
        organization=function_org,
        environment=entities.LifecycleEnvironment(id=function_org.library.id),
        auto_attach=True,
    ).create()
    rhel7_contenthost.install_katello_ca(default_sat)
    rhel7_contenthost.register_contenthost(function_org.name, ak.name)
    host = entities.Host().search(
        query={'search': f'name={rhel7_contenthost.hostname}'})
    host_id = host[0].id
    host_content = entities.Host(id=host_id).read_json()
    # Per step 2, status 2 is the "invalid" subscription status before a
    # manifest is imported -- TODO confirm the status-code mapping.
    assert host_content['subscription_status'] == 2
    with manifests.clone() as manifest:
        upload_manifest(function_org.id, manifest.content)
    subscription = entities.Subscription(organization=function_org).search(
        query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'})[0]
    entities.HostSubscription(host=host_id).add_subscriptions(
        data={'subscriptions': [{
            'id': subscription.cp_id,
            'quantity': 1
        }]})
    host_content = entities.Host(id=host_id).read_json()
    # Per step 5, status 0 is the "valid" subscription status.
    assert host_content['subscription_status'] == 0
    # Check processed/failed Candlepin event counters via the ping API.
    response = entities.Ping().search_json()['services']['candlepin_events']
    assert response['status'] == 'ok'
    assert '0 Failed' in response['message']
def test_positive_end_to_end(session):
    """Upload a manifest with minimal input parameters, attempt to delete it
    with checking the warning message and hit 'Cancel' button after than delete it.

    :id: 580fc072-01e0-4f83-8cbb-2a8522d76243

    :expectedresults:
        1. The manifest was uploaded successfully.
        2. Manifest import is reflected at the dashboard
        3. When attempting to delete the manifest the confirmation dialog
           contains informative message which warns user about downsides and
           consequences of manifest deletion.
        4. When hitting cancel the manifest was not deleted.
        5. When deleting and confirming deletion, the manifest was deleted
           successfully.

    :BZ: 1266827

    :CaseImportance: Critical
    """
    # The exact warning lines the delete-confirmation dialog must display,
    # compared below as a single space-joined string.
    expected_message_lines = [
        'Are you sure you want to delete the manifest?',
        'Note: Deleting a subscription manifest is STRONGLY discouraged. '
        'Deleting a manifest will:',
        'Delete all subscriptions that are attached to running hosts.',
        'Delete all subscriptions attached to activation keys.',
        'Disable Red Hat Insights.',
        'Require you to upload the subscription-manifest and re-attach '
        'subscriptions to hosts and activation keys.',
        'This action should only be taken in extreme circumstances or for debugging purposes.',
    ]
    org = entities.Organization().create()
    # Write the cloned manifest to a local temp file because the UI upload
    # widget needs a filesystem path.
    _, temporary_local_manifest_path = mkstemp(prefix='manifest-', suffix='.zip')
    with manifests.clone() as manifest:
        with open(temporary_local_manifest_path, 'wb') as file_handler:
            file_handler.write(manifest.content.read())
    with session:
        session.organization.select(org.name)
        # Ignore "404 Not Found" as server will connect to upstream subscription service to verify
        # the consumer uuid, that will be displayed in flash error messages
        # Note: this happen only when using clone manifest.
        session.subscription.add_manifest(
            temporary_local_manifest_path, ignore_error_messages=['404 Not Found']
        )
        assert session.subscription.has_manifest
        # dashboard check
        subscription_values = session.dashboard.read('SubscriptionStatus')['subscriptions']
        assert subscription_values[0]['Subscription Status'] == 'Active Subscriptions'
        assert int(subscription_values[0]['Count']) >= 1
        assert subscription_values[1]['Subscription Status'] == 'Subscriptions Expiring in 120 Days'
        assert int(subscription_values[1]['Count']) == 0
        assert subscription_values[2]['Subscription Status'] == 'Recently Expired Subscriptions'
        assert int(subscription_values[2]['Count']) == 0
        # manifest delete testing
        delete_message = session.subscription.read_delete_manifest_message()
        assert ' '.join(expected_message_lines) == delete_message
        # reading the message cancels the dialog, so the manifest survives
        assert session.subscription.has_manifest
        session.subscription.delete_manifest(ignore_error_messages=['404 Not Found'])
        assert not session.subscription.has_manifest
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    @id: 193d0159-d4a7-4f50-b037-7289f4576ade

    @Assert: Product is synchronized successfully.

    @CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    # Use the client's clock so the plan start time matches the browser
    # session's timezone.
    startdate = self.get_client_datetime() + timedelta(seconds=delay)
    with Session(self.browser) as session:
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(plan_name, add_products=[PRDS['rhel']])
        # Wait half of expected time
        sleep(delay / 2)
        # Verify product has not been synced yet
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
            after_sync=False,
        )
        # Wait the rest of expected time
        sleep(delay / 2)
        # Verify product was synced successfully
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
        )
def setup_content_rhel6():
    """Setup content for rhel6 content host
    Using `Red Hat Enterprise Virtualization Agents for RHEL 6 Server (RPMs)`
    from manifest, SATTOOLS_REPO for host-tools and yum_9 repo as custom repo.

    :return: Activation Key, Organization, subscription list
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)

    # RH repository: RHEV Agents for RHEL 6 Server.
    rh_repo_id_rhva = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=constants.PRDS['rhel'],
        repo=constants.REPOS['rhva6']['name'],
        reposet=constants.REPOSET['rhva6'],
        releasever=constants.DEFAULT_RELEASE_VERSION,
    )
    rh_repo = entities.Repository(id=rh_repo_id_rhva).read()
    rh_repo.sync()
    # Custom product carrying the host-tools repository; URL is set after
    # creation and the entity re-read via update(['url']).
    host_tools_product = entities.Product(organization=org).create()
    host_tools_repo = entities.Repository(
        product=host_tools_product,
    ).create()
    host_tools_repo.url = settings.repos.SATTOOLS_REPO.RHEL6
    host_tools_repo = host_tools_repo.update(['url'])
    host_tools_repo.sync()
    # Second custom product with an arbitrary yum repo.
    custom_product = entities.Product(organization=org).create()
    custom_repo = entities.Repository(
        product=custom_product,
    ).create()
    custom_repo.url = CUSTOM_REPO_URL
    custom_repo = custom_repo.update(['url'])
    custom_repo.sync()

    lce = entities.LifecycleEnvironment(organization=org).create()

    # Content view over all three repos, published and promoted to the LCE.
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id_rhva, host_tools_repo.id, custom_repo.id],
    ).create()
    cv.publish()
    cvv = cv.read().version[0].read()
    promote(cvv, lce.id)

    ak = entities.ActivationKey(content_view=cv, organization=org, environment=lce).create()

    # Attach one subscription per product to the activation key.
    sub_list = [DEFAULT_SUBSCRIPTION_NAME, host_tools_product.name, custom_product.name]
    for sub_name in sub_list:
        subscription = entities.Subscription(organization=org).search(
            query={'search': f'name="{sub_name}"'}
        )[0]
        ak.add_subscriptions(data={'subscription_id': subscription.id})
    return ak, org, sub_list
def manifest_org(org):
    """Upload a cloned manifest into ``org`` (a CLI org dict) and return it."""
    with manifests.clone() as cloned:
        upload_file(cloned.content, cloned.filename)
        Subscription.upload({
            'organization-id': org['id'],
            'file': cloned.filename,
        })
    return org
def test_positive_synchronize_rh_product_future_sync_date(module_org):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6697a00f-2181-4c2b-88eb-2333268d780b

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 2 * 60  # delay for sync date in seconds
    # NOTE(review): the ``module_org`` fixture is never used — a fresh org is
    # created instead. Confirm whether the fixture parameter can be dropped.
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(data={'organization_id': org.id},
                                       files={'content': manifest.content})
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(name=PRDS['rhel'], organization=org).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # BZ:1695733 is closed WONTFIX so apply this workaround
    logger.info('Need to set seconds to zero because BZ#1695733')
    sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay)
    sync_plan = entities.SyncPlan(organization=org,
                                  enabled=True,
                                  interval='hourly',
                                  sync_date=sync_date).create()
    # Create and Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product is not synced and doesn't have any content
    with pytest.raises(AssertionError):
        validate_task_status(repo.id, max_tries=1)
    validate_repo_content(repo, ['erratum', 'package', 'package_group'],
                          after_sync=False)
    # Wait quarter of expected time
    logger.info(
        f'Waiting {delay / 4} seconds to check product {product.name} was not synced'
    )
    sleep(delay / 4)
    # Verify product has not been synced yet
    with pytest.raises(AssertionError):
        validate_task_status(repo.id, max_tries=1)
    validate_repo_content(repo, ['erratum', 'package', 'package_group'],
                          after_sync=False)
    # Wait the rest of expected time
    logger.info(
        f'Waiting {delay * 3 / 4} seconds to check product {product.name} was synced'
    )
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    validate_task_status(repo.id, repo_backend_id=repo.backend_identifier)
    validate_repo_content(repo, ['erratum', 'package', 'package_group'])
def manifest_org(org, target_sat):
    """Push a cloned manifest to the Satellite and import it into ``org``."""
    with manifests.clone() as cloned:
        target_sat.put(cloned, cloned.filename)
        Subscription.upload({
            'organization-id': org['id'],
            'file': cloned.filename,
        })
    return org
def module_org():
    """Yield an org prepared for Insights tests.

    Creates an ``insights_``-prefixed organization, enables remote execution
    by IP via a Parameter, and imports a cloned manifest.
    """
    org = nailgun.entities.Organization(
        name=f'insights_{gen_string("alpha", 6)}'
    ).create()
    nailgun.entities.Parameter(
        name='remote_execution_connect_by_ip',
        value='Yes',
        organization=org.id,
    ).create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    yield org
def test_positive_expired_SCA_cert_handling(module_org, rhel7_contenthost):
    """Verify that a content host with an expired SCA cert can
    re-register successfully

    :id: 27bca6b8-dd9c-4977-81d2-319588ee59b3

    :steps:

        1. Import an SCA-enabled manifest
        2. Register a content host to the Default Organization View using an
           activation key
        3. Unregister the content host
        4. Enable and synchronize a repository
        5. Re-register the host using the same activation key as in step 3 above

    :expectedresults: the host is re-registered successfully and its SCA
        entitlement certificate is refreshed

    :CustomerScenario: true

    :Assignee: dsynk

    :BZ: 1949353

    :CaseImportance: High
    """
    # 'golden_ticket' is the SCA-enabled manifest variant.
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(module_org.id, manifest.content)
    ak = entities.ActivationKey(
        content_view=module_org.default_content_view,
        max_hosts=100,
        organization=module_org,
        environment=entities.LifecycleEnvironment(id=module_org.library.id),
        auto_attach=True,
    ).create()
    # registering the content host with no content enabled/synced in the org
    # should create a client SCA cert with no content
    rhel7_contenthost.install_katello_ca()
    rhel7_contenthost.register_contenthost(org=module_org.label, activation_key=ak.name)
    assert rhel7_contenthost.subscribed
    rhel7_contenthost.unregister()
    # syncing content with the content host unregistered should invalidate
    # the previous client SCA cert
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    # re-registering the host should test whether Candlepin gracefully handles
    # registration of a host with an expired SCA cert
    rhel7_contenthost.register_contenthost(module_org.label, ak.name)
    assert rhel7_contenthost.subscribed
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    :id: 47280ef4-3936-4dbc-8ed0-1076aa8d40df

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    # Sync date "now": the plan's first recurrence lands one interval later.
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_synchronize_rh_product_past_sync_date(self):
    """Create a sync plan with past datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    :id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 2 * 60
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(data={'organization_id': org.id},
                                       files={'content': manifest.content})
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(name=PRDS['rhel'], organization=org).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Back-date the sync date so the next hourly recurrence fires ``delay``
    # seconds from now.
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval='hourly',
        sync_date=datetime.utcnow() - timedelta(seconds=interval - delay),
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Wait quarter of expected time
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay / 4, product.name))
    sleep(delay / 4)
    # Verify product has not been synced yet
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo.id, max_tries=1)
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait until the next recurrence
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format((delay * 3 / 4), product.name))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    self.validate_task_status(repo.id, repo_backend_id=repo.backend_identifier)
    self.validate_repo_content(repo, ['erratum', 'package', 'package_group'])
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6ce2f777-f230-4bb8-9822-2cf3580c21aa

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': org['id'],
        'sync-date': (datetime.utcnow() + timedelta(seconds=delay))
            .strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(delay/2)
    # Verify product has not been synced yet
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(delay/2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_create_composite(session):
    # Note: puppet repos cannot/should not be used in this test
    # It shouldn't work - and that is tested in a different case.
    # Individual modules from a puppet repo, however, are a valid
    # variation.
    """Create a composite content views

    :id: 550f1970-5cbd-4571-bb7b-17e97639b715

    :setup: sync multiple content source/types (RH, custom, etc.)

    :expectedresults: Composite content views are created

    :CaseLevel: System
    """
    puppet_module = 'httpd'
    cv_name1 = gen_string('alpha')
    cv_name2 = gen_string('alpha')
    composite_name = gen_string('alpha')
    rh_repo = {
        'name': REPOS['rhst7']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhst7'],
        'basearch': 'x86_64',
        'releasever': None,
    }
    # Create new org to import manifest
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    enable_sync_redhat_repo(rh_repo, org.id)
    create_sync_custom_repo(org.id,
                            repo_url=FAKE_0_PUPPET_REPO,
                            repo_type=REPO_TYPE['puppet'])
    with session:
        session.organization.select(org.name)
        # Create content views: cv1 carries a puppet module, cv2 an RH repo.
        for cv_name in (cv_name1, cv_name2):
            session.contentview.create({'name': cv_name})
            assert session.contentview.search(cv_name)[0]['Name'] == cv_name
        session.contentview.add_puppet_module(cv_name1, puppet_module)
        cv1 = session.contentview.read(cv_name1)
        assert cv1['puppet_modules']['table'][0]['Name'] == puppet_module
        # Both views must be published before they can join a composite.
        session.contentview.publish(cv_name1)
        session.contentview.add_yum_repo(cv_name2, rh_repo['name'])
        session.contentview.publish(cv_name2)
        session.contentview.create({
            'name': composite_name,
            'composite_view': True,
        })
        for cv_name in (cv_name1, cv_name2):
            session.contentview.add_cv(composite_name, cv_name)
        composite_cv = session.contentview.read(composite_name)
        assert {cv_name1, cv_name2} == set([
            cv['Name']
            for cv in composite_cv['content_views']['resources']['assigned']
        ])
def test_positive_create(self):
    """Upload a manifest.

    @id: 6faf9d96-9b45-4bdc-afa9-ec3fbae83d41

    @Assert: Manifest is uploaded successfully
    """
    new_org = entities.Organization().create()
    with manifests.clone() as cloned_manifest:
        upload_manifest(new_org.id, cloned_manifest.content)
def module_gt_manifest_org():
    """Creates a new org and loads GT manifest in the new org"""
    gt_org = entities.Organization().create()
    gt_manifest = manifests.clone(name='golden_ticket', org_environment_access=True)
    manifests.upload_manifest_locked(
        gt_org.id, gt_manifest, interface=manifests.INTERFACE_CLI
    )
    # Expose the manifest filename so tests can reference it later.
    gt_org.manifest_filename = gt_manifest.filename
    return gt_org
def test_positive_create(self):
    """Upload a manifest.

    @Assert: Manifest is uploaded successfully

    @Feature: Subscriptions
    """
    target_org = entities.Organization().create()
    with manifests.clone() as cloned_manifest:
        upload_manifest(target_org.id, cloned_manifest.content)
def test_select_customizable_columns_uncheck_and_checks_all_checkboxes(
        session):
    """Ensures that no column headers from checkboxes show up in the table after
    unticking everything from selectable customizable column

    :id: 88e140c7-ab4b-4d85-85bd-d3eff12162d7

    :steps:
        1. Login and go to Content -> Subscription
        2. Click selectable customizable column icon next to search button
        3. Iterate through list of checkboxes
        4. Unchecks all ticked checkboxes
        5. Verify that the table header column doesn't have any headers from
           checkboxes

        Note: Table header will always contain 'Select all rows' header in
        html, but will not be displayed in UI

    :expectedresults:
        1. No column headers show up

    :CaseImportance: Medium
    """
    # All selectable columns, initially unticked.
    checkbox_dict = {
        'Name': False,
        'Type': False,
        'SKU': False,
        'Contract': False,
        'Start Date': False,
        'End Date': False,
        'Requires Virt-Who': False,
        'Consumed': False,
        'Entitlements': False,
    }
    org = entities.Organization().create()
    # Write the cloned manifest to a local temp file for the UI upload widget.
    _, temporary_local_manifest_path = mkstemp(prefix='manifest-', suffix='.zip')
    with manifests.clone() as manifest:
        with open(temporary_local_manifest_path, 'wb') as file_handler:
            file_handler.write(manifest.content.read())
    with session:
        session.organization.select(org.name)
        # Ignore "404 Not Found" as server will connect to upstream subscription service to verify
        # the consumer uuid, that will be displayed in flash error messages
        # Note: this happen only when using clone manifest.
        session.subscription.add_manifest(
            temporary_local_manifest_path,
            ignore_error_messages=['404 Not Found'])
        # With everything unticked, no checkbox-controlled header may appear.
        headers = session.subscription.filter_columns(checkbox_dict)
        assert headers[0] not in list(checkbox_dict)
        # Brief pause to let the table re-render before re-ticking everything.
        time.sleep(3)
        checkbox_dict.update((k, True) for k in checkbox_dict)
        col = session.subscription.filter_columns(checkbox_dict)
        # col[0] is the hidden 'Select all rows' header; skip it.
        assert set(col[1:]) == set(checkbox_dict)
def setUpClass(cls):
    """Creates the pre-requisites for the Incremental updates that used per
    all test"""
    super(IncrementalUpdateTestCase, cls).setUpClass()
    # Create a new Organization
    cls.org = Organization(name=gen_alpha()).create()

    # Create two lifecycle environments - DEV, QE
    cls.dev_lce = LifecycleEnvironment(
        name='DEV',
        organization=cls.org
    ).create()
    cls.qe_lce = LifecycleEnvironment(
        name='QE',
        prior=cls.dev_lce,
        organization=cls.org
    ).create()

    # Upload manifest
    with manifests.clone() as manifest:
        upload_manifest(cls.org.id, manifest.content)

    # Enable repositories - RHE Virtualization Agents and rhel6 sat6tools
    rhva_6_repo_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=cls.org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhva6']['name'],
        reposet=REPOSET['rhva6'],
        releasever=DEFAULT_RELEASE_VERSION,
    )
    rhel6_sat6tools_repo_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=cls.org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst6']['name'],
        reposet=REPOSET['rhst6'],
        releasever=None,
    )

    # Read the repositories
    cls.rhva_6_repo = Repository(id=rhva_6_repo_id).read()
    cls.rhel6_sat6tools_repo = Repository(
        id=rhel6_sat6tools_repo_id
    ).read()

    # Sync the enabled repositories
    try:
        # Temporarily raise the nailgun task timeout — RH repo syncs can
        # exceed the default; the original value is always restored.
        cls.old_task_timeout = entity_mixins.TASK_TIMEOUT
        # Update timeout to 15 minutes to finish sync
        entity_mixins.TASK_TIMEOUT = 900
        for repo in [cls.rhva_6_repo, cls.rhel6_sat6tools_repo]:
            assert Repository(id=repo.id).sync()['result'] == u'success'
    finally:
        entity_mixins.TASK_TIMEOUT = cls.old_task_timeout
def test_positive_update_rh_product(session):
    """Update Content View in an Activation key

    :id: 9b0ac209-45de-4cc4-97e8-e191f3f37239

    :Steps:

        1. Create an activation key
        2. Update the content view with another content view which has RH
           products

    :expectedresults: Activation key is updated

    :CaseLevel: Integration
    """
    name = gen_string('alpha')
    env1_name = gen_string('alpha')
    env2_name = gen_string('alpha')
    cv1_name = gen_string('alpha')
    cv2_name = gen_string('alpha')
    # Two variants of the same RHVA 6 reposet differing only in basearch.
    rh_repo1 = {
        'name': REPOS['rhva6']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': DEFAULT_ARCHITECTURE,
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    rh_repo2 = {
        'name': ('Red Hat Enterprise Virtualization Agents for RHEL 6 Server RPMs i386 6Server'),
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': 'i386',
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    # One content view per repo, each published/promoted to its own LCE.
    repo1_id = enable_sync_redhat_repo(rh_repo1, org.id)
    cv_publish_promote(cv1_name, env1_name, repo1_id, org.id)
    repo2_id = enable_sync_redhat_repo(rh_repo2, org.id)
    cv_publish_promote(cv2_name, env2_name, repo2_id, org.id)
    with session:
        session.organization.select(org.name)
        session.activationkey.create(
            {'name': name, 'lce': {env1_name: True}, 'content_view': cv1_name}
        )
        assert session.activationkey.search(name)[0]['Name'] == name
        ak = session.activationkey.read(name, widget_names='details')
        assert ak['details']['content_view'] == cv1_name
        # Switching the CV also requires switching to the matching LCE.
        session.activationkey.update(
            name, {'details': {'lce': {env2_name: True}, 'content_view': cv2_name}}
        )
        ak = session.activationkey.read(name, widget_names='details')
        assert ak['details']['content_view'] == cv2_name
def test_positive_add_rh_and_custom_products(session):
    """Test that RH/Custom product can be associated to Activation keys

    :id: 3d8876fa-1412-47ca-a7a4-bce2e8baf3bc

    :Steps:
        1. Create Activation key
        2. Associate RH product(s) to Activation Key
        3. Associate custom product(s) to Activation Key

    :expectedresults: RH/Custom product is successfully associated to
        Activation key

    :CaseLevel: Integration
    """
    name = gen_string('alpha')
    rh_repo = {
        'name': REPOS['rhva6']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': DEFAULT_ARCHITECTURE,
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    custom_product_name = gen_string('alpha')
    repo_name = gen_string('alpha')
    org = entities.Organization().create()
    product = entities.Product(name=custom_product_name, organization=org).create()
    repo = entities.Repository(name=repo_name, product=product).create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rhel_repo_id = enable_rhrepo_and_fetchid(
        basearch=rh_repo['basearch'],
        org_id=org.id,
        product=rh_repo['product'],
        repo=rh_repo['name'],
        reposet=rh_repo['reposet'],
        releasever=rh_repo['releasever'],
    )
    # Sync both the RH and the custom repo so their subscriptions exist.
    for repo_id in [rhel_repo_id, repo.id]:
        entities.Repository(id=repo_id).sync()
    with session:
        session.organization.select(org.name)
        session.activationkey.create(
            {'name': name, 'lce': {ENVIRONMENT: True}, 'content_view': DEFAULT_CV}
        )
        assert session.activationkey.search(name)[0]['Name'] == name
        for subscription in (DEFAULT_SUBSCRIPTION_NAME, custom_product_name):
            session.activationkey.add_subscription(name, subscription)
        ak = session.activationkey.read(name, widget_names='subscriptions')
        subscriptions = [
            subscription['Repository Name']
            for subscription in ak['subscriptions']['resources']['assigned']
        ]
        assert {DEFAULT_SUBSCRIPTION_NAME, custom_product_name} == set(subscriptions)
def test_positive_create(self):
    """Upload a manifest.

    :id: 6faf9d96-9b45-4bdc-afa9-ec3fbae83d41

    :expectedresults: Manifest is uploaded successfully

    :CaseImportance: Critical
    """
    fresh_org = entities.Organization().create()
    with manifests.clone() as cloned_manifest:
        upload_manifest(fresh_org.id, cloned_manifest.content)
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    @id: 6697a00f-2181-4c2b-88eb-2333268d780b

    @Assert: Product is synchronized successfully.

    @CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow() + timedelta(seconds=delay),
    ).create()
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait half of expected time
    sleep(delay / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait the rest of expected time
    sleep(delay / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'])
def test_positive_fetch_product_content(self):
    """Associate RH & custom product with AK and fetch AK's product content

    :id: 424f3dfb-0112-464b-b633-e8c9bce6e0f1

    :expectedresults: Both Red Hat and custom product subscriptions are
        assigned as Activation Key's product content

    :BZ: 1426386

    :CaseLevel: Integration

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_repo = entities.Repository(
        product=entities.Product(organization=org).create(),
    ).create()
    custom_repo.sync()
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id, custom_repo.id],
    ).create()
    cv.publish()
    ak = entities.ActivationKey(content_view=cv, organization=org).create()
    # Attach every org subscription whose provided products cover either
    # the RH or the custom repo's product.
    org_subscriptions = entities.Subscription(organization=org).search()
    for subscription in org_subscriptions:
        provided_products_ids = [
            prod.id for prod in subscription.read().provided_product
        ]
        if (custom_repo.product.id in provided_products_ids or
                rh_repo.product.id in provided_products_ids):
            ak.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': subscription.id,
            })
    # The AK's product content must now cover exactly both products.
    ak_subscriptions = ak.product_content()['results']
    self.assertEqual(
        {custom_repo.product.id, rh_repo.product.id},
        {subscr['product']['id'] for subscr in ak_subscriptions})
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    @Assert: Product is synchronized successfully.

    @BZ: 1279539

    @CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Sync date "now": first recurrence of the hourly plan lands one
    # interval later.
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow(),
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'])
def test_positive_disable_by_id(self):
    """Disable repo from reposet by IDs of reposet, org and product

    :id: 0d6102ba-3fb9-4eb8-972e-d537e252a8e6

    :expectedresults: Repository was disabled

    :CaseImportance: Critical
    """
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    product_id = Product.info({
        u'name': PRDS['rhel'],
        u'organization-id': org['id'],
    })['id']
    reposet_id = RepositorySet.info({
        u'name': REPOSET['rhva6'],
        u'organization-id': org['id'],
        u'product-id': product_id,
    })['id']
    # Enable then immediately disable the same arch/release combination.
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
        u'releasever': '6Server',
    })
    RepositorySet.disable({
        u'basearch': 'x86_64',
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
        u'releasever': '6Server',
    })
    result = RepositorySet.available_repositories({
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
    })
    # Pick out the exact repo variant we toggled; the CLI reports the
    # enabled flag as the string 'true'/'false'.
    enabled = [
        repo['enabled']
        for repo
        in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ][0]
    self.assertEqual(enabled, 'false')
def _upload_manifest(self, org_id, manifest=None):
    """Uploads a manifest into an organization.

    A cloned manifest will be used if ``manifest`` is None.

    :param org_id: ID of the organization the manifest is uploaded to.
    :param manifest: optional manifest object exposing ``content``
        (a file-like object) and ``filename``.
    """
    if manifest is None:
        manifest = manifests.clone()
    try:
        upload_file(manifest.content, manifest.filename)
        Subscription.upload({
            u'file': manifest.filename,
            'organization-id': org_id,
        })
    finally:
        # Always close the manifest file handle — the original code leaked
        # it when upload_file/Subscription.upload raised.
        manifest.content.close()
def module_rh_repo(module_org):
    """Upload a manifest into ``module_org``, enable and sync the Satellite
    Tools repository, and return the synced Repository entity."""
    manifests.upload_manifest_locked(module_org.id, manifests.clone())
    tools = SatelliteToolsRepository(cdn=True)
    repo_id = enable_rhrepo_and_fetchid(
        org_id=module_org.id,
        basearch=tools.data['arch'],
        product=tools.data['product'],
        repo=tools.data['repository'],
        reposet=tools.data['repository-set'],
        releasever=tools.data['releasever'],
    )
    repository = entities.Repository(id=repo_id)
    repository.sync()
    return repository.read()
def golden_ticket_host_setup(request, module_org):
    """Upload a Golden Ticket manifest into ``module_org``, create and sync a
    custom repo, and return an activation key (auto-attach disabled) against
    the default content view in Library."""
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(module_org.id, manifest.content)
    product = make_product({'organization-id': module_org.id})
    repository = make_repository({'product-id': product['id']})
    Repository.synchronize({'id': repository['id']})
    activation_key = make_activation_key({
        'organization-id': module_org.id,
        'content-view': 'Default Organization View',
        'lifecycle-environment': 'Library',
        'auto-attach': False,
    })
    return activation_key
def test_positive_delete_subscription(self):
    """Check if deleting a subscription removes it from Activation key

    :id: bbbe4641-bfb0-48d6-acfc-de4294b18c15

    :expectedresults: Deleting subscription removes it from the
        Activation key

    :CaseLevel: Integration
    """
    new_ak = self._make_activation_key()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': self.org['id'],
    })
    subscription_result = Subscription.list(
        {
            'organization-id': self.org['id'],
            'order': 'id desc'
        }, per_page=False)
    result = ActivationKey.add_subscription({
        u'id': new_ak['id'],
        u'subscription-id': subscription_result[-1]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
    ak_subs_info = ActivationKey.subscriptions({
        u'id': new_ak['id'],
        u'organization-id': self.org['id'],
    })
    # The subscriptions output is line-based; 6 lines corresponds to one
    # attached subscription, 4 lines to none — NOTE(review): these counts
    # depend on the CLI output format; confirm if it changes.
    self.assertEqual(len(ak_subs_info), 6)
    result = ActivationKey.remove_subscription({
        u'id': new_ak['id'],
        u'subscription-id': subscription_result[-1]['id'],
    })
    self.assertIn('Subscription removed from activation key', result)
    ak_subs_info = ActivationKey.subscriptions({
        u'id': new_ak['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(len(ak_subs_info), 4)