def update_product_subscription_in_ak(product, yum_repo, ak, org):
    """Updates given products subscription in given AK

    Adds ``yum_repo`` to the AK's content view, publishes and promotes a
    new CV version into the AK's environment, then attaches the
    subscription named after ``product`` to the AK.

    :param nailgun.entities.Product product: products name to calculate subscription id
    :param nailgun.entities.Repository yum_repo: yum repository
    :param nailgun.entities.ActivationKey ak: Ak
    :param nailgun.entities.Organization org: Organization
    """
    cv_from_ak = ak.content_view
    cv = cv_from_ak.read()
    cv.repository.append(yum_repo)
    cv = cv.update(['repository'])
    cv.publish()
    cv = cv.read()  # Published CV with new version
    # Promote CV
    # Re-search the AK to get its lifecycle environment (the passed-in
    # ``ak`` object may not have the environment populated)
    environment = (entities.ActivationKey(organization=org).search(
        query={'search': 'name={}'.format(ak.name)})[0].environment)
    # max(id) picks the version just published above
    cvv = entities.ContentViewVersion(id=max([cvv.id for cvv in cv.version])).read()
    cvv.promote(data={'environment_id': environment.id, 'force': False})
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name={}'.format(product.name)})[0]
    ak.add_subscriptions(data={'subscription_id': subscription.id})
def test_positive_download_debug_cert_after_refresh(session):
    """Create organization with valid manifest. Download debug
    certificate for that organization and refresh added manifest for
    few times in a row

    :id: 1fcd7cd1-8ba1-434f-b9fb-c4e920046eb4

    :expectedresults: Scenario passed successfully

    :CaseLevel: Integration

    :CaseImportance: Critical
    """
    test_org = entities.Organization().create()
    refresh_attempts = 3
    try:
        upload_manifest_locked(test_org.id, original_manifest())
        with session:
            session.organization.select(test_org.name)
            for _attempt in range(refresh_attempts):
                # the debug certificate must stay downloadable after
                # every manifest refresh
                assert test_org.download_debug_certificate()
                session.subscription.refresh_manifest()
    finally:
        # always delete the manifest so the org can be cleaned up later
        entities.Subscription(organization=test_org).delete_manifest(
            data={'organization_id': test_org.id})
def test_post_manifest_scenario_refresh(self):
    """Post-upgrade scenario that verifies manifest refreshed successfully
    and deleted successfully.

    :id: postupgrade-29b246aa-2c7f-49f4-870a-7a0075e184b1

    :steps:
        1. Refresh manifest
        2. Delete manifest

    :expectedresults:
        1. The manifest should refresh and delete successfully.
    """
    # Re-find the org created by the pre-upgrade scenario by its saved name
    org = entities.Organization().search(
        query={'search': 'name={0}'.format(self.org_name)})[0]
    sub = entities.Subscription(organization=org)
    sub.refresh_manifest(data={'organization_id': org.id})
    # a successful refresh leaves at least one subscription in the org
    self.assertGreater(len(sub.search()), 0)
    delete_manifest(self.org_name)
    history = hammer.hammer('subscription manifest-history'
                            ' --organization {0}'.format(self.org_name))
    # Fixed: the expected message contains no format placeholder, so the
    # previous ``.format(self.org_name)`` call was a misleading no-op and
    # has been removed.
    self.assertIn(
        "Subscriptions deleted by foreman_admin",
        history[0]['status message'])
def setup_content(module_org):
    """Prepare content for ``module_org``: import a manifest, sync a Red
    Hat repo and a custom repo, publish/promote a content view and return
    the org together with an activation key carrying the default
    subscription.

    :param nailgun.entities.Organization module_org: target organization
    :return: tuple of ``(module_org, activation_key)``
    """
    # Manifest import makes RH repos and subscriptions available
    with manifests.clone() as manifest:
        upload_manifest(module_org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_product = entities.Product(organization=module_org).create()
    # upper-cased name exercises case handling in repo names
    custom_repo = entities.Repository(name=gen_string('alphanumeric').upper(),
                                      product=custom_product).create()
    custom_repo.sync()
    lce = entities.LifecycleEnvironment(organization=module_org).create()
    # NOTE(review): repository list mixes a bare id (rh_repo_id) with an
    # entity id (custom_repo.id) — presumably both resolve server-side;
    # confirm against nailgun's ContentView field handling.
    cv = entities.ContentView(
        organization=module_org,
        repository=[rh_repo_id, custom_repo.id],
    ).create()
    cv.publish()
    cvv = cv.read().version[0].read()
    promote(cvv, lce.id)
    ak = entities.ActivationKey(content_view=cv,
                                organization=module_org,
                                environment=lce,
                                auto_attach=True).create()
    subscription = entities.Subscription(organization=module_org).search(
        query={'search': 'name="{}"'.format(DEFAULT_SUBSCRIPTION_NAME)})[0]
    # quantity must be 1 for non multi-entitlement subscriptions
    ak.add_subscriptions(data={
        'quantity': 1,
        'subscription_id': subscription.id
    })
    return module_org, ak
def setUpClass(cls):  # noqa
    """Create an 'insights'-prefixed org with an uploaded manifest and an
    activation key carrying the default subscription; cache the org
    label/name and AK name on the class for use by the tests.
    """
    super(RHAITestCase, cls).setUpClass()
    # Create a new organization with prefix 'insights'
    org = entities.Organization(
        name='insights_{0}'.format(gen_string('alpha', 6))).create()
    # Upload manifest
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    # Create activation key using default CV and library environment
    activation_key = entities.ActivationKey(
        auto_attach=True,
        content_view=org.default_content_view.id,
        environment=org.library.id,
        name=gen_string('alpha'),
        organization=org,
    ).create()
    # Walk through the list of subscriptions.
    # Find the "Red Hat Employee Subscription" and attach it to the
    # recently-created activation key.
    for subs in entities.Subscription(organization=org).search():
        if subs.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
            # 'quantity' must be 1, not subscription['quantity']. Greater
            # values produce this error: "RuntimeError: Error: Only pools
            # with multi-entitlement product subscriptions can be added to
            # the activation key with a quantity greater than one."
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': subs.id,
            })
            break
    # expose identifiers the test methods need
    cls.org_label = org.label
    cls.ak_name = activation_key.name
    cls.org_name = org.name
def test_positive_run_packages_and_services_job(self, fixture_vmsetup, module_org):
    """Tests Ansible REX job can install packages and start services

    :id: 47ed82fb-77ca-43d6-a52e-f62bae5d3a42

    :Steps:
        0. Create a VM and register to SAT and prepare for REX (ssh key)
        1. Run Ansible Package job for the host to install a package
        2. Check the package is present at the host
        3. Run Ansible Service job for the host to start a service
        4. Check the service is started on the host

    :expectedresults: multiple asserts along the code

    :CaseAutomation: Automated

    :CaseLevel: System

    :bz: 1872688, 1811166

    :customerscenario: true

    :parametrized: yes
    """
    self.org = module_org
    client = fixture_vmsetup
    packages = ["cow"]
    # Create a custom repo
    repo = entities.Repository(
        content_type='yum',
        product=entities.Product(organization=self.org).create(),
        url=settings.repos.yum_0.url,
    ).create()
    repo.sync()
    prod = repo.product.read()
    # custom products get an auto-generated subscription with the same name
    subs = entities.Subscription(organization=self.org).search(
        query={'search': f'name={prod.name}'})
    assert len(subs), 'No subscriptions matching the product returned'
    ak = entities.ActivationKey(
        organization=self.org,
        content_view=self.org.default_content_view,
        environment=self.org.library,
    ).create()
    ak.add_subscriptions(data={'subscriptions': [{'id': subs[0].id}]})
    client.register_contenthost(org=self.org.label, activation_key=ak.name)
    # install package
    invocation_command = make_job_invocation({
        'job-template': 'Package Action - Ansible Default',
        'inputs': 'state=latest, name={}'.format(*packages),
        'search-query': f'name ~ {client.hostname}',
    })
    result = JobInvocation.info({'id': invocation_command['id']})
    try:
        assert result['success'] == '1'
    except AssertionError:
        # surface the remote job output in the failure message
        result = 'host output: {}'.format(' '.join(
            JobInvocation.get_output({
                'id': invocation_command['id'],
                'host': client.hostname
            })))
        raise AssertionError(result)
    result = client.run(f'rpm -q {" ".join(packages)}')
    assert result.status == 0
    # start a service
    service = "postfix"
    # restrict postfix to ipv4 so it can start on hosts without IPv6
    client.execute(
        "sed -i 's/^inet_protocols.*/inet_protocols = ipv4/' /etc/postfix/main.cf",
    )
    invocation_command = make_job_invocation({
        'job-template': 'Service Action - Ansible Default',
        'inputs': f'state=started, name={service}',
        'search-query': f"name ~ {client.hostname}",
    })
    result = JobInvocation.info({'id': invocation_command['id']})
    try:
        assert result['success'] == '1'
    except AssertionError:
        result = 'host output: {}'.format(' '.join(
            JobInvocation.get_output({
                'id': invocation_command['id'],
                'host': client.hostname
            })))
        raise AssertionError(result)
    result = client.execute(f"systemctl status {service}")
    assert result.status == 0
def create_activation_key_for_client_registration(
        ak_name, client_os, org, environment, sat_state):
    """Creates Activation key for client registration

    Builds RHEL and Satellite Tools content (custom or CDN, depending on
    ``sat_state``), publishes/promotes a content view with both repos and
    returns an activation key with the matching subscriptions attached.

    :param str ak_name: Activation key name
    :param str client_os: rhel6/rhel7
    :param nailgun.entity.Organization org: Organization
    :param nailgun.entity.Environment environment: Environment
    :param str sat_state: pre or post
    :return nailgun.entity: Activation key
    """
    client_os = client_os.upper()
    from_ver = os.environ.get('FROM_VERSION')
    rhel_prod_name = 'scenarios_rhel{}_prod'.format(client_os[-1])
    rhel_repo_name = '{}_repo'.format(rhel_prod_name)
    rhel_url = os.environ.get('{}_CUSTOM_REPO'.format(client_os))
    if rhel_url is None:
        raise ValueError('The RHEL Repo URL environment variable for OS {} '
                         'is not provided!'.format(client_os))
    rhel_prod = entities.Product(
        name=rhel_prod_name, organization=org.id).create()
    # 6.1/6.2 do not support the verify_ssl_on_sync repository flag
    if sat_state.lower() == 'pre' and from_ver in ['6.1', '6.2']:
        rhel_repo = entities.Repository(
            name=rhel_repo_name,
            product=rhel_prod,
            url=rhel_url,
            content_type='yum'
        ).create()
    else:
        rhel_repo = entities.Repository(
            name=rhel_repo_name,
            product=rhel_prod,
            url=rhel_url,
            content_type='yum',
            verify_ssl_on_sync=False
        ).create()
    call_entity_method_with_timeout(rhel_repo.sync, timeout=1400)
    if sat_state.lower() == 'pre':
        # pre-upgrade: use the CDN Satellite Tools product/repo
        product_name = 'Red Hat Enterprise Linux Server'
        repo_name = 'Red Hat Satellite Tools {0} for RHEL ' \
                    '{1} Server RPMs x86_64'.format(from_ver, client_os[-1])
        tools_prod = entities.Product(
            organization=org.id
        ).search(
            query={
                'per_page': 1000,
                'search': 'name="{}"'.format(product_name)
            }
        )[0]
        tools_repo = entities.Repository(
            organization=org.id, product=tools_prod
        ).search(
            query={
                'per_page': 1000,
                'search': 'name="{}"'.format(repo_name)
            }
        )[0]
    elif sat_state.lower() == 'post':
        # post-upgrade: use (or create) a custom tools product/repo
        product_name = 'scenarios_tools_product'
        tools_repo_url = os.environ.get(
            'TOOLS_URL_{}'.format(client_os.upper()))
        if tools_repo_url is None:
            raise ValueError('The Tools Repo URL environment variable for '
                             'OS {} is not provided!'.format(client_os))
        repo_name = '{}_repo'.format(product_name)
        tools_prod_results = entities.Product(
            organization=org.id
        ).search(query={'search': 'name={}'.format(product_name)})
        if not tools_prod_results:
            tools_prod = entities.Product(
                name=product_name, organization=org.id).create()
            tools_repo = entities.Repository(
                name=repo_name,
                product=tools_prod,
                url=tools_repo_url,
                content_type='yum'
            ).create()
            tools_repo.sync()
        else:
            # Fixed: ``search()`` returns a list; the previous code kept
            # the list itself, which later broke ``tools_prod.name`` and
            # put a list element inside ``tools_cv.repository``. Unwrap
            # both search results to single entities.
            tools_prod = tools_prod_results[0]
            tools_repo = entities.Repository(
                organization=org.id, product=tools_prod
            ).search(query={'search': 'name={}'.format(repo_name)})[0]
    tools_cv = entities.ContentView(
        name=ak_name + '_cv',
        label=ak_name + '_cv',
        organization=org.id
    ).create()
    tools_cv.repository = [tools_repo, rhel_repo]
    tools_cv = tools_cv.update(['repository'])
    tools_cv.publish()
    tools_cv = tools_cv.read()  # Published CV with new version
    # Promote CV
    cvv = entities.ContentViewVersion(
        id=max([cvv.id for cvv in tools_cv.version])
    ).read()
    cvv.promote(
        data={
            u'environment_id': environment.id,
            u'force': False
        }
    )
    tools_ak = entities.ActivationKey(
        name=ak_name,
        content_view=tools_cv,
        organization=org.id,
        environment=environment
    ).create()
    if sat_state == 'pre':
        tools_sub = 'Red Hat Satellite Employee Subscription'
        tools_content = 'rhel-{0}-server-satellite-tools-{1}-rpms'.format(
            client_os[-1], from_ver)
    else:
        tools_sub = tools_prod.name
    tools_subscription = entities.Subscription(organization=org.id).search(
        query={
            'search': 'name="{}"'.format(tools_sub),
            'per_page': 1000
        }
    )[0]
    rhel_subscription = entities.Subscription(organization=org.id).search(
        query={
            'search': 'name={}'.format(rhel_prod.name),
            'per_page': 1000
        }
    )[0]
    tools_ak.add_subscriptions(data={
        'subscription_id': tools_subscription.id})
    if sat_state == 'pre':
        # CDN tools repo must be explicitly enabled on the AK
        tools_ak.content_override(data={
            'content_override': {
                u'content_label': tools_content,
                u'value': u'1'
            }}
        )
    tools_ak.add_subscriptions(data={
        'subscription_id': rhel_subscription.id})
    return tools_ak
def configure_puppet_test():
    """Set up an org with a puppet 'production' environment, synced
    Satellite Tools repos (RHEL6 + RHEL7), a published/promoted content
    view and an activation key with the default subscription attached.

    :return: dict with org/CV/AK/lifecycle-env names plus the Satellite
        hostname, for use by the puppet tests
    """
    sat6_hostname = settings.server.hostname
    repo_values = [
        {
            'repo': REPOS['rhst6']['name'],
            'reposet': REPOSET['rhst6']
        },
        {
            'repo': REPOS['rhst7']['name'],
            'reposet': REPOSET['rhst7']
        },
    ]
    # step 1: Create new organization and environment.
    org = entities.Organization(name=gen_string('alpha')).create()
    loc = entities.Location(name=DEFAULT_LOC).search()[0].read()
    # taxonomize the existing 'production' puppet environment
    puppet_env = entities.Environment(name='production').search()[0].read()
    puppet_env.location.append(loc)
    puppet_env.organization.append(org)
    puppet_env = puppet_env.update(['location', 'organization'])
    Proxy.import_classes({
        u'environment': puppet_env.name,
        u'name': sat6_hostname,
    })
    env = entities.LifecycleEnvironment(organization=org,
                                        name=gen_string('alpha')).create()
    # step 2: Clone and Upload manifest
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    # step 3: Sync RedHat Sattools RHEL6 and RHEL7 repository
    repos = [
        entities.Repository(id=enable_rhrepo_and_fetchid(
            basearch='x86_64',
            org_id=org.id,
            product=PRDS['rhel'],
            repo=value['repo'],
            reposet=value['reposet'],
            releasever=None,
        )) for value in repo_values
    ]
    for repo in repos:
        repo.sync()
    # step 4: Create content view
    content_view = entities.ContentView(organization=org,
                                        name=gen_string('alpha')).create()
    # step 5: Associate repository to new content view
    content_view.repository = repos
    content_view = content_view.update(['repository'])
    # step 6: Publish content view and promote to lifecycle env.
    content_view.publish()
    content_view = content_view.read()
    promote(content_view.version[0], env.id)
    # step 7: Create activation key
    ak_name = gen_string('alpha')
    activation_key = entities.ActivationKey(
        name=ak_name,
        environment=env,
        organization=org,
        content_view=content_view,
    ).create()
    # step 7.1: Walk through the list of subscriptions.
    # Find the "Employee SKU" and attach it to the
    # recently-created activation key.
    for sub in entities.Subscription(organization=org).search():
        if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
            # 'quantity' must be 1, not subscription['quantity']. Greater
            # values produce this error: "RuntimeError: Error: Only pools
            # with multi-entitlement product subscriptions can be added to
            # the activation key with a quantity greater than one."
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id,
            })
            break
    for content_label in [REPOS['rhst6']['id'], REPOS['rhst7']['id']]:
        # step 7.2: Enable product content
        activation_key.content_override(
            data={
                'content_override': {
                    u'content_label': content_label,
                    u'value': u'1',
                }
            })
    return {
        'org_name': org.name,
        'cv_name': content_view.name,
        'sat6_hostname': settings.server.hostname,
        'ak_name': ak_name,
        'env_name': env.name,
    }
def test_positive_run_packages_and_services_job(self, fixture_vmsetup, fixture_org):
    """Tests Ansible REX job can install packages and start services

    :id: 47ed82fb-77ca-43d6-a52e-f62bae5d3a42

    :Steps:
        0. Create a VM and register to SAT and prepare for REX (ssh key)
        1. Run Ansible Package job for the host to install a package
        2. Check the package is present at the host
        3. Run Ansible Service job for the host to start a service
        4. Check the service is started on the host

    :expectedresults: multiple asserts along the code

    :CaseAutomation: automated

    :CaseLevel: System
    """
    self.org = fixture_org
    self.client = fixture_vmsetup
    # set connecting to host by ip
    Host.set_parameter({
        'host': self.client.hostname,
        'name': 'remote_execution_connect_by_ip',
        'value': 'True',
    })
    packages = ["cow"]
    # Create a custom repo
    repo = entities.Repository(
        content_type='yum',
        product=entities.Product(organization=self.org).create(),
        url=FAKE_0_YUM_REPO,
    ).create()
    repo.sync()
    prod = repo.product.read()
    # Fixed: scope the subscription search to the test organization — an
    # unscoped search can match a same-named subscription from another
    # org and attach the wrong one (the sibling pytest variant of this
    # test already scopes this search).
    subs = entities.Subscription(organization=self.org).search(
        query={'search': 'name={0}'.format(prod.name)})
    assert len(subs) > 0, 'No subscriptions matching the product returned'
    ak = entities.ActivationKey(organization=self.org,
                                content_view=self.org.default_content_view,
                                environment=self.org.library).create()
    ak.add_subscriptions(data={'subscriptions': [{'id': subs[0].id}]})
    self.client.register_contenthost(org=self.org.label,
                                     activation_key=ak.name)
    # install package
    invocation_command = make_job_invocation({
        'job-template': 'Package Action - Ansible Default',
        'inputs': 'state=latest, name={}'.format(*packages),
        'search-query': "name ~ {0}".format(self.client.hostname),
    })
    try:
        assert invocation_command['success'] == u'1'
    except AssertionError:
        # surface the remote job output in the failure message
        result = 'host output: {0}'.format(' '.join(
            JobInvocation.get_output({
                'id': invocation_command[u'id'],
                'host': self.client.hostname
            })))
        raise AssertionError(result)
    result = ssh.command("rpm -q {0}".format(*packages),
                         hostname=self.client.ip_addr)
    assert result.return_code == 0
    # start a service
    service = "postfix"
    # restrict postfix to ipv4 so it can start on hosts without IPv6
    ssh.command(
        "sed -i 's/^inet_protocols.*/inet_protocols = ipv4/' /etc/postfix/main.cf",
        hostname=self.client.ip_addr)
    invocation_command = make_job_invocation({
        'job-template': 'Service Action - Ansible Default',
        'inputs': 'state=started, name={}'.format(service),
        'search-query': "name ~ {0}".format(self.client.hostname),
    })
    try:
        assert invocation_command['success'] == u'1'
    except AssertionError:
        result = 'host output: {0}'.format(' '.join(
            JobInvocation.get_output({
                'id': invocation_command[u'id'],
                'host': self.client.hostname
            })))
        raise AssertionError(result)
    result = ssh.command("systemctl status {0}".format(service),
                         hostname=self.client.ip_addr)
    assert result.return_code == 0
def setUp(self):
    """Creates the pre-requisites for the Incremental updates that used
    per each test"""
    super(IncrementalUpdateTestCase, self).setUp()
    # Create content view that will be used filtered erratas
    self.rhel_6_partial_cv = entities.ContentView(
        organization=self.org,
        name=gen_alpha(),
        repository=[self.rhva_6_repo, self.rhel6_sat6tools_repo]).create()
    # Create a content view filter to filter out errata
    rhel_6_partial_cvf = entities.ErratumContentViewFilter(
        content_view=self.rhel_6_partial_cv,
        type='erratum',
        name='rhel_6_partial_cv_filter',
        repository=[self.rhva_6_repo]).create()
    # Create a content view filter rule - filtering out errata in the last
    # 365 days
    start_date = (date.today() - timedelta(days=365)).strftime('%Y-%m-%d')
    entities.ContentViewFilterRule(
        content_view_filter=rhel_6_partial_cvf,
        types=['security', 'enhancement', 'bugfix'],
        start_date=start_date,
        end_date=date.today().strftime('%Y-%m-%d')).create()
    # Publish content view and re-read it
    self.rhel_6_partial_cv.publish()
    self.rhel_6_partial_cv = self.rhel_6_partial_cv.read()
    # Promote content view to 'DEV' and 'QE'
    assert len(self.rhel_6_partial_cv.version) == 1
    for env in (self.dev_lce, self.qe_lce):
        promote(self.rhel_6_partial_cv.version[0], env.id)
    # Create host collection
    self.rhel_6_partial_hc = entities.HostCollection(organization=self.org,
                                                     name=gen_alpha(),
                                                     max_hosts=5).create()
    # Create activation key for content view
    kwargs = {'organization': self.org, 'environment': self.qe_lce.id}
    rhel_6_partial_ak = entities.ActivationKey(
        name=gen_alpha(),
        content_view=self.rhel_6_partial_cv,
        host_collection=[self.rhel_6_partial_hc],
        **kwargs).create()
    # Assign subscription to activation key.
    # Fetch available subscriptions
    subs = entities.Subscription(organization=self.org).search()
    assert len(subs) > 0
    # Add subscription to activation key
    sub_found = False
    for sub in subs:
        if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
            rhel_6_partial_ak.add_subscriptions(
                data={u'subscription_id': sub.id})
            sub_found = True
    assert sub_found
    # Enable product content in activation key
    rhel_6_partial_ak.content_override(
        data={
            'content_override': {
                u'content_label': REPOS['rhst6']['id'],
                u'value': u'1'
            }
        })
    # Create client machine and register it to satellite with
    # rhel_6_partial_ak
    self.vm = VirtualMachine(distro=DISTRO_RHEL6, tag='incupdate')
    self.addCleanup(vm_cleanup, self.vm)
    self.setup_vm(self.vm, rhel_6_partial_ak.name, self.org.label)
    self.vm.enable_repo(REPOS['rhva6']['id'])
    # timestamp used below to scope the task search to this installation
    timestamp = datetime.utcnow()
    self.vm.run('yum install -y {0}'.format(REAL_0_RH_PACKAGE))
    # Find the content host and ensure that tasks started by package
    # installation has finished
    host = entities.Host().search(
        query={'search': 'name={}'.format(self.vm.hostname)})
    wait_for_tasks(
        search_query='label = Actions::Katello::Host::UploadPackageProfile'
                     ' and resource_id = {}'
                     ' and started_at >= "{}"'.format(host[0].id, timestamp))
    # Force host to generate or refresh errata applicability
    call_entity_method_with_timeout(host[0].errata_applicability,
                                    timeout=600)
def test_positive_end_to_end(self, fake_manifest_is_set, default_sat, rhel7_contenthost):
    """Perform end to end smoke tests using RH and custom repos.

    1. Create a new user with admin permissions
    2. Using the new user from above
        1. Create a new organization
        2. Clone and upload manifest
        3. Create a new lifecycle environment
        4. Create a custom product
        5. Create a custom YUM repository
        6. Enable a Red Hat repository
        7. Synchronize these two repositories
        8. Create a new content view
        9. Associate the YUM and Red Hat repositories to new content view
        10. Publish content view
        11. Promote content view to the lifecycle environment
        12. Create a new activation key
        13. Add the products to the activation key
        14. Create a new libvirt compute resource
        15. Create a new subnet
        16. Create a new domain
        17. Create a new hostgroup and associate previous entities to it
        18. Provision a client  ** NOT CURRENTLY PROVISIONING

    :id: b2f73740-d3ce-4e6e-abc7-b23e5562bac1

    :expectedresults: All tests should succeed and Content should be
        successfully fetched by client.

    :parametrized: yes
    """
    # step 1: Create a new user with admin permissions
    login = gen_string('alphanumeric')
    password = gen_string('alphanumeric')
    entities.User(admin=True, login=login, password=password).create()
    # step 2.1: Create a new organization
    # all subsequent nailgun calls authenticate as the new user via
    # this server_config
    server_config = get_nailgun_config()
    server_config.auth = (login, password)
    org = entities.Organization(server_config).create()
    # step 2.2: Clone and upload manifest
    if fake_manifest_is_set:
        with manifests.clone() as manifest:
            upload_manifest(org.id, manifest.content)
    # step 2.3: Create a new lifecycle environment
    le1 = entities.LifecycleEnvironment(server_config,
                                        organization=org).create()
    # step 2.4: Create a custom product
    prod = entities.Product(server_config, organization=org).create()
    repositories = []
    # step 2.5: Create custom YUM repository
    custom_repo = entities.Repository(server_config,
                                      product=prod,
                                      content_type='yum',
                                      url=CUSTOM_RPM_REPO).create()
    repositories.append(custom_repo)
    # step 2.6: Enable a Red Hat repository
    if fake_manifest_is_set:
        rhel_repo = entities.Repository(id=enable_rhrepo_and_fetchid(
            basearch='x86_64',
            org_id=org.id,
            product=constants.PRDS['rhel'],
            repo=constants.REPOS['rhst7']['name'],
            reposet=constants.REPOSET['rhst7'],
        ))
        repositories.append(rhel_repo)
    # step 2.7: Synchronize these two repositories
    for repo in repositories:
        repo.sync()
    # step 2.8: Create content view
    content_view = entities.ContentView(server_config,
                                        organization=org).create()
    # step 2.9: Associate the YUM and Red Hat repositories to new content view
    content_view.repository = repositories
    content_view = content_view.update(['repository'])
    # step 2.10: Publish content view
    content_view.publish()
    # step 2.11: Promote content view to the lifecycle environment
    content_view = content_view.read()
    assert len(content_view.version) == 1
    cv_version = content_view.version[0].read()
    assert len(cv_version.environment) == 1
    promote(cv_version, le1.id)
    # check that content view exists in lifecycle
    content_view = content_view.read()
    assert len(content_view.version) == 1
    cv_version = cv_version.read()
    # step 2.12: Create a new activation key
    activation_key_name = gen_string('alpha')
    activation_key = entities.ActivationKey(
        name=activation_key_name,
        environment=le1,
        organization=org,
        content_view=content_view).create()
    # step 2.13: Add the products to the activation key
    for sub in entities.Subscription(organization=org).search():
        if sub.name == constants.DEFAULT_SUBSCRIPTION_NAME:
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id
            })
            break
    # step 2.13.1: Enable product content
    if fake_manifest_is_set:
        activation_key.content_override(
            data={
                'content_overrides': [{
                    'content_label': constants.REPOS['rhst7']['id'],
                    'value': '1'
                }]
            })
    # BONUS: Create a content host and associate it with promoted
    # content view and last lifecycle where it exists
    content_host = entities.Host(
        content_facet_attributes={
            'content_view_id': content_view.id,
            'lifecycle_environment_id': le1.id,
        },
        organization=org,
    ).create()
    # check that content view matches what we passed
    assert content_host.content_facet_attributes[
        'content_view_id'] == content_view.id
    # check that lifecycle environment matches
    assert content_host.content_facet_attributes[
        'lifecycle_environment_id'] == le1.id
    # step 2.14: Create a new libvirt compute resource
    entities.LibvirtComputeResource(
        server_config,
        url=f'qemu+ssh://root@{settings.libvirt.libvirt_hostname}/system',
    ).create()
    # step 2.15: Create a new subnet
    subnet = entities.Subnet(server_config).create()
    # step 2.16: Create a new domain
    domain = entities.Domain(server_config).create()
    # step 2.17: Create a new hostgroup and associate previous entities to it
    entities.HostGroup(server_config, domain=domain, subnet=subnet).create()
    # step 2.18: Provision a client
    # TODO this isn't provisioning through satellite as intended
    # Note it wasn't well before the change that added this todo
    rhel7_contenthost.install_katello_ca(default_sat)
    # Register client with foreman server using act keys
    rhel7_contenthost.register_contenthost(org.label, activation_key_name)
    assert rhel7_contenthost.subscribed
    # Install rpm on client
    package_name = 'katello-agent'
    result = rhel7_contenthost.execute(f'yum install -y {package_name}')
    assert result.status == 0
    # Verify that the package is installed by querying it
    result = rhel7_contenthost.run(f'rpm -q {package_name}')
    assert result.status == 0
def test_positive_service_level_subscription_with_custom_product(
        session, rhel7_contenthost, default_sat):
    """Subscribe a host to activation key with Premium service level and
    with custom product

    :id: 195a8049-860e-494d-b7f0-0794384194f7

    :customerscenario: true

    :steps:
        1. Create a product with custom repository synchronized
        2. Create and Publish a content view with the created repository
        3. Create an activation key and assign the created content view
        4. Add a RedHat subscription to activation key (The product
           subscription should be added automatically)
        5. Set the activation service_level to Premium
        6. Register a host to activation key
        7. List consumed subscriptions on host
        8. List the subscription in Content Host UI

    :expectedresults:
        1. The product subscription is listed in consumed subscriptions
           on host
        2. The product subscription is listed in the contenthost
           subscriptions UI

    :BZ: 1394357

    :parametrized: yes

    :CaseLevel: System
    """
    org = entities.Organization().create()
    manifests.upload_manifest_locked(org.id)
    # helper creates product/repo/CV/AK wired together for this org
    entities_ids = setup_org_for_a_custom_repo({
        'url': settings.repos.yum_1.url,
        'organization-id': org.id
    })
    product = entities.Product(id=entities_ids['product-id']).read()
    activation_key = entities.ActivationKey(
        id=entities_ids['activationkey-id']).read()
    # add the default RH subscription
    subscription = entities.Subscription(organization=org).search(
        query={'search': f'name="{constants.DEFAULT_SUBSCRIPTION_NAME}"'})[0]
    activation_key.add_subscriptions(data={
        'quantity': 1,
        'subscription_id': subscription.id
    })
    # ensure all the needed subscriptions are attached to activation key
    results = activation_key.subscriptions()['results']
    assert {product.name, constants.DEFAULT_SUBSCRIPTION_NAME
            } == {ak_subscription['name'] for ak_subscription in results}
    # Set the activation service_level to Premium
    activation_key.service_level = 'Premium'
    activation_key = activation_key.update(['service_level'])
    rhel7_contenthost.install_katello_ca(default_sat)
    rhel7_contenthost.register_contenthost(
        org.label, activation_key=activation_key.name)
    assert rhel7_contenthost.subscribed
    result = rhel7_contenthost.run('subscription-manager list --consumed')
    assert result.status == 0
    assert f'Subscription Name: {product.name}' in '\n'.join(result.stdout)
    with session:
        session.organization.select(org.name)
        chost = session.contenthost.read(rhel7_contenthost.hostname,
                                         widget_names='subscriptions')
        # NOTE(review): the UI table key is 'Repository Name' although it
        # is read from the assigned *subscriptions* resources — confirm
        # against the contenthost subscriptions widget definition.
        subscriptions = {
            subs['Repository Name']
            for subs in chost['subscriptions']['resources']['assigned']
        }
        assert product.name in subscriptions
def test_positive_schedule_generation_and_get_mail(session, module_org, module_loc): """ Schedule generating a report. Request the result be sent via e-mail. :id: cd19b90d-836f-4efd-c3bc-d5e09a909a67 :setup: User with reporting access rights, some Host :steps: 1. Monitor -> Report Templates 2. Registered Hosts -> Generate 3. Set schedule to current time + 1 minute 4. Check that the result should be sent via e-mail 5. Submit 6. Receive the e-mail :expectedresults: After ~1 minute, the same report is generated as if the results were downloaded from WebUI. The result is compressed. :CaseImportance: High """ # make sure we have some subscriptions with manifests.clone() as manifest: upload_manifest(module_org.id, manifest.content) # generate Subscriptions report with session: session.reporttemplate.schedule( "Subscriptions", values={ 'output_format': 'JSON', 'generate_at': '1970-01-01 17:10:00', 'email': True, 'email_to': 'root@localhost', }, ) file_path = '/tmp/{0}.json'.format(gen_string('alpha')) gzip_path = f'{file_path}.gz' expect_script = (f'#!/usr/bin/env expect\n' f'spawn mail\n' f'expect "& "\n' f'send "w $ /dev/null\\r"\n' f'expect "Enter filename"\n' f'send "\\r"\n' f'expect "Enter filename"\n' f'send "\\r"\n' f'expect "Enter filename"\n' f'send "\\025{gzip_path}\\r"\n' f'expect "&"\n' f'send "q\\r"\n') ssh.command(f'expect -c \'{expect_script}\'', hostname=settings.server.hostname) ssh.download_file(gzip_path) os.system(f'gunzip {gzip_path}') with open(file_path) as json_file: data = json.load(json_file) subscription_cnt = len( entities.Subscription(organization=module_org).search()) assert subscription_cnt > 0 assert len(data) >= subscription_cnt keys_expected = [ 'Available', 'Contract number', 'ID', 'Name', 'Quantity', 'SKU' ] for subscription in data: assert sorted(list(subscription.keys())) == keys_expected
def sync_tools_repos_to_upgrade(client_os, hosts):
    """This syncs tools repo in Satellite server and also attaches
    the new tools repo subscription onto each client

    :param string client_os: The client OS of which tools repo to be
        synced e.g: rhel6, rhel7
    :param list hosts: The list of capsule hostnames to which new capsule
        repo subscription will be attached

    Following environment variable affects this function:

    TOOLS_URL_{client_os}
        The url of tools repo from latest satellite compose.
    FROM_VERSION
        Current Satellite version - to differentiate default organization.
        e.g. '6.1', '6.0'

    Personal Upgrade Env Vars:

    CLIENT_AK
        The ak_name attached to subscription of client

    Rhevm upgrade Env Vars:

    RHEV_CLIENT_AK
        The AK name used in client subscription
    """
    client_os = client_os.upper()
    tools_repo_url = os.environ.get('TOOLS_URL_{}'.format(client_os))
    if tools_repo_url is None:
        logger.warning('The Tools Repo URL for {} is not provided '
                       'to perform Client Upgrade !'.format(client_os))
        sys.exit(1)
    # prefer the personal AK var, fall back to the rhevm one
    ak_name = os.environ.get(
        'CLIENT_AK_{}'.format(client_os),
        os.environ.get('RHEV_CLIENT_AK_{}'.format(client_os)))
    if ak_name is None:
        logger.warning('The AK details are not provided for {0} Client '
                       'upgrade!'.format(client_os))
        sys.exit(1)
    org = entities.Organization().search(
        query={'search': 'name="{}"'.format("Default Organization")})[0]
    ak = entities.ActivationKey(organization=org).search(
        query={'search': 'name={}'.format(ak_name)})[0]
    cv = ak.content_view.read()
    lenv = ak.environment.read()
    toolsproduct_name = customcontents['tools']['prod'].format(
        client_os=client_os)
    toolsrepo_name = customcontents['tools']['repo'].format(
        client_os=client_os)
    # adding sleeps in between to avoid race conditions
    tools_product = entities.Product(name=toolsproduct_name,
                                     organization=org).create()
    tools_repo = entities.Repository(name=toolsrepo_name,
                                     product=tools_product,
                                     url=tools_repo_url,
                                     organization=org,
                                     content_type='yum').create()
    entities.Repository(id=tools_repo.id).sync()
    cv.repository += [tools_repo]
    cv.update(['repository'])
    call_entity_method_with_timeout(cv.read().publish, timeout=2500)
    # max(id) picks the version just published above
    published_ver = entities.ContentViewVersion(
        id=max([cv_ver.id for cv_ver in cv.read().version])).read()
    published_ver.promote(data={'environment_id': lenv.id, 'force': False})
    tools_sub = entities.Subscription().search(
        query={'search': 'name={0}'.format(toolsproduct_name)})[0]
    ak.add_subscriptions(data={
        'quantity': 1,
        'subscription_id': tools_sub.id,
    })
    # Add this latest tools repo to hosts to upgrade
    # NOTE(review): this repeats the identical subscription search done
    # just above — ``sub`` equals ``tools_sub``
    sub = entities.Subscription().search(
        query={'search': 'name={0}'.format(toolsproduct_name)})[0]
    for host in hosts:
        if float(os.environ.get('FROM_VERSION')) <= 6.1:
            # If not User Hosts then, attach sub to dockered clients
            if not all([
                    os.environ.get('CLIENT6_HOSTS'),
                    os.environ.get('CLIENT7_HOSTS')
            ]):
                docker_vm = os.environ.get('DOCKER_VM')
                execute(attach_subscription_to_host_from_content_host,
                        sub.cp_id, True, host, host=docker_vm)
            # Else, Attach subs to user hosts
            else:
                execute(attach_subscription_to_host_from_content_host,
                        sub.cp_id, host=host)
        else:
            # 6.2+ supports attaching via the HostSubscription API
            host = entities.Host().search(
                query={'search': 'name={}'.format(host)})[0]
            entities.HostSubscription(host=host).add_subscriptions(
                data={'subscriptions': [{
                    'id': sub.id,
                    'quantity': 1
                }]})
def _sync_capsule_subscription_to_capsule_ak(ak):
    """Syncs to_version capsule contents, adds to the CV and attaches
    contents to the AK through which Capsule is registered.

    :param ak: ```nailgun.entities.ActivationKey``` used for capsule
        subscription
    """
    cv = ak.content_view.read()
    org = ak.organization
    # Environment knobs: custom capsule repo URL (optional), target
    # Satellite version and OS major version (last char of e.g. 'rhel7').
    capsule_repo = os.environ.get('CAPSULE_URL')
    to_version = os.environ.get('TO_VERSION')
    os_ver = os.environ.get('OS')[-1]
    # If custom capsule repo is not given then
    # enable capsule repo from Redhat Repositories
    if capsule_repo:
        cap_product = entities.Product(name=customcontents['capsule']['prod'],
                                       organization=org).create()
        cap_repo = entities.Repository(name=customcontents['capsule']['repo'],
                                       product=cap_product,
                                       url=capsule_repo,
                                       organization=org,
                                       content_type='yum').create()
    else:
        cap_product = entities.Product(
            name=rhelcontents['capsule']['prod'],
            organization=org).search(query={'per_page': 100})[0]
        cap_reposet = entities.RepositorySet(
            name=rhelcontents['capsule']['repo'].format(cap_ver=to_version,
                                                        os_ver=os_ver),
            product=cap_product).search()[0]
        try:
            cap_reposet.enable(
                data={
                    'basearch': 'x86_64',
                    'releasever': '7Server',
                    'organization_id': org.id
                })
        except requests.exceptions.HTTPError as exp:
            # Enabling an already-enabled reposet raises; log and continue
            # since the repo can still be looked up below.
            logger.warn(exp)
        cap_repo = entities.Repository(
            name=rhelcontents['capsule']['repofull'].format(
                cap_ver=to_version, os_ver=os_ver, arch='x86_64')).search(
                    query={
                        'organization_id': org.id,
                        'per_page': 100
                    })[0]
    # Repo sync can be slow for capsule content; give it a long timeout.
    call_entity_method_with_timeout(entities.Repository(id=cap_repo.id).sync,
                                    timeout=2500)
    # Add repos to CV
    cv.repository += [cap_repo]
    cv.update(['repository'])
    ak = ak.read()
    if capsule_repo:
        # Custom product: attach its auto-generated subscription to the AK.
        cap_sub = entities.Subscription().search(
            query={
                'search': 'name={0}'.format(customcontents['capsule']['prod'])
            })[0]
        ak.add_subscriptions(data={
            'quantity': 1,
            'subscription_id': cap_sub.id,
        })
    else:
        # RH repo: no separate subscription needed, just force-enable the
        # capsule content label on the AK via a content override.
        ak.content_override(
            data={
                'content_override': {
                    'content_label':
                        rhelcontents['capsule']['label'].format(
                            cap_ver=to_version, os_ver=os_ver),
                    'value': '1'
                }
            })
def test_positive_install_multiple_packages_with_a_job_by_ip(
        self, fixture_vmsetup, fixture_org):
    """Run job to install several packages on host by ip

    :id: 8b73033f-83c9-4024-83c3-5e442a79d320

    :expectedresults: Verify the packages were successfully installed
        on host

    :parametrized: yes
    """
    self.org = fixture_org
    self.client = fixture_vmsetup
    # set connecting to host by ip
    Host.set_parameter({
        'host': self.client.hostname,
        'name': 'remote_execution_connect_by_ip',
        'value': 'True',
    })
    packages = ["cow", "dog", "lion"]
    # Create a custom repo
    repo = entities.Repository(
        content_type='yum',
        product=entities.Product(organization=self.org).create(),
        url=FAKE_0_YUM_REPO,
    ).create()
    repo.sync()
    prod = repo.product.read()
    # The product's auto-generated subscription must exist before it can
    # be attached to the activation key.
    subs = entities.Subscription().search(
        query={'search': 'name={0}'.format(prod.name)})
    assert len(subs) > 0, 'No subscriptions matching the product returned'
    ak = entities.ActivationKey(
        organization=self.org,
        content_view=self.org.default_content_view,
        environment=self.org.library,
    ).create()
    ak.add_subscriptions(data={'subscriptions': [{'id': subs[0].id}]})
    self.client.register_contenthost(org=self.org.label,
                                     activation_key=ak.name)
    # Kick off the remote-execution job that installs all three packages.
    invocation_command = make_job_invocation({
        'job-template': 'Install Package - Katello SSH Default',
        'inputs': 'package={0} {1} {2}'.format(*packages),
        'search-query': "name ~ {0}".format(self.client.hostname),
    })
    try:
        assert invocation_command['success'] == '1'
    except AssertionError:
        # Surface the job output so a failure is diagnosable from the log.
        result = 'host output: {0}'.format(' '.join(
            JobInvocation.get_output({
                'id': invocation_command['id'],
                'host': self.client.hostname
            })))
        raise AssertionError(result)
    # Verify installation by querying rpm on the client over ssh (by ip).
    result = ssh.command("rpm -q {0}".format(" ".join(packages)),
                         hostname=self.client.ip_addr)
    assert result.return_code == 0
def test_positive_view_VDC_subscription_products(self):
    """Ensure that Virtual Datacenters subscription provided products
    is not empty and that a consumed product exist in content products.

    :id: cc4593f0-66ab-4bf6-87d1-d4bd9c89eba5

    :customerscenario: true

    :steps:
        1. Upload a manifest with Virtual Datacenters subscription
        2. Enable a products provided by Virtual Datacenters subscription,
           and synchronize the auto created repository
        3. Create content view with the product repository, and publish it
        4. Create a lifecycle environment and promote the content view to
           it.
        5. Create an activation key with the content view and lifecycle
           environment
        6. Subscribe a host to the activation key
        7. Goto Hosts -> Content hosts and select the created content host
        8. Attach VDC subscription to content host
        9. Goto Content -> Red Hat Subscription
        10. Select Virtual Datacenters subscription

    :expectedresults:
        1. assert that the provided products is not empty
        2. assert that the enabled product is in subscription Product
           Content

    :BZ: 1366327

    :CaseLevel: System
    """
    org = entities.Organization().create()
    subscription = entities.Subscription(organization=org)
    self.upload_manifest(org.id, manifests.clone())
    vds_product_name = PRDS['rhdt']
    # Enable the RH repo provided by the VDC subscription and sync it.
    vdc_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=vds_product_name,
        repo=REPOS['rhdt7']['name'],
        reposet=REPOSET['rhdt7'],
        releasever=None,
    )
    vdc_repo = entities.Repository(id=vdc_repo_id)
    vdc_repo.sync()
    content_view = entities.ContentView(organization=org,
                                        repository=[vdc_repo]).create()
    content_view.publish()
    content_view = content_view.read()
    lce = entities.LifecycleEnvironment(organization=org).create()
    promote(content_view.version[0], lce.id)
    activation_key = entities.ActivationKey(
        organization=org, environment=lce,
        content_view=content_view).create()
    # add the default RH subscription
    for sub in subscription.search():
        if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id,
            })
            break
    with VirtualMachine() as vm:
        vm.install_katello_ca()
        vm.register_contenthost(org.label,
                                activation_key=activation_key.name)
        self.assertTrue(vm.subscribed)
        with Session(self) as session:
            set_context(session, org=org.name)
            # Attach the VDC subscription through the content host UI.
            self.contenthost.update(
                vm.hostname,
                add_subscriptions=[VDC_SUBSCRIPTION_NAME],
            )
            self.assertIsNotNone(
                self.contenthost.wait_until_element(
                    common_locators['alert.success_sub_form']))
            # ensure that subscription provided products list is not empty
            provided_products = self.subscriptions.get_provided_products(
                VDC_SUBSCRIPTION_NAME)
            self.assertGreater(len(provided_products), 0)
            # ensure that the product is in provided products
            self.assertIn(vds_product_name, provided_products)
            # ensure that product is in content products
            content_products = self.subscriptions.get_content_products(
                VDC_SUBSCRIPTION_NAME)
            self.assertEqual(len(content_products), 1)
            self.assertIn(vds_product_name, content_products)
def test_pre_scenario_generate_errata_for_client(self):
    """Create product and repo from which the errata will be generated
    for the Satellite client or content host.

    :id: 88fd28e6-b4df-46c0-91d6-784859fd1c21

    :steps:
        1. Create Life Cycle Environment, Product and Custom Yum Repo
        2. Create custom tools, rhel repos and sync them
        3. Create content view and publish it
        4. Create activation key and add subscription.
        5. Registering Docker Content Host RHEL7
        6. Check katello agent and goferd service running on host
        7. Generate Errata by Installing Outdated/Older Packages
        8. Collect the Erratum list

    :expectedresults:
        1. The content host is created
        2. errata count, erratum list will be generated to satellite
           client/content host
    """
    org = entities.Organization().create()
    loc = entities.Location(organization=[org]).create()
    environment = entities.LifecycleEnvironment(organization=org).search(
        query={'search': 'name=Library'})[0]
    product = entities.Product(organization=org).create()
    custom_yum_repo = entities.Repository(
        product=product,
        content_type='yum',
        url=settings.repos.yum_9.url).create()
    product.sync()
    tools_repo, rhel_repo = self._create_custom_rhel_tools_repos(product)
    repolist = [custom_yum_repo, tools_repo, rhel_repo]
    content_view = publish_content_view(org=org, repolist=repolist)
    ak = entities.ActivationKey(content_view=content_view,
                                organization=org.id,
                                environment=environment).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': f'name={product.name}'})[0]
    ak.add_subscriptions(data={'subscription_id': subscription.id})
    # Spin up a dockerized RHEL7 content host registered via the AK.
    rhel7_client = dockerize(ak_name=ak.name, distro='rhel7',
                             org_label=org.label)
    client_container_id = list(rhel7_client.values())[0]
    client_container_name = [key for key in rhel7_client.keys()][0]
    host_location_update(client_container_name=client_container_name,
                         logger_obj=logger,
                         loc=loc)
    docker_vm = settings.upgrade.docker_vm
    # Wait until the container reports registration to the organization.
    wait_for(
        lambda: org.name in execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager identity',
            host=docker_vm,
        )[docker_vm],
        timeout=800,
        delay=2,
        logger=logger,
    )
    install_or_update_package(client_hostname=client_container_id,
                              package="katello-agent")
    run_goferd(client_hostname=client_container_id)
    # Installing outdated packages makes their errata applicable.
    for package in FAKE_9_YUM_OUTDATED_PACKAGES:
        install_or_update_package(client_hostname=client_container_id,
                                  package=package)
    host = entities.Host().search(
        query={'search': f'activation_key={ak.name}'})[0]
    installable_errata_count = host.content_facet_attributes[
        'errata_counts']['total']
    assert installable_errata_count > 1
    erratum_list = entities.Errata(repository=custom_yum_repo).search(
        query={
            'order': 'updated ASC',
            'per_page': 1000
        })
    errata_ids = [errata.errata_id for errata in erratum_list]
    assert sorted(errata_ids) == sorted(settings.repos.yum_9.errata)
    # Persist identifiers for the matching post-upgrade scenario.
    # NOTE(review): 'conten_view_id' is misspelled, but the post-upgrade
    # consumer presumably reads this exact key -- confirm before renaming.
    scenario_dict = {
        self.__class__.__name__: {
            'rhel_client': rhel7_client,
            'activation_key': ak.name,
            'custom_repo_id': custom_yum_repo.id,
            'product_id': product.id,
            'conten_view_id': content_view.id,
        }
    }
    create_dict(scenario_dict)
def upload_manifest_locked(org_id,
                           manifest=None,
                           interface=INTERFACE_API,
                           timeout=None):
    """Upload a manifest with locking, using the requested interface.

    :type org_id: int
    :type manifest: robottelo.manifests.Manifest
    :type interface: str
    :type timeout: int

    :returns: the upload result

    Note: The manifest uploading is strictly locked only when using this
        function

    Usage::

        # for API interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_API)

        # for CLI interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_CLI)

        # or in one line with default interface
        result = upload_manifest_locked(org_id, manifests.clone())
        subscription_id = result['id']
    """
    if interface not in [INTERFACE_API, INTERFACE_CLI]:
        raise ValueError(
            'upload manifest with interface "{0}" not supported'.format(
                interface))
    if manifest is None:
        # No manifest supplied: clone a fresh one.
        manifest = clone()
    if timeout is None:
        # Set the timeout to 1500 seconds to align with the API timeout.
        # And as we are in locked state, other functions/tests can try to
        # upload the manifest in other processes and we do not want to be
        # interrupted by the default configuration ssh_client timeout.
        timeout = 1500
    if interface == INTERFACE_API:
        with manifest:
            result = entities.Subscription().upload(
                data={'organization_id': org_id},
                files={'content': manifest.content})
    else:
        # interface is INTERFACE_CLI
        with manifest:
            upload_file(manifest.content, manifest.filename)
            result = Subscription.upload(
                {
                    'file': manifest.filename,
                    'organization-id': org_id
                },
                timeout=timeout)
    return result
def _try_to_get_guest_bonus(self, hypervisor_name, sku):
    """Return the id of the guest-bonus subscription for a hypervisor.

    Searches subscriptions by *sku* and returns the id of the first one
    whose hypervisor name contains *hypervisor_name* (case-insensitive).

    :param str hypervisor_name: hypervisor host name to look for.
    :param str sku: subscription SKU used as the search query.
    :returns: the matching subscription id, or None when nothing matches.
    """
    wanted = hypervisor_name.lower()
    for subscription in entities.Subscription().search(
            query={'search': sku}):
        # Use a separate name for the JSON payload instead of rebinding
        # the loop variable (clearer, same behavior).
        details = subscription.read_json()
        if wanted in details['hypervisor']['name']:
            return details['id']
    # Explicit None instead of an implicit fall-through return.
    return None
def test_positive_mirror_on_sync(self):
    """Create 2 repositories with 'on_demand' download policy and mirror
    on sync option, associate them with capsule, sync first repo, move
    package from first repo to second one, sync it, attempt to install
    package on some host.

    :id: 39149642-1e7e-4ef8-8762-bec295913014

    :BZ: 1426408

    :expectedresults: host, subscribed to second repo only, can
        successfully install package

    :CaseLevel: System
    """
    repo1_name = gen_string('alphanumeric')
    repo2_name = gen_string('alphanumeric')
    # Create and publish first custom repository with 2 packages in it
    repo1_url = create_repo(
        repo1_name,
        FAKE_1_YUM_REPO,
        FAKE_1_YUM_REPO_RPMS[1:3],
    )
    # Create and publish second repo with no packages in it
    repo2_url = create_repo(repo2_name)
    # Create organization, product, repository in satellite, and lifecycle
    # environment
    org = entities.Organization().create()
    prod1 = entities.Product(organization=org).create()
    repo1 = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod1,
        url=repo1_url,
    ).create()
    prod2 = entities.Product(organization=org).create()
    repo2 = entities.Repository(
        download_policy='on_demand',
        mirror_on_sync=True,
        product=prod2,
        url=repo2_url,
    ).create()
    lce1 = entities.LifecycleEnvironment(organization=org).create()
    lce2 = entities.LifecycleEnvironment(organization=org).create()
    # Associate the lifecycle environments with the capsule
    capsule = entities.Capsule(id=self.capsule_id).read()
    for lce_id in (lce1.id, lce2.id):
        capsule.content_add_lifecycle_environment(data={
            'environment_id': lce_id,
        })
    result = capsule.content_lifecycle_environments()
    self.assertGreaterEqual(len(result['results']), 2)
    self.assertTrue({lce1.id, lce2.id}.issubset(
        [capsule_lce['id'] for capsule_lce in result['results']]), )
    # Create content views with the repositories
    cv1 = entities.ContentView(
        organization=org,
        repository=[repo1],
    ).create()
    cv2 = entities.ContentView(
        organization=org,
        repository=[repo2],
    ).create()
    # Sync first repository
    repo1.sync()
    repo1 = repo1.read()
    # Publish new version of the content view
    cv1.publish()
    cv1 = cv1.read()
    self.assertEqual(len(cv1.version), 1)
    cvv1 = cv1.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv1, lce1.id)
    cvv1 = cvv1.read()
    self.assertEqual(len(cvv1.environment), 2)
    # Assert that a task to sync lifecycle environment to the capsule
    # is started (or finished already)
    sync_status = capsule.content_get_sync()
    self.assertTrue(
        len(sync_status['active_sync_tasks']) >= 1
        or sync_status['last_sync_time'])
    # Wait till capsule sync finishes
    for task in sync_status['active_sync_tasks']:
        entities.ForemanTask(id=task['id']).poll()
    # Move one package from the first repo to second one
    ssh.command('mv {} {}'.format(
        os.path.join(
            PULP_PUBLISHED_YUM_REPOS_PATH,
            repo1_name,
            FAKE_1_YUM_REPO_RPMS[2],
        ),
        os.path.join(
            PULP_PUBLISHED_YUM_REPOS_PATH,
            repo2_name,
            FAKE_1_YUM_REPO_RPMS[2],
        ),
    ))
    # Update repositories (re-trigger 'createrepo' command)
    create_repo(repo1_name)
    create_repo(repo2_name)
    # Synchronize first repository
    repo1.sync()
    cv1.publish()
    cv1 = cv1.read()
    self.assertEqual(len(cv1.version), 2)
    # Versions are not guaranteed to come back ordered; sort by id so the
    # last element is the newest version.
    cv1.version.sort(key=lambda version: version.id)
    cvv1 = cv1.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv1, lce1.id)
    cvv1 = cvv1.read()
    self.assertEqual(len(cvv1.environment), 2)
    # Synchronize second repository
    repo2.sync()
    repo2 = repo2.read()
    # The moved package should now be the only one in repo2.
    self.assertEqual(repo2.content_counts['package'], 1)
    cv2.publish()
    cv2 = cv2.read()
    self.assertEqual(len(cv2.version), 1)
    cvv2 = cv2.version[-1].read()
    # Promote content view to lifecycle environment
    promote(cvv2, lce2.id)
    cvv2 = cvv2.read()
    self.assertEqual(len(cvv2.environment), 2)
    # Create activation key, add subscription to second repo only
    activation_key = entities.ActivationKey(
        content_view=cv2,
        environment=lce2,
        organization=org,
    ).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name={}'.format(prod2.name)})[0]
    activation_key.add_subscriptions(
        data={'subscription_id': subscription.id})
    # Subscribe a host with activation key
    with VirtualMachine(distro=DISTRO_RHEL7) as client:
        client.install_katello_ca()
        client.register_contenthost(
            org.label,
            activation_key.name,
        )
        # Install the package
        package_name = FAKE_1_YUM_REPO_RPMS[2].rstrip('.rpm')
        result = client.run('yum install -y {}'.format(package_name))
        self.assertEqual(result.return_code, 0)
        # Ensure package installed
        result = client.run('rpm -qa | grep {}'.format(package_name))
        self.assertEqual(result.return_code, 0)
        self.assertIn(package_name, result.stdout[0])
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 193d0159-d4a7-4f50-b037-7289f4576ade

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 5 * 60  # delay for sync date in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    with Session(self) as session:
        # Schedule the plan to start `delay` seconds from the client's
        # current time.
        startdate = (self.get_client_datetime(session.browser) +
                     timedelta(seconds=delay))
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(plan_name, add_products=[PRDS['rhel']])
        # Wait quarter of expected time
        # (the log message previously claimed delay/2 while the code
        # slept delay/4; log now matches the actual sleep)
        self.logger.info('Waiting {0} seconds to check product {1}'
                         ' was not synced'.format(delay / 4, PRDS['rhel']))
        sleep(delay / 4)
        # Verify product has not been synced yet
        with self.assertRaises(AssertionError):
            self.validate_task_status(repo.id, max_tries=2)
        self.validate_repo_content(
            repo,
            ['erratum', 'package', 'package_group'],
            after_sync=False,
        )
        # Wait the rest of expected time
        self.logger.info('Waiting {0} seconds to check product {1}'
                         ' was synced'.format(delay * 3 / 4, PRDS['rhel']))
        sleep(delay * 3 / 4)
        # Verify product was synced successfully
        self.validate_task_status(repo.id,
                                  repo_backend_id=repo.backend_identifier)
        self.validate_repo_content(
            repo,
            ['erratum', 'package', 'package_group'],
        )
def test_pre_scenario_generate_errata_with_previous_version_katello_agent_client(
        self, default_org):
    """Create product and repo from which the errata will be generated
    for the Satellite client or content host.

    :id: preupgrade-4e515f84-2582-4b8b-a625-9f6c6966aa59

    :steps:
        1. Create Life Cycle Environment, Product and Custom Yum Repo.
        2. Enable/sync 'base os RHEL7' and tools repos.
        3. Create a content view and publish it.
        4. Create activation key and add subscription.
        5. Registering Docker Content Host RHEL7.
        6. Install and check katello agent and goferd service running on
           host.
        7. Generate Errata by Installing Outdated/Older Packages.
        8. Collect the Erratum list.

    :expectedresults:
        1. The content host is created.
        2. errata count, erratum list will be generated to satellite
           client/content host.
    """
    environment = entities.LifecycleEnvironment(
        organization=default_org).search(
            query={'search': 'name=Library'})[0]
    product = entities.Product(organization=default_org).create()
    custom_yum_repo = entities.Repository(
        product=product,
        content_type='yum',
        url=settings.repos.yum_9.url).create()
    call_entity_method_with_timeout(product.sync, timeout=1400)
    repos = self._get_rh_rhel_tools_repos(default_org)
    repos.append(custom_yum_repo)
    content_view = publish_content_view(org=default_org, repolist=repos)
    custom_sub = entities.Subscription(organization=default_org).search(
        query={'search': f'name={product.name}'})[0]
    # NOTE(review): organization=1 assumes the default org's id is 1 --
    # confirm this holds for the deployment under test.
    rh_sub = entities.Subscription(organization=1).search(
        query={'search': f'{DEFAULT_SUBSCRIPTION_NAME}'})[0]
    ak = entities.ActivationKey(
        content_view=content_view,
        organization=default_org.id,
        environment=environment,
        auto_attach=False,
    ).create()
    ak.add_subscriptions(data={'subscription_id': custom_sub.id})
    ak.add_subscriptions(data={'subscription_id': rh_sub.id})
    # Spin up a dockerized RHEL7 content host registered via the AK.
    rhel7_client = dockerize(ak_name=ak.name, distro='rhel7',
                             org_label=default_org.label)
    client_container_id = list(rhel7_client.values())[0]
    docker_vm = settings.upgrade.docker_vm
    # Wait until the container reports registration to the organization.
    wait_for(
        lambda: default_org.label in execute(
            docker_execute_command,
            client_container_id,
            'subscription-manager identity',
            host=docker_vm,
        )[docker_vm],
        timeout=800,
        delay=2,
        logger=logger,
    )
    status = execute(
        docker_execute_command,
        client_container_id,
        'subscription-manager identity',
        host=docker_vm,
    )[docker_vm]
    assert default_org.label in status
    # Update OS to make errata count 0
    execute(docker_execute_command,
            client_container_id,
            'yum update -y',
            host=docker_vm)[docker_vm]
    install_or_update_package(client_hostname=client_container_id,
                              package="katello-agent")
    run_goferd(client_hostname=client_container_id)
    # Installing outdated packages makes their errata applicable.
    for package in FAKE_9_YUM_OUTDATED_PACKAGES:
        install_or_update_package(client_hostname=client_container_id,
                                  package=package)
    host = entities.Host().search(
        query={'search': f'activation_key={ak.name}'})[0]
    installable_errata_count = host.content_facet_attributes[
        'errata_counts']['total']
    assert installable_errata_count > 1
    erratum_list = entities.Errata(repository=custom_yum_repo).search(
        query={
            'order': 'updated ASC',
            'per_page': 1000
        })
    errata_ids = [errata.errata_id for errata in erratum_list]
    assert sorted(errata_ids) == sorted(settings.repos.yum_9.errata)
    # Persist identifiers for the matching post-upgrade scenario.
    scenario_dict = {
        self.__class__.__name__: {
            'rhel_client': rhel7_client,
            'activation_key': ak.name,
            'custom_repo_id': custom_yum_repo.id,
            'product_id': product.id,
        }
    }
    create_dict(scenario_dict)
def test_pre_scenario_generate_errata_for_client(self):
    """Create product and repo from which the errata will be generated
    for the Satellite client or content host.

    :id: 88fd28e6-b4df-46c0-91d6-784859fd1c21

    :steps:
        1. Create Life Cycle Environment, Product and Custom Yum Repo
        2. Create custom tools, rhel repos and sync them
        3. Create content view and publish it
        4. Create activation key and add subscription.
        5. Registering Docker Content Host RHEL7
        6. Check katello agent and goferd service running on host
        7. Generate Errata by Installing Outdated/Older Packages
        8. Collect the Erratum list

    :expectedresults:
        1. The content host is created
        2. errata count, erratum list will be generated to satellite
           client/content host
    """
    org = entities.Organization().create()
    loc = entities.Location(organization=[org]).create()
    environment = entities.LifecycleEnvironment(organization=org).search(
        query={'search': 'name=Library'})[0]
    product = entities.Product(organization=org).create()
    custom_yum_repo = entities.Repository(product=product,
                                          content_type='yum',
                                          url=FAKE_9_YUM_REPO).create()
    product.sync()
    tools_repo, rhel_repo = self._create_custom_rhel_tools_repos(product)
    repolist = [custom_yum_repo, tools_repo, rhel_repo]
    content_view = self._publish_content_view(org=org, repolist=repolist)
    ak = entities.ActivationKey(content_view=content_view,
                                organization=org.id,
                                environment=environment).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name={}'.format(product.name)})[0]
    ak.add_subscriptions(data={'subscription_id': subscription.id})
    # Spin up a dockerized RHEL7 content host registered via the AK.
    rhel7_client = dockerize(ak_name=ak.name, distro='rhel7',
                             org_label=org.label)
    client_container_id = list(rhel7_client.values())[0]
    client_container_name = [key for key in rhel7_client.keys()][0]
    self._host_location_update(client_container_name=client_container_name,
                               loc=loc)
    self._install_or_update_package(client_container_id, 'katello-agent')
    self._run_goferd(client_container_id)
    # Installing outdated packages makes their errata applicable.
    for package in FAKE_9_YUM_OUTDATED_PACKAGES:
        self._install_or_update_package(client_container_id, package)
    host = entities.Host().search(
        query={'search': 'activation_key={0}'.format(ak.name)})[0]
    applicable_errata_count = host.content_facet_attributes[
        'errata_counts']['total']
    self.assertGreater(applicable_errata_count, 1)
    erratum_list = entities.Errata(repository=custom_yum_repo).search(
        query={
            'order': 'updated ASC',
            'per_page': 1000,
        })
    errata_ids = [errata.errata_id for errata in erratum_list]
    self.assertEqual(sorted(errata_ids), sorted(FAKE_9_YUM_ERRATUM))
    # Persist identifiers for the matching post-upgrade scenario.
    # NOTE(review): 'conten_view_id' is misspelled, but the post-upgrade
    # consumer presumably reads this exact key -- confirm before renaming.
    scenario_dict = {
        self.__class__.__name__: {
            'rhel_client': rhel7_client,
            'activation_key': ak.name,
            'custom_repo_id': custom_yum_repo.id,
            'product_id': product.id,
            'conten_view_id': content_view.id
        }
    }
    create_dict(scenario_dict)
def test_positive_service_level_subscription_with_custom_product(self):
    """Subscribe a host to activation key with Premium service level and
    with custom product

    :id: 195a8049-860e-494d-b7f0-0794384194f7

    :customerscenario: true

    :steps:
        1. Create a product with custom repository synchronized
        2. Create and Publish a content view with the created repository
        3. Create an activation key and assign the created content view
        4. Add a RedHat subscription to activation key (The product
           subscription should be added automatically)
        5. Set the activation service_level to Premium
        6. Register a host to activation key
        7. List consumed subscriptions on host
        8. List the subscription in Content Host UI

    :expectedresults:
        1. The product subscription is listed in consumed subscriptions on
           host
        2. The product subscription is listed in the contenthost
           subscriptions UI

    :BZ: 1394357

    :CaseLevel: System
    """
    org = entities.Organization().create()
    self.upload_manifest(org.id, manifests.clone())
    subscription = entities.Subscription(organization=org)
    # Helper sets up product, repo, CV and AK; returns their ids.
    entities_ids = setup_org_for_a_custom_repo({
        'url': FAKE_1_YUM_REPO,
        'organization-id': org.id,
    })
    product = entities.Product(id=entities_ids['product-id']).read()
    activation_key = entities.ActivationKey(
        id=entities_ids['activationkey-id']).read()
    # add the default RH subscription
    for sub in subscription.search():
        if sub.read_json()['product_name'] == DEFAULT_SUBSCRIPTION_NAME:
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id,
            })
            break
    # ensure all the needed subscriptions are attached to activation key
    results = activation_key.subscriptions()['results']
    self.assertEqual(
        {product.name, DEFAULT_SUBSCRIPTION_NAME},
        {ak_subscription['name'] for ak_subscription in results}
    )
    activation_key.service_level = 'Premium'
    activation_key = activation_key.update(['service_level'])
    with VirtualMachine() as vm:
        vm.install_katello_ca()
        vm.register_contenthost(
            org.label, activation_key=activation_key.name)
        self.assertTrue(vm.subscribed)
        # The custom product subscription must show up as consumed.
        result = vm.run('subscription-manager list --consumed')
        self.assertEqual(result.return_code, 0)
        self.assertIn('Subscription Name: {0}'.format(product.name),
                      '\n'.join(result.stdout))
        with Session(self) as session:
            set_context(session, org=org.name)
            # Verify the subscription also appears in the content host UI.
            self.contenthost.search_and_click(vm.hostname)
            self.contenthost.click(
                tab_locators['contenthost.tab_subscriptions'])
            self.contenthost.click(
                tab_locators['contenthost.tab_subscriptions_subscriptions']
            )
            self.assertIsNotNone(
                self.contenthost.wait_until_element(
                    locators['contenthost.subscription_select']
                    % product.name)
            )
def test_pre_scenario_custom_repo_check(self):
    """This is pre-upgrade scenario test to verify if we can create a
    custom repository and consume it via content host.

    :id: preupgrade-eb6831b1-c5b6-4941-a325-994a09467478

    :steps:
        1. Before Satellite upgrade.
        2. Create new Organization, Location.
        3. Create Product, custom repo, cv.
        4. Create activation key and add subscription.
        5. Create a content host, register and install package on it.

    :expectedresults:
        1. Custom repo is created.
        2. Package is installed on Content host.
    """
    org = entities.Organization().create()
    loc = entities.Location(organization=[org]).create()
    lce = entities.LifecycleEnvironment(organization=org).create()
    product = entities.Product(organization=org).create()
    self._create_repo()
    repo = entities.Repository(product=product.id,
                               url=self.custom_repo).create()
    repo.sync()
    content_view = self._create_publish_content_view(org=org, repo=repo)
    promote(content_view.version[0], lce.id)
    # Verify the rpm was actually published on disk under the promoted
    # environment's pulp path.
    result = ssh.command(
        'ls /var/lib/pulp/published/yum/https/repos/{}/{}/{}/custom/{}/{}/'
        'Packages/b/|grep {}'.format(org.label, lce.name,
                                     content_view.label, product.label,
                                     repo.label, self.rpm1_name))
    self.assertEqual(result.return_code, 0)
    self.assertGreaterEqual(len(result.stdout), 1)
    subscription = entities.Subscription(organization=org).search(
        query={'search': 'name={}'.format(product.name)})[0]
    ak = entities.ActivationKey(content_view=content_view,
                                organization=org.id,
                                environment=lce).create()
    ak.add_subscriptions(data={'subscription_id': subscription.id})
    # Spin up a dockerized RHEL7 content host registered via the AK.
    rhel7_client = dockerize(ak_name=ak.name, distro='rhel7',
                             org_label=org.label)
    client_container_id = [value for value in rhel7_client.values()][0]
    client_container_name = [key for key in rhel7_client.keys()][0]
    self._host_location_update(client_container_name=client_container_name,
                               loc=loc)
    # Confirm the container registered to the expected organization.
    status = execute(docker_execute_command,
                     client_container_id,
                     'subscription-manager identity',
                     host=self.docker_vm)[self.docker_vm]
    self.assertIn(org.name, status)
    self._install_package(client_container_id, self.rpm1_name)
    # Persist identifiers for the matching post-upgrade scenario.
    scenario_dict = {
        self.__class__.__name__: {
            'content_view_name': content_view.name,
            'lce_id': lce.id,
            'lce_name': lce.name,
            'org_label': org.label,
            'prod_label': product.label,
            'rhel_client': rhel7_client,
            'repo_name': repo.name,
        }
    }
    create_dict(scenario_dict)
def test_positive_rename_satellite(self):
    """run katello-change-hostname on Satellite server

    :id: 9944bfb1-1440-4820-ada8-2e219f09c0be

    :setup: Satellite server with synchronized rh and custom repos and
        with a registered host

    :steps:

        1. Rename Satellite using katello-change-hostname
        2. Do basic checks for hostname change (hostnamctl)
        3. Run some existence tests, as in backup testing
        4. Verify certificates were properly recreated, check
           for instances of old hostname
           in etc/foreman-installer/scenarios.d/
        5. Check for updated repo urls, installation media paths,
           updated internal capsule
        6. Check usability of entities created before rename: refresh
           manifest, resync repos, republish CVs and re-register hosts
        7. Create new entities (run end-to-end test from robottelo)

    :BZ: 1469466

    :expectedresults: Satellite hostname is successfully updated
        and the server functions correctly

    :CaseAutomation: automated
    """
    with original_manifest() as manifest:
        upload_manifest(self.org.id, manifest.content)
    with get_connection() as connection:
        old_hostname = connection.run('hostname').stdout[0]
        new_hostname = 'new-{0}'.format(old_hostname)
        # create installation medium with hostname in path
        medium_path = 'http://{0}/testpath-{1}/os/'.format(
            old_hostname, gen_string('alpha'))
        medium = entities.Media(organization=[self.org],
                                path_=medium_path).create()
        repo = entities.Repository(product=self.product,
                                   name='testrepo').create()
        result = connection.run(
            'satellite-change-hostname {0} -y -u {1} -p {2}'.format(
                new_hostname, self.username, self.password),
            timeout=1200,
        )
        self.assertEqual(result.return_code, 0, 'unsuccessful rename')
        self.assertIn(BCK_MSG, result.stdout)
        # services running after rename?
        result = connection.run('hammer ping')
        self.assertEqual(result.return_code, 0,
                         'services did not start properly')
        # basic hostname check
        result = connection.run('hostname')
        self.assertEqual(result.return_code, 0)
        self.assertIn(new_hostname, result.stdout,
                      'hostname left unchanged')
        # check default capsule
        result = connection.run(
            'hammer -u {1} -p {2} --output json capsule \
                info --name {0}'.format(new_hostname, self.username,
                                        self.password),
            output_format='json',
        )
        self.assertEqual(result.return_code, 0,
                         'internal capsule not renamed correctly')
        self.assertEqual(result.stdout['url'],
                         "https://{}:9090".format(new_hostname))
        # check old consumer certs were deleted
        result = connection.run('rpm -qa | grep ^{}'.format(old_hostname))
        self.assertEqual(result.return_code, 1,
                         'old consumer certificates not removed')
        # check new consumer certs were created
        result = connection.run('rpm -qa | grep ^{}'.format(new_hostname))
        self.assertEqual(result.return_code, 0,
                         'new consumer certificates not created')
        # check if installation media paths were updated
        result = connection.run(
            'hammer -u {1} -p {2} --output json \
                medium info --id {0}'.format(medium.id, self.username,
                                             self.password),
            output_format='json',
        )
        self.assertEqual(result.return_code, 0)
        self.assertIn(new_hostname, result.stdout['path'],
                      'medium path not updated correctly')
        # check answer file for instances of old hostname
        ans_f = '/etc/foreman-installer/scenarios.d/satellite-answers.yaml'
        result = connection.run('grep " {0}" {1}'.format(
            old_hostname, ans_f))
        self.assertEqual(
            result.return_code,
            1,
            'old hostname was not correctly replaced \
                in answers.yml',
        )
        # check repository published at path
        result = connection.run(
            'hammer -u {1} -p {2} --output json \
                repository info --id {0}'.format(
                repo.id, self.username, self.password),
            output_format='json',
        )
        self.assertEqual(result.return_code, 0)
        self.assertIn(
            new_hostname,
            result.stdout['published-at'],
            'repository published path not updated correctly',
        )
        # refresh manifest
        sub = entities.Subscription(organization=self.org)
        sub.refresh_manifest(data={'organization_id': self.org.id})
        # sync and publish the previously created repo
        repo.sync()
        cv = entities.ContentView(organization=self.org).create()
        cv.repository = [repo]
        cv.update(['repository'])
        cv.publish()
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6697a00f-2181-4c2b-88eb-2333268d780b

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 2 * 60  # delay for sync date in seconds
    org = entities.Organization().create()
    # Upload a cloned manifest so RH repos can be enabled for this org
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content})
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'], organization=org).search()[0]
    repo = entities.Repository(id=repo_id).read()
    if is_open('BZ:1695733'):
        self.logger.info('Need to set seconds to zero because BZ:1695733')
        sync_date = datetime.utcnow().replace(second=0) + timedelta(
            seconds=delay)
    else:
        # BUG FIX: the original wrote
        #   sync_date = (datetime.utcnow() + timedelta(seconds=delay), )
        # — the stray trailing comma produced a one-element tuple, so a
        # tuple (not a datetime) was passed as SyncPlan's sync_date.
        # Assign the datetime directly, mirroring the branch above.
        sync_date = datetime.utcnow() + timedelta(seconds=delay)
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval='hourly',
        sync_date=sync_date).create()
    # Create and Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product is not synced and doesn't have any content
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo.id, max_tries=1)
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait quarter of expected time
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay / 4, product.name))
    sleep(delay / 4)
    # Verify product has not been synced yet
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo.id, max_tries=1)
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait the rest of expected time
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format((delay * 3 / 4), product.name))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    self.validate_task_status(repo.id,
                              repo_backend_id=repo.backend_identifier)
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'])
def test_positive_end_to_end(self):
    """Perform end to end smoke tests using RH and custom repos.

    1. Create a new user with admin permissions
    2. Using the new user from above
        1. Create a new organization
        2. Clone and upload manifest
        3. Create a new lifecycle environment
        4. Create a custom product
        5. Create a custom YUM repository
        6. Create a custom PUPPET repository
        7. Enable a Red Hat repository
        8. Synchronize the three repositories
        9. Create a new content view
        10. Associate the YUM and Red Hat repositories to new content view
        11. Add a PUPPET module to new content view
        12. Publish content view
        13. Promote content view to the lifecycle environment
        14. Create a new activation key
        15. Add the products to the activation key
        16. Create a new libvirt compute resource
        17. Create a new subnet
        18. Create a new domain
        19. Create a new hostgroup and associate previous entities to it
        20. Provision a client

    :id: b2f73740-d3ce-4e6e-abc7-b23e5562bac1

    :expectedresults: All tests should succeed and Content should be
        successfully fetched by client.
    """
    # step 1: Create a new user with admin permissions
    login = gen_string('alphanumeric')
    password = gen_string('alphanumeric')
    entities.User(admin=True, login=login, password=password).create()
    # step 2.1: Create a new organization
    # All subsequent nailgun calls authenticate as the new admin user via
    # this server_config, proving the user actually has admin rights.
    server_config = get_nailgun_config()
    server_config.auth = (login, password)
    org = entities.Organization(server_config).create()
    # step 2.2: Clone and upload manifest
    if self.fake_manifest_is_set:
        with manifests.clone() as manifest:
            upload_manifest(org.id, manifest.content)
    # step 2.3: Create a new lifecycle environment
    le1 = entities.LifecycleEnvironment(server_config,
                                        organization=org).create()
    # step 2.4: Create a custom product
    prod = entities.Product(server_config, organization=org).create()
    repositories = []
    # step 2.5: Create custom YUM repository
    repo1 = entities.Repository(server_config,
                                product=prod,
                                content_type='yum',
                                url=CUSTOM_RPM_REPO).create()
    repositories.append(repo1)
    # step 2.6: Create custom PUPPET repository
    repo2 = entities.Repository(server_config,
                                product=prod,
                                content_type='puppet',
                                url=FAKE_0_PUPPET_REPO).create()
    repositories.append(repo2)
    # step 2.7: Enable a Red Hat repository (only when a manifest was
    # uploaded, since RH repos require a subscription)
    if self.fake_manifest_is_set:
        repo3 = entities.Repository(id=enable_rhrepo_and_fetchid(
            basearch='x86_64',
            org_id=org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever='6Server',
        ))
        repositories.append(repo3)
    # step 2.8: Synchronize the three repositories
    for repo in repositories:
        repo.sync()
    # step 2.9: Create content view
    content_view = entities.ContentView(server_config,
                                        organization=org).create()
    # step 2.10: Associate the YUM and Red Hat repositories to new content
    # view (the puppet repo is attached separately as a puppet module below)
    repositories.remove(repo2)
    content_view.repository = repositories
    content_view = content_view.update(['repository'])
    # step 2.11: Add a PUPPET module to new content view
    puppet_mods = content_view.available_puppet_modules()
    self.assertGreater(len(puppet_mods['results']), 0)
    # Any module from the synced puppet repo will do for this smoke test
    puppet_module = random.choice(puppet_mods['results'])
    puppet = entities.ContentViewPuppetModule(
        author=puppet_module['author'],
        content_view=content_view,
        name=puppet_module['name']).create()
    self.assertEqual(puppet.name, puppet_module['name'])
    # step 2.12: Publish content view
    content_view.publish()
    # step 2.13: Promote content view to the lifecycle environment
    content_view = content_view.read()
    self.assertEqual(len(content_view.version), 1)
    cv_version = content_view.version[0].read()
    # Freshly published version lives only in Library before promotion
    self.assertEqual(len(cv_version.environment), 1)
    promote(cv_version, le1.id)
    # check that content view exists in lifecycle
    content_view = content_view.read()
    self.assertEqual(len(content_view.version), 1)
    cv_version = cv_version.read()
    # step 2.14: Create a new activation key
    activation_key_name = gen_string('alpha')
    activation_key = entities.ActivationKey(
        name=activation_key_name,
        environment=le1,
        organization=org,
        content_view=content_view).create()
    # step 2.15: Add the products to the activation key
    for sub in entities.Subscription(organization=org).search():
        if sub.name == DEFAULT_SUBSCRIPTION_NAME:
            activation_key.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': sub.id
            })
            break
    # step 2.15.1: Enable product content
    if self.fake_manifest_is_set:
        activation_key.content_override(
            data={
                'content_overrides': [{
                    'content_label': AK_CONTENT_LABEL,
                    'value': '1'
                }]
            })
    # BONUS: Create a content host and associate it with promoted
    # content view and last lifecycle where it exists
    content_host = entities.Host(
        content_facet_attributes={
            'content_view_id': content_view.id,
            'lifecycle_environment_id': le1.id,
        },
        organization=org,
    ).create()
    # check that content view matches what we passed
    self.assertEqual(
        content_host.content_facet_attributes['content_view_id'],
        content_view.id)
    # check that lifecycle environment matches
    self.assertEqual(
        content_host.content_facet_attributes['lifecycle_environment_id'],
        le1.id)
    # step 2.16: Create a new libvirt compute resource
    entities.LibvirtComputeResource(
        server_config,
        url='qemu+ssh://root@{0}/system'.format(
            settings.compute_resources.libvirt_hostname),
    ).create()
    # step 2.17: Create a new subnet
    subnet = entities.Subnet(server_config).create()
    # step 2.18: Create a new domain
    domain = entities.Domain(server_config).create()
    # step 2.19: Create a new hostgroup and associate previous entities to
    # it
    entities.HostGroup(server_config, domain=domain,
                       subnet=subnet).create()
    # step 2.20: Provision a client
    self.client_provisioning(activation_key_name, org.label)
def test_positive_add_remove_subscription(module_org, module_ak_cv_lce,
                                          module_puppet_classes):
    """Try to bulk add and remove a subscription to members of a host
    collection.

    :id: c4ec5727-eb25-452e-a91f-87cafb16666b

    :steps:

        1. Create HC, add AK to HC
        2. Create product so we can use it's subscription
        3. Create some VMs and register them with AK so they are in HC
        4. Add the subscription to the members of the Host Collection
        5. Assert subscription is added
        6. Bulk remove subscription
        7. Assert it is removed

    :expectedresults: subscription added to, and removed from, members of
        host collection

    :CaseImportance: Critical
    """
    # this command creates a host collection and "appends", makes
    # available, to the AK
    module_ak_cv_lce.host_collection.append(
        entities.HostCollection(organization=module_org).create())
    # Move HC from Add tab to List tab on AK view
    module_ak_cv_lce = module_ak_cv_lce.update(['host_collection'])
    # Create a product so we have a subscription to use
    product = entities.Product(organization=module_org).create()
    prod_name = product.name
    # Creating the product also creates a subscription of the same name;
    # look it up so we can attach it to hosts below
    product_subscription = entities.Subscription().search(
        query={'search': f'name={prod_name}'})[0]
    # Create and register VMs as members of Host Collection
    with VMBroker(nick='rhel7', host_classes={'host': ContentHost},
                  _count=2) as hosts:
        for client in hosts:
            client.install_katello_ca()
            # Registering through the AK places each host into the HC
            client.register_contenthost(module_org.label,
                                        module_ak_cv_lce.name)
        # Read host_collection back from Satellite to get host_ids
        host_collection = module_ak_cv_lce.host_collection[0].read()
        host_ids = [host.id for host in host_collection.host]
        # Add subscription
        # Call nailgun to make the API PUT to members of Host Collection
        entities.Host().bulk_add_subscriptions(
            data={
                "organization_id": module_org.id,
                "included": {
                    "ids": host_ids
                },
                "subscriptions": [{
                    "id": product_subscription.id,
                    "quantity": 1
                }],
            })
        # GET the subscriptions from hosts and assert they are there
        for host_id in host_ids:
            req = entities.HostSubscription(host=host_id).subscriptions()
            assert (prod_name in req['results'][0]['product_name']
                    ), 'Subscription not applied to HC members'
        # Remove the subscription
        # Call nailgun to make the API PUT to members of Host Collection
        entities.Host().bulk_remove_subscriptions(
            data={
                "organization_id": module_org.id,
                "included": {
                    "ids": host_ids
                },
                "subscriptions": [{
                    "id": product_subscription.id,
                    "quantity": 1
                }],
            })
        # GET the subscriptions from hosts and assert they are gone
        for host_id in host_ids:
            req = entities.HostSubscription(host=host_id).subscriptions()
            assert not req[
                'results'], 'Subscription not removed from HC members'