def test_positive_reinstall_on_same_node_after_remove(self):
    """Reinstall capsule on the same node after remove

    :id: fac35a44-0bc9-44e9-a2c3-398e1aa9900c

    :customerscenario: true

    :expectedresults: The capsule successfully reinstalled

    :BZ: 1327442

    :CaseLevel: System
    """
    # Note: capsule-remove has been replaced by katello-remove
    with CapsuleVirtualMachine() as capsule_vm:
        # ensure that capsule refresh-features succeed (capsule is healthy
        # before we start tearing it down)
        with self.assertNotRaises(CLIReturnCodeError):
            Capsule.refresh_features(
                {'name': capsule_vm._capsule_hostname})
        # katello-remove command request to confirm by typing Y and then by
        # typing remove
        result = capsule_vm.run("printf 'Y\nremove\n' | katello-remove")
        self.assertEqual(result.return_code, 0)
        # ensure that capsule refresh-features fail — the capsule services
        # must really be gone after katello-remove
        with self.assertRaises(CLIReturnCodeError):
            Capsule.refresh_features(
                {'name': capsule_vm._capsule_hostname})
        # reinstall katello certs as they have been removed
        capsule_vm.install_katello_ca()
        # refresh subscription
        capsule_vm.run('subscription-manager refresh')
        # install satellite-capsule package
        result = capsule_vm.run('yum install -y satellite-capsule')
        self.assertEqual(result.return_code, 0)
        # generate capsule certs and installer command on the satellite
        cert_file_path = '/tmp/{0}-certs.tar'.format(capsule_vm.hostname)
        result = ssh.command('capsule-certs-generate '
                             '--foreman-proxy-fqdn {0} '
                             '--certs-tar {1}'.format(
                                 capsule_vm.hostname, cert_file_path))
        self.assertEqual(result.return_code, 0)
        # retrieve the installer command from the result output
        installer_cmd = extract_capsule_satellite_installer_command(
            result.stdout)
        # copy the generated certs to capsule vm (via a temporary local
        # file, since the tar lives on the satellite host)
        _, temporary_local_cert_file_path = mkstemp(suffix='-certs.tar')
        ssh.download_file(remote_file=cert_file_path,
                          local_file=temporary_local_cert_file_path,
                          hostname=settings.server.hostname)
        ssh.upload_file(local_file=temporary_local_cert_file_path,
                        remote_file=cert_file_path,
                        hostname=capsule_vm.hostname)
        # delete the temporary file
        os.remove(temporary_local_cert_file_path)
        # generous timeout: the capsule installer is slow
        result = capsule_vm.run(installer_cmd, timeout=1500)
        self.assertEqual(result.return_code, 0)
        # ensure that capsule refresh-features succeed again after the
        # reinstall
        with self.assertNotRaises(CLIReturnCodeError):
            Capsule.refresh_features(
                {'name': capsule_vm._capsule_hostname})
def test_positive_add_subscription_by_id(self):
    """Test that subscription can be added to activation key

    @Feature: Activation key - Subscription

    @Steps:

    1. Create Activation key
    2. Upload manifest and add subscription
    3. Associate the activation key to subscription

    @Assert: Subscription successfully added to activation key
    """
    # clone a manifest and push it to the server so it can be imported
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    org_id = make_org()['id']
    ackey_id = self._make_activation_key()['id']
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org_id,
    })
    # per_page=False disables pagination so all subscriptions are listed
    subs_id = Subscription.list(
        {'organization-id': org_id},
        per_page=False
    )
    result = ActivationKey.add_subscription({
        u'id': ackey_id,
        u'subscription-id': subs_id[0]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
def default_url_on_new_port(oldport, newport):
    """Creates context where the default smart-proxy is forwarded on a new
    port

    :param int oldport: Port to be forwarded.
    :param int newport: New port to be used to forward `oldport`.

    :return: A string containing the new capsule URL with port.
    :rtype: str
    """
    # NOTE(review): this is a generator that yields exactly once —
    # presumably it is wrapped with ``contextlib.contextmanager`` at its
    # decoration/registration site; confirm before calling it directly.
    logger = logging.getLogger('robottelo')
    domain = settings.server.hostname
    user = settings.server.ssh_username
    key = settings.server.ssh_key
    # copy the ssh key to a per-port location so each tunnel has its own
    # key file, then restrict its permissions as ssh requires
    ssh.upload_file(key, '/tmp/dsa_{0}'.format(newport))
    ssh.command('chmod 700 /tmp/dsa_{0}'.format(newport))
    with ssh._get_connection() as connection:
        # forward ``newport`` on the server to ``oldport`` of the default
        # smart proxy via an ssh -L tunnel
        command = u'ssh -i {0} -L {1}:{2}:{3} {4}@{5}'.format(
            '/tmp/dsa_{0}'.format(newport), newport, domain, oldport,
            user, domain)
        logger.debug('Creating tunnel {0}'.format(command))
        # Run command and timeout in 30 seconds.
        _, _, stderr = connection.exec_command(command, 30)
        stderr = stderr.read()
        if len(stderr) > 0:
            logger.debug('Tunnel failed: {0}'.format(stderr))
            # Something failed, so raise an exception.
            raise SSHTunnelError(stderr)
        yield 'https://{0}:{1}'.format(domain, newport)
def organization_with_tr_data_manifests(cls, options=None): """Import Organizations (from spacewalk-report users) with manifests. :returns: A tuple of SSHCommandResult and a Dictionary containing the transition data of the Import """ # prepare manifests for every organization manifest_list = [] csv_records = cls.csv_to_dataset([options["csv-file"]]) man_dir = ssh.command(u"mktemp -d").stdout[0] for org in set([rec["organization"] for rec in csv_records]): for char in [" ", ".", "#"]: org = org.replace(char, "_") with manifests.clone() as manifest: ssh.upload_file(manifest.content, u"{0}/{1}.zip".format(man_dir, org)) manifest_list.append(u"{0}/{1}.zip".format(man_dir, org)) options.update({"upload-manifests-from": man_dir}) result = cls.organization(options) ssh.command(u"rm -rf {0}".format(man_dir)) transition_data = cls.read_transition_csv( ssh.command(u"ls -v ${HOME}/.transition_data/organizations*").stdout[:-1] ) return (result, transition_data)
def test_positive_enable_by_label(self):
    """Enable a repository from a repository set, addressing the
    organization by its label rather than name or id.

    @id: 5230c1cd-fed7-40ac-8445-bac4f9c5ee68

    @Assert: Repository was enabled
    """
    new_org = make_org()
    # Import a cloned manifest so the Red Hat repository sets exist.
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': new_org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization-label': new_org['label'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    available = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization-label': new_org['label'],
        u'product': PRDS['rhel'],
    })
    # Pick the enabled-flag of the exact arch/release we just enabled.
    matching_flags = [
        entry['enabled']
        for entry in available
        if entry['arch'] == 'x86_64' and entry['release'] == '6Server'
    ]
    self.assertEqual(matching_flags[0], 'true')
def test_positive_enable_by_name(self):
    """Enable repo from reposet by names of reposet, org and product

    :id: a78537bd-b88d-4f00-8901-e7944e5de729

    :expectedresults: Repository was enabled

    :CaseImportance: Critical
    """
    org = make_org()
    # import a cloned manifest so RH repository sets are available
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    result = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
    })
    # check the enabled flag of the exact arch/release we enabled
    enabled = [
        repo['enabled']
        for repo in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ][0]
    self.assertEqual(enabled, 'true')
def test_positive_enable_by_name(self):
    """Enable a repository from a repository set, addressing the
    reposet, organization and product all by name.

    @id: a78537bd-b88d-4f00-8901-e7944e5de729

    @Assert: Repository was enabled
    """
    new_org = make_org()
    # A manifest import is required before any RH reposet can be enabled.
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': new_org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization': new_org['name'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    listing = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization': new_org['name'],
        u'product': PRDS['rhel'],
    })
    # Only the x86_64 / 6Server entry is relevant to this assertion.
    flags = [
        entry['enabled']
        for entry in listing
        if entry['arch'] == 'x86_64' and entry['release'] == '6Server'
    ]
    self.assertEqual(flags[0], 'true')
def test_positive_associate_scap_policy_with_tailoringfiles_name(self):
    """Associate tailoring file by name to scap policy

    :id: d0f9b244-b92d-4889-ba6a-8973ea05bf43

    :steps:

        1. Login to hammer shell.
        2. Execute "policy" command with "create" as sub-command.
        3. Pass valid parameters.
        4. Associate tailoring file by "tailoring-file" with policy

    :expectedresults: The policy is created and associated successfully.
    """
    # push the configured tailoring file to the server where hammer runs
    _, file_name = os.path.split(settings.oscap.tailoring_path)
    ssh.upload_file(
        local_file=settings.oscap.tailoring_path,
        remote_file="/tmp/{0}".format(file_name)
    )
    tailoring_file = make_tailoringfile({
        'scap-file': '/tmp/{0}'.format(file_name)
    })
    tailor_profile_id = tailoring_file['tailoring-file-profiles'][0]['id']
    # associate the tailoring file by *name* (not id) at policy creation
    scap_policy = make_scap_policy({
        'scap-content-id': self.scap_id_rhel6,
        'scap-content-profile-id': self.scap_profile_id_rhel6,
        'period': OSCAP_PERIOD['weekly'].lower(),
        'weekday': OSCAP_WEEKDAY['friday'].lower(),
        'tailoring-file': tailoring_file['name'],
        'tailoring-file-profile-id': tailor_profile_id
    })
    self.assertEqual(scap_policy['tailoring-file-id'],
                     tailoring_file['id'])
    self.assertEqual(scap_policy['tailoring-file-profile-id'],
                     tailor_profile_id)
def upload_manifest_locked(org_id, manifest=None, interface=INTERFACE_API, timeout=None):
    """Upload a manifest with locking, using the requested interface.

    :type org_id: int
    :type manifest: robottelo.manifests.Manifest
    :type interface: str
    :type timeout: int

    :returns: the upload result

    Note: The manifest uploading is strictly locked only when using this
        function

    Usage::

        # for API interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_API)

        # for CLI interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_CLI)

        # or in one line with default interface
        result = upload_manifest_locked(org_id, manifests.clone())
        subscription_id = result['id']
    """
    if interface not in [INTERFACE_API, INTERFACE_CLI]:
        raise ValueError(
            f'upload manifest with interface "{interface}" not supported')
    if manifest is None:
        manifest = clone()
    if timeout is None:
        # Set the timeout to 1500 seconds to align with the API timeout.
        # And as we are in locked state, other functions/tests can try to upload the manifest in
        # other processes and we do not want to be interrupted by the default configuration
        # ssh_client timeout.
        timeout = 1500
    if interface == INTERFACE_API:
        with manifest:
            result = entities.Subscription().upload(
                data={'organization_id': org_id},
                files={'content': manifest.content})
    else:
        # interface is INTERFACE_CLI: push the manifest file to the server
        # first, then import it via hammer
        with manifest:
            upload_file(manifest.content, manifest.filename)
            result = Subscription.upload(
                {
                    'file': manifest.filename,
                    'organization-id': org_id
                },
                timeout=timeout)
    return result
def test_negative_create_1(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then try to create new one with same name

    @feature: GPG Keys

    @assert: gpg key is not created

    @BZ: 1172009
    """
    # Setup data to pass to the factory.  (The original built the dict,
    # then immediately re-assigned it to a no-op ``.copy()`` of itself —
    # that redundant copy is removed here.)
    test_data = {
        'name': gen_string('alphanumeric'),
        'organization-id': self.org['id'],
    }
    gpg_key = make_gpg_key(test_data)
    # Can we find the new object?
    result = GPGKey().exists(
        {'organization-id': self.org['id']},
        (self.search_key, gpg_key[self.search_key])
    )
    self.assertEqual(gpg_key[self.search_key], result[self.search_key])
    # Setup a new key file and upload it to the server
    test_data['key'] = '/tmp/%s' % gen_alphanumeric()
    gpg_key = create_gpg_key_file()
    self.assertIsNotNone(gpg_key, 'GPG Key file must be created')
    ssh.upload_file(local_file=gpg_key, remote_file=test_data['key'])
    # Try to create a gpg key with the same name — must be rejected
    with self.assertRaises(CLIReturnCodeError):
        GPGKey().create(test_data)
def make_file_repository_upload_contents(self, options=None):
    """Makes a new File repository, Upload File/Multiple Files
    and asserts its success.

    :param options: optional repository creation options; must carry a
        truthy ``content-type``.  When omitted, a ``file`` repository is
        created under ``self.product``.  A truthy ``multi_upload`` key
        switches to uploading a whole local directory.
    :raises CLIFactoryError: when ``content-type`` is missing or falsy.
    :returns: the refreshed repository info dict (content counts current).
    """
    if options is None:
        options = {
            'name': self.file_repo_name,
            'product-id': self.product['id'],
            'content-type': 'file',
        }
    if not options.get('content-type'):
        raise CLIFactoryError('Please provide a valid Content Type.')
    file_repo = make_repository(options)
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    if 'multi_upload' not in options or not options['multi_upload']:
        # single-file mode: upload one known RPM
        ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD),
                        remote_file=remote_path)
    else:
        # multi-file mode: upload the whole local data directory into a
        # fresh randomly-named remote directory
        remote_path = "/tmp/{}/".format(gen_string('alpha'))
        ssh.upload_files(local_dir=os.getcwd() + "/../data/",
                         remote_dir=remote_path)
    result = Repository.upload_content(
        {
            'name': file_repo['name'],
            'organization': file_repo['organization'],
            'path': remote_path,
            'product-id': file_repo['product']['id'],
        }
    )
    self.assertIn(
        "Successfully uploaded file '{0}'".format(RPM_TO_UPLOAD),
        result[0]['message']
    )
    # re-fetch so the content counts reflect the upload
    file_repo = Repository.info({'id': file_repo['id']})
    self.assertGreater(int(file_repo['content-counts']['files']), 0)
    return file_repo
def setUpClass(cls):
    """Upload the OpenSCAP content file to the server once per class.

    Stores the content file's base name in ``cls.file_name`` and copies
    the file to ``/tmp`` on the server so tests can reference it there.
    """
    super(OpenScapTestCase, cls).setUpClass()
    file_name = settings.oscap.content_path
    # last path component; the original spelled this as
    # ``split('/')[(file_name.split('/')).__len__() - 1]`` which is
    # exactly index -1
    cls.file_name = file_name.split('/')[-1]
    ssh.upload_file(
        local_file=settings.oscap.content_path,
        remote_file="/tmp/{0}".format(cls.file_name),
    )
def test_positive_update_key(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then update its gpg key file

    @feature: GPG Keys

    @assert: gpg key is updated
    """
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # new random content, guaranteed different from the imported key
    content = gen_alphanumeric(gen_integer(20, 50))
    self.assertNotEqual(gpg_key['content'], content)
    local_key = create_gpg_key_file(content)
    self.assertIsNotNone(local_key, 'GPG Key file must be created')
    # push the new key file to the server, then point the update at it
    key = '/tmp/%s' % gen_alphanumeric()
    ssh.upload_file(local_file=local_key, remote_file=key)
    GPGKey.update({
        'key': key,
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    })
    # re-read the key and confirm the content was replaced
    gpg_key = GPGKey.info({
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['content'], content)
def setUpClass(cls):
    """Upload the OpenSCAP content file to the server once per class.

    Stores the content file's base name in ``cls.file_name`` and copies
    the file to ``/tmp`` on the server so tests can reference it there.
    """
    super(OpenScapTestCase, cls).setUpClass()
    file_name = settings.oscap.content_path
    # last path component; the original spelled this as
    # ``split('/')[(file_name.split('/')).__len__() - 1]`` which is
    # exactly index -1
    cls.file_name = file_name.split('/')[-1]
    ssh.upload_file(
        local_file=settings.oscap.content_path,
        remote_file="/tmp/{0}".format(cls.file_name),
    )
def test_positive_export_rh_product(self): """Export a repository from the Red Hat product @Feature: Repository - Export @Assert: Repository was successfully exported, rpm files are present on satellite machine """ # Enable RH repository with manifests.clone() as manifest: ssh.upload_file(manifest.content, manifest.filename) Subscription.upload({ 'file': manifest.filename, 'organization-id': self.org['id'], }) RepositorySet.enable({ 'basearch': 'x86_64', 'name': REPOSET['rhva6'], 'organization-id': self.org['id'], 'product': PRDS['rhel'], 'releasever': '6Server', }) repo = Repository.info({ 'name': REPOS['rhva6']['name'], 'organization-id': self.org['id'], 'product': PRDS['rhel'], }) repo_export_dir = ( '/mnt/{0}/{1}-{2}-{3}/{1}/{4}/content/dist/rhel/server/6/6Server/' 'x86_64/rhev-agent/3/os'.format( self.export_dir, self.org['label'], PRDS['rhel'].replace(' ', '_'), repo['label'], ENVIRONMENT, )) # Update the download policy to 'immediate' Repository.update({ 'download-policy': 'immediate', 'id': repo['id'], }) # Export the repository Repository.export({'id': repo['id']}) # Verify export directory is empty result = ssh.command('ls -l {0} | grep .rpm'.format(repo_export_dir)) self.assertEqual(len(result.stdout), 0) # Synchronize the repository Repository.synchronize({'id': repo['id']}) # Export the repository once again Repository.export({'id': repo['id']}) # Verify RPMs were successfully exported result = ssh.command('ls -l {0} | grep .rpm'.format(repo_export_dir)) self.assertEqual(result.return_code, 0) self.assertGreaterEqual(len(result.stdout), 1)
def test_pre_performance_tuning_apply(self):
    """In preupgrade scenario we will apply the medium tuning size.

    :id: preupgrade-83404326-20b7-11ea-a370-48f17f1fc2e1

    :steps:
        1. Run satellite-installer --disable-system-checks.

    :expectedresults: Medium tuning parameter should be applied.
    """
    # detect whether custom-hiera already pins the wiredTiger storage
    # engine; grep's return code (0 = present, 1 = absent) is passed on
    # as a flag to _create_custom_hiera_file
    cmd = (
        'grep "mongodb::server::storage_engine: \'wiredTiger\'" '
        '/etc/foreman-installer/custom-hiera.yaml'
    )
    mongodb_type = ssh.command(cmd).return_code
    self._create_custom_hiera_file(mongodb_type, "medium")
    try:
        ssh.upload_file('custom-hiera.yaml', '/etc/foreman-installer')
        command_output = ssh.command(
            'satellite-installer -s --disable-system-checks',
            connection_timeout=1000
        ).stdout
        assert ' Success!' in command_output
    except Exception:
        # roll back to the default tuning profile so the system is left
        # usable, then re-raise the original failure
        self._create_custom_hiera_file(mongodb_type, "default")
        ssh.upload_file('custom-hiera.yaml', '/etc/foreman-installer')
        command_output = ssh.command(
            'satellite-installer -s --disable-system-checks',
            connection_timeout=1000
        ).stdout
        assert ' Success!' in command_output
        raise
def test_positive_copy_subscription(self):
    """Copy Activation key and verify contents

    @Feature: Activation key copy

    @Steps:

    1. Create parent key and add content
    2. Copy Activation key by passing id of parent
    3. Verify content was successfully copied

    @Assert: Activation key is successfully copied
    """
    # Begin test setup
    parent_ak = self._make_activation_key()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        "file": manifest.filename,
        "organization-id": self.org["id"],
    })
    subscription_result = Subscription.list(
        {"organization-id": self.org["id"]},
        per_page=False,
    )
    ActivationKey.add_subscription({
        u"id": parent_ak["id"],
        u"subscription-id": subscription_result[0]["id"],
    })
    # End test setup
    new_name = gen_string("utf8")
    copy_output = ActivationKey.copy({
        u"id": parent_ak["id"],
        u"new-name": new_name,
        u"organization-id": self.org["id"],
    })
    self.assertEqual(copy_output[0], u"Activation key copied")
    copied_subs = ActivationKey.subscriptions({
        u"name": new_name,
        u"organization-id": self.org["id"],
    })
    # Verify that the subscription copied over: its name must appear in
    # the subscription list line of the copied key's output.
    self.assertIn(subscription_result[0]["name"], copied_subs[3])
def test_add_subscription(self):
    """@Test: Test that subscription can be added to activation key

    @Feature: Activation key - Host

    @Steps:

    1. Create Activation key
    2. Upload manifest and add subscription
    3. Associate the activation key to subscription

    @Assert: Subscription successfully added to activation key
    """
    # NOTE(review): ``manifests.clone()`` is used directly as both the
    # local and remote file argument — presumably it returns a file path
    # string in this vintage of the code base; confirm against
    # ``manifests.clone``.
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    org_id = make_org()['id']
    ackey_id = self._make_activation_key()['id']
    Subscription.upload({
        'file': manifest,
        'organization-id': org_id,
    })
    # per_page=False disables pagination so all subscriptions are listed
    subs_id = Subscription.list(
        {'organization-id': org_id},
        per_page=False)
    result = ActivationKey.add_subscription({
        u'id': ackey_id,
        u'subscription-id': subs_id[0]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
def test_repositoryset_enable_by_label(self):
    """@Test: Enable repo from reposet by org label, reposet and product
    names

    @Feature: Repository-set

    @Assert: Repository was enabled
    """
    org = make_org()
    # NOTE(review): ``manifests.clone()`` is used directly as both the
    # local and remote file argument — presumably it returns a file path
    # string here; confirm against ``manifests.clone``.
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    Subscription.upload({
        u'file': manifest,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    result = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
    })
    # check the enabled flag of the exact arch/release we enabled
    enabled = [
        repo['enabled']
        for repo in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ][0]
    self.assertEqual(enabled, 'true')
def setup_content(request):
    """Pytest fixture for setting up an organization, manifest,
    content-view, lifecycle environment, and activation key with
    subscriptions

    Results are exposed on the requesting class as ``setup_org``,
    ``setup_new_ak``, ``setup_subs_id``, ``setup_env`` and
    ``setup_content_view``.
    """
    org = make_org()
    # import a cloned manifest so subscriptions exist in the org
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    new_product = make_product({'organization-id': org['id']})
    new_repo = make_repository({'product-id': new_product['id']})
    Repository.synchronize({'id': new_repo['id']})
    # build and publish a content view containing the synced repo
    content_view = make_content_view({'organization-id': org['id']})
    ContentView.add_repository(
        {'id': content_view['id'], 'organization-id': org['id'], 'repository-id': new_repo['id']}
    )
    ContentView.publish({'id': content_view['id']})
    # promote the freshly published version into a new lifecycle env
    env = make_lifecycle_environment({'organization-id': org['id']})
    cvv = ContentView.info({'id': content_view['id']})['versions'][0]
    ContentView.version_promote({'id': cvv['id'], 'to-lifecycle-environment-id': env['id']})
    # activation key with auto-attach off; attach the first subscription
    new_ak = make_activation_key(
        {
            'lifecycle-environment-id': env['id'],
            'content-view': content_view['name'],
            'organization-id': org['id'],
            'auto-attach': False,
        }
    )
    subs_id = Subscription.list({'organization-id': org['id']}, per_page=False)
    ActivationKey.add_subscription({'id': new_ak['id'], 'subscription-id': subs_id[0]['id']})
    # hand everything to the requesting test class
    request.cls.setup_org = org
    request.cls.setup_new_ak = new_ak
    request.cls.setup_subs_id = subs_id
    request.cls.setup_env = env
    request.cls.setup_content_view = content_view
def put(self, local_path, remote_path=None):
    """Copy a local file onto the virtual machine over SSH.

    :param local_path: path of the file on the local host.
    :param remote_path: destination path on the VM; semantics follow
        ``ssh.upload_file`` when left as ``None``.
    :raises VirtualMachineError: when the VM has not been created yet.
    """
    if self._created:
        ssh.upload_file(local_path, remote_path, hostname=self.ip_addr)
    else:
        raise VirtualMachineError(
            'The virtual machine should be created before putting any file'
        )
def test_positive_update_key(name, module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then update its gpg key file

    :id: d3a72892-3414-4178-98b7-e0780d9b6587

    :parametrized: yes

    :expectedresults: gpg key is updated

    :CaseImportance: Critical
    """
    gpg_key = make_content_credential({'organization-id': module_org.id})
    # new random content, guaranteed different from the imported key
    content = gen_alphanumeric(gen_integer(20, 50))
    assert gpg_key['content'] != content
    local_key = create_gpg_key_file(content)
    # bug fix: the original asserted ``gpg_key`` (a dict that is always
    # truthy here) instead of the newly created local file path, so a
    # failed file creation could never be caught
    assert local_key, 'GPG Key file must be created'
    key = '/tmp/%s' % gen_alphanumeric()
    ssh.upload_file(local_file=local_key, remote_file=key)
    ContentCredential.update({
        'key': key,
        'name': gpg_key['name'],
        'organization-id': module_org.id
    })
    # re-read the credential and confirm the content was replaced
    gpg_key = ContentCredential.info({
        'name': gpg_key['name'],
        'organization-id': module_org.id
    })
    assert gpg_key['content'] == content
def upload_rhsso_entity(json_content, entity_name):
    """Helper method upload the entity json request as file on RHSSO Server

    Serializes ``json_content`` into a local file named ``entity_name``
    and copies it to the same path on the RHSSO host.
    """
    with open(entity_name, "w") as handle:
        json.dump(json_content, handle)
    ssh.upload_file(
        entity_name,
        entity_name,
        hostname=settings.rhsso.host_name,
    )
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6ce2f777-f230-4bb8-9822-2cf3580c21aa

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    org = make_org()
    # import a cloned manifest so the RH product/reposet is available
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    # schedule the sync ``delay`` seconds in the future
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': org['id'],
        'sync-date': (datetime.utcnow() + timedelta(seconds=delay)).strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(repo, ['errata', 'packages'],
                               after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(delay / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo, ['errata', 'packages'],
                               after_sync=False)
    # Wait the rest of expected time
    sleep(delay / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_enable_by_name(self):
    """Enable repo from reposet by names of reposet, org and product

    @Feature: Repository-set

    @Assert: Repository was enabled
    """
    org = make_org()
    # import a cloned manifest so RH repository sets are available
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    RepositorySet.enable({
        u'basearch': 'x86_64',
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
    })
    result = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization': org['name'],
        u'product': PRDS['rhel'],
    })
    # check the enabled flag of the exact arch/release we enabled
    enabled = [
        repo['enabled']
        for repo in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ][0]
    self.assertEqual(enabled, 'true')
def test_positive_delete_manifest_as_another_user(self):
    """Verify that uploaded manifest if visible and deletable
        by a different user than the one who uploaded it

    :id: 4861bcbc-785a-436d-98cf-13cfef7d6907

    :expectedresults: manifest is refreshed

    :BZ: 1669241

    :CaseImportance: Medium
    """
    org = entities.Organization().create()
    # two independent admin users scoped to the same organization
    user1_password = gen_string('alphanumeric')
    user1 = entities.User(
        admin=True,
        password=user1_password,
        organization=[org],
        default_organization=org
    ).create()
    user2_password = gen_string('alphanumeric')
    user2 = entities.User(
        admin=True,
        password=user2_password,
        organization=[org],
        default_organization=org
    ).create()
    # use the first admin to upload a manifest
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.with_user(username=user1.login, password=user1_password).upload(
        {'file': manifest.filename, 'organization-id': org.id}
    )
    # try to search and delete the manifest with another admin
    Subscription.with_user(username=user2.login, password=user2_password).delete_manifest(
        {'organization-id': org.id}
    )
    # no subscriptions must remain after the delete
    self.assertEquals(0, len(Subscription.list({'organization-id': org.id})))
def test_positive_add_subscription_by_id(self):
    """Test that subscription can be added to activation key

    :id: b884be1c-b35d-440a-9a9d-c854c83e10a7

    :Steps:

        1. Create Activation key
        2. Upload manifest and add subscription
        3. Associate the activation key to subscription

    :expectedresults: Subscription successfully added to activation key

    :CaseLevel: Integration
    """
    # Clone a manifest and push it to the server before importing it.
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    new_org_id = make_org()['id']
    key_id = self._make_activation_key()['id']
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': new_org_id,
    })
    # Full, unpaginated subscription listing for the new organization.
    subscriptions = Subscription.list(
        {'organization-id': new_org_id},
        per_page=False,
    )
    add_output = ActivationKey.add_subscription({
        u'id': key_id,
        u'subscription-id': subscriptions[0]['id'],
    })
    self.assertIn('Subscription added to activation key', add_output)
def test_positive_add_subscription_by_id(self):
    """Test that subscription can be added to activation key

    :id: b884be1c-b35d-440a-9a9d-c854c83e10a7

    :Steps:

        1. Create Activation key
        2. Upload manifest and add subscription
        3. Associate the activation key to subscription

    :expectedresults: Subscription successfully added to activation key

    :BZ: 1463685

    :CaseLevel: Integration
    """
    # clone a manifest and push it to the server so it can be imported
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    org_id = make_org()['id']
    # note: unlike sibling tests, the key is created in the same org
    ackey_id = self._make_activation_key({'organization-id': org_id})['id']
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org_id,
    })
    # per_page=False disables pagination so all subscriptions are listed
    subs_id = Subscription.list(
        {'organization-id': org_id},
        per_page=False
    )
    result = ActivationKey.add_subscription({
        u'id': ackey_id,
        u'subscription-id': subs_id[0]['id'],
    })
    self.assertIn('Subscription added to activation key', result)
def test_positive_update_key(self):
    """Import a gpg key from file, then replace its key content and
    verify the stored content changed.

    :id: 58a8ed14-adfc-4046-af63-59a7008ff4d7

    :expectedresults: gpg key is updated

    :CaseImportance: Critical
    """
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Fresh random content that cannot equal the imported key's content.
    new_content = gen_alphanumeric(gen_integer(20, 50))
    self.assertNotEqual(gpg_key['content'], new_content)
    key_file = create_gpg_key_file(new_content)
    self.assertIsNotNone(key_file, 'GPG Key file must be created')
    # Upload the replacement key to the server, then run the update.
    remote_key_path = '/tmp/%s' % gen_alphanumeric()
    ssh.upload_file(local_file=key_file, remote_file=remote_key_path)
    update_options = {
        'key': remote_key_path,
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    }
    GPGKey.update(update_options)
    # Re-read the key and confirm the content was replaced.
    gpg_key = GPGKey.info({
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['content'], new_content)
def organization_with_tr_data_manifests(cls, options=None): """Import Organizations (from spacewalk-report users) with manifests. :returns: A tuple of SSHCommandResult and a Dictionary containing the transition data of the Import """ # prepare manifests for every organization manifest_list = [] csv_records = cls.csv_to_dataset([options['csv-file']]) man_dir = ssh.command(u'mktemp -d').stdout[0] for org in set([rec['organization'] for rec in csv_records]): for char in [' ', '.', '#']: org = org.replace(char, '_') man_file = manifests.clone() ssh.upload_file(man_file, u'{0}/{1}.zip'.format(man_dir, org)) manifest_list.append(u'{0}/{1}.zip'.format(man_dir, org)) os.remove(man_file) options.update({'upload-manifests-from': man_dir}) result = cls.organization(options) ssh.command(u'rm -rf {0}'.format(man_dir)) transition_data = cls.read_transition_csv( ssh.command( u'ls -v ${HOME}/.transition_data/organizations*' ).stdout[:-1] ) return (result, transition_data)
def organization_with_tr_data_manifests(cls, options=None): """Import Organizations (from spacewalk-report users) with manifests. :returns: A tuple of SSHCommandResult and a Dictionary containing the transition data of the Import """ # prepare manifests for every organization manifest_list = [] csv_records = cls.csv_to_dataset([options['csv-file']]) man_dir = ssh.command(u'mktemp -d').stdout[0] for org in set([rec['organization'] for rec in csv_records]): for char in [' ', '.', '#']: org = org.replace(char, '_') with manifests.clone() as manifest: ssh.upload_file( manifest.content, u'{0}/{1}.zip'.format(man_dir, org) ) manifest_list.append(u'{0}/{1}.zip'.format(man_dir, org)) options.update({'upload-manifests-from': man_dir}) result = cls.organization(options) ssh.command(u'rm -rf {0}'.format(man_dir)) transition_data = cls.read_transition_csv( ssh.command( u'ls -v ${HOME}/.transition_data/organizations*' ).stdout[:-1] ) return (result, transition_data)
def setUpClass(cls):
    """Upload the configured tailoring file to the server once per class.

    ``cls.file_name`` keeps the tailoring file's base name; the file is
    placed under ``/tmp`` on the server for the tests to consume.
    """
    super(TailoringFilesTestCase, cls).setUpClass()
    tailoring_path = settings.oscap.tailoring_path
    cls.file_name = os.path.split(tailoring_path)[1]
    ssh.upload_file(
        local_file=tailoring_path,
        remote_file="/tmp/{0}".format(cls.file_name),
    )
def _upload_manifest(self, manifest, org_id):
    """Uploads a manifest file and import it into an organization

    :param manifest: path of the manifest file (used both as local source
        and remote destination).
    :param org_id: id of the organization that receives the manifest.
    """
    upload_file(manifest, remote_file=manifest)
    import_options = {
        'file': manifest,
        'organization-id': org_id,
    }
    Subscription.upload(import_options)
def create(cls, options=None):
    """Creates a new record using the arguments passed via dictionary.

    ``options['file']`` is required: either the name of a known data file
    (``REPORT_TEMPLATE_FILE``) or the literal template content.  Either
    way the content is written to a local temporary file which is then
    uploaded to the same path on the remote machine before the ``create``
    sub-command runs.

    :param options: dict of CLI options; must contain a non-None ``file``.
    :raises CLIError: when ``file`` is missing/None, or when the entity
        requires ``organization-id`` and it is absent.
    :returns: the ``info`` dict of the created object on success,
        otherwise the raw csv-parsed command result.
    """
    cls.command_sub = 'create'
    if options is None:
        options = {}
    # bug fix: ``options['file']`` raised KeyError when the key was absent
    # (e.g. with the default ``options=None``) instead of the intended
    # CLIError — .get() reports both "missing" and "None" uniformly
    if options.get('file') is None:
        tmpl = 'file content is required for {0}.creation'
        raise CLIError(tmpl.format(cls.__name__))
    if options['file'] == REPORT_TEMPLATE_FILE:
        local_path = get_data_file(REPORT_TEMPLATE_FILE)
    else:
        local_path = ''
    # --- create file at remote machine --- #
    # NOTE(review): the file descriptor returned by mkstemp is discarded
    # without being closed — confirm whether the fd leak matters for
    # long-running suites.
    (_, layout) = mkstemp(text=True)
    chmod(layout, 0o700)
    if not local_path:
        # options['file'] holds the template content itself
        with open(layout, 'w') as rt:
            rt.write(options['file'])
    # End - Special handling of temporary file
    else:
        # options['file'] named a known data file: copy its contents over
        with open(local_path, 'r') as file:
            file_data = file.read()
        with open(layout, 'w') as rt:
            rt.write(file_data)
    ssh.upload_file(local_file=layout, remote_file=layout)
    # -------------------------------------- #
    options['file'] = layout
    result = cls.execute(
        cls._construct_command(options), output_format='csv')
    # Extract new object ID if it was successfully created
    if len(result) > 0 and 'id' in result[0]:
        obj_id = result[0]['id']
        # Fetch new object
        # Some Katello obj require the organization-id for subcommands
        info_options = {'id': obj_id}
        if cls.command_requires_org:
            if 'organization-id' not in options:
                tmpl = 'organization-id option is required for {0}.create'
                raise CLIError(tmpl.format(cls.__name__))
            info_options['organization-id'] = options['organization-id']
        new_obj = cls.info(info_options)
        # stdout should be a dictionary containing the object
        if len(new_obj) > 0:
            result = new_obj
    return result
def test_positive_synchronize_rh_product_past_sync_date(self):
    """Create a sync plan with past datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next
    sync occurrence

    :id: 47280ef4-3936-4dbc-8ed0-1076aa8d40df

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 80
    org = make_org()
    # import a cloned manifest so the RH product/reposet is available
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    # Back-date the sync just under one interval so the next hourly
    # occurrence lands ~delay/2 seconds from now.
    # bug fix: timedelta() interprets its first positional argument as
    # *days*, so the offset must be passed explicitly as seconds.
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': org['id'],
        'sync-date': (
            datetime.utcnow() - timedelta(seconds=interval - delay/2)
        ).strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Verify product has not been synced yet
    sleep(delay/4)
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(delay)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # seconds in the 'hourly' sync interval
    org = make_org()
    org_id = org['id']
    # Import a cloned manifest so the RH repository can be enabled
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org_id,
    })
    # Hourly sync plan whose first occurrence is "now"
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': org_id,
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org_id,
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    rh_product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org_id,
    })
    rh_repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': rh_product['name'],
        'organization-id': org_id,
    })
    # Hook the plan to the product
    Product.set_sync_plan({
        'id': rh_product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Halfway through the interval nothing should have synced yet
    sleep(interval / 2)
    self.validate_repo_content(
        rh_repo, ['errata', 'packages'], after_sync=False)
    # Once the full interval has elapsed the repo must hold content
    sleep(interval / 2)
    self.validate_repo_content(rh_repo, ['errata', 'packages'])
def upload_manifest_locked(org_id, manifest=None, interface=INTERFACE_API, timeout=None):
    """Upload a manifest with locking, using the requested interface.

    :type org_id: int
    :type manifest: robottelo.manifests.Manifest
    :type interface: str
    :type timeout: int
    :returns: the upload result

    Note: The manifest uploading is strictly locked only when using this
        function

    Usage::

        # for API interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_API)

        # for CLI interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_CLI)

        # or in one line with default interface
        result = upload_manifest_locked(org_id, manifests.clone())
        subscription_id = result['id']
    """
    # Reject unknown interfaces up front rather than silently falling
    # through to the CLI branch.
    if interface not in [INTERFACE_API, INTERFACE_CLI]:
        raise ValueError(
            'upload manifest with interface "{0}" not supported'
            .format(interface)
        )
    if manifest is None:
        manifest = clone()
    if timeout is None:
        # Set the timeout to 1500 seconds to align with the API timeout.
        # And as we are in locked state, other functions/tests can try to
        # upload the manifest in other processes and we do not want to be
        # interrupted by the default configuration ssh_client timeout.
        timeout = 1500
    if interface == INTERFACE_API:
        # API path: stream the manifest content directly to the endpoint.
        with manifest:
            result = entities.Subscription().upload(
                data={'organization_id': org_id},
                files={'content': manifest.content},
            )
    else:
        # interface is INTERFACE_CLI: stage the file remotely, then ask
        # hammer to consume it.
        with manifest:
            upload_file(manifest.content, manifest.filename)
            result = Subscription.upload({
                'file': manifest.filename,
                'organization-id': org_id,
            }, timeout=timeout)
    return result
def manifest_org(org):
    """Upload a freshly cloned manifest into ``org`` and hand it back."""
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    return org
def test_cv_preupgrade_scenario(self, request):
    """Pre-upgrade scenario that creates content-view with various repositories.

    :id: a4ebbfa1-106a-4962-9c7c-082833879ae8

    :steps:
        1. Create custom repositories of yum, puppet and file type.
        2. Create content-view.
        3. Add yum, file repositories and puppet module in the content view.
        4. Publish the content-view.

    :expectedresults: Content-view created with various repositories.
    """
    # The pytest node name is baked into every entity name so the
    # post-upgrade scenario can find them again.
    test_name = request.node.name
    puppet_module = {'name': 'versioned', 'version': '3.3.3'}
    org = entities.Organization(name=f'{request.node.name}_org').create()
    product = entities.Product(organization=org, name=f'{request.node.name}_prod').create()
    # Yum repository, synced so the CV actually carries RPM content
    yum_repository = entities.Repository(product=product, name=f'{test_name}_yum_repo', url=FAKE_1_YUM_REPO).create()
    entities.Repository.sync(yum_repository)
    # Puppet repository, synced to make the module below available
    puppet_repository = entities.Repository(
        product=product,
        name=f'{request.node.name}_puppet_repo',
        content_type="puppet",
        url=CUSTOM_PUPPET_REPO,
    ).create()
    entities.Repository.sync(puppet_repository)
    # First match for the wanted name/version pair
    puppet_module_list = PuppetModule.list({
        'search': 'name={name} and version={version}'.format(**puppet_module)
    })[0]
    # File repository fed with a single uploaded RPM
    file_repository = entities.Repository(product=product, name=f'{test_name}_file_repo', content_type="file").create()
    remote_file_path = f"/tmp/{RPM_TO_UPLOAD}"
    ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD), remote_file=remote_file_path)
    with open(f'{get_data_file(RPM_TO_UPLOAD)}', "rb") as content:
        file_repository.upload_content(files={'content': content})
    assert RPM_TO_UPLOAD in file_repository.files()["results"][0]['name']
    # Content view holding the yum and file repos plus the puppet module
    cv = entities.ContentView(name=f"{test_name}_cv", organization=org).create()
    cv.repository = [yum_repository, file_repository]
    cv.update(['repository'])
    # The puppet module is attached via hammer, not the API object
    ContentView.puppet_module_add({
        'content-view-id': cv.id,
        'name': puppet_module_list['name'],
        'author': puppet_module_list['author'],
    })
    cv.publish()
    # NOTE(review): puppet_module on the cached API object stays empty
    # because the module was added through the CLI — confirm intended.
    assert len(cv.puppet_module) == 0
    assert len(cv.read_json()['versions']) == 1
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6ce2f777-f230-4bb8-9822-2cf3580c21aa

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 10 * 60  # seconds until the plan's first occurrence
    org = make_org()
    # Import a manifest so the RH repository set can be enabled
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    start_time = datetime.utcnow() + timedelta(seconds=delay)
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': org['id'],
        'sync-date': start_time.strftime("%Y-%m-%d %H:%M:%S"),
    })
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    rh_product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    rh_repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': rh_product['name'],
        'organization-id': org['id'],
    })
    # Before the plan is attached the repo must be empty
    self.validate_repo_content(
        rh_repo, ['errata', 'packages'], after_sync=False)
    Product.set_sync_plan({
        'id': rh_product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Still nothing at the halfway mark...
    sleep(delay/2)
    self.validate_repo_content(
        rh_repo, ['errata', 'packages'], after_sync=False)
    # ...but content must be there once the sync date has passed
    sleep(delay/2)
    self.validate_repo_content(rh_repo, ['errata', 'packages'])
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # length of the 'hourly' interval, in seconds
    org = make_org()
    # RH content requires an imported manifest
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    plan_options = {
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    }
    sync_plan = self._make_sync_plan(plan_options)
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Bind the plan to the product so the scheduler picks it up
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    half_interval = interval / 2
    # No content expected before the next hourly occurrence fires
    sleep(half_interval)
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # After a full interval the automatic sync must have run
    sleep(half_interval)
    self.validate_repo_content(repo, ['errata', 'packages'])
def download_gce_cert():
    """Render the GCE certificate from settings to a temp json file,
    push it to the satellite, and download the server-side copy.

    :raises GCECertNotFoundError: when the uploaded certificate is not
        found at ``settings.gce.cert_path`` on the satellite.
    :return: result of ``download_server_file`` for the staged cert.
    """
    import os  # local import: only needed to close the mkstemp fd

    fd, gce_cert = mkstemp(suffix='.json')
    os.close(fd)  # mkstemp hands back an open descriptor; don't leak it
    cert = json.loads(settings.gce.cert)
    with open(gce_cert, 'w') as f:
        json.dump(cert, f)
    ssh.upload_file(gce_cert, settings.gce.cert_path)
    # sanity check: the upload must have landed on the satellite
    if ssh.command(f'[ -f {settings.gce.cert_path} ]').return_code != 0:
        raise GCECertNotFoundError(
            f"The GCE certificate in path {settings.gce.cert_path} is not found in satellite."
        )
    return download_server_file('json', settings.gce.cert_url)
def gce_cert():
    """Write the GCE certificate from settings to a local temp file,
    upload it to the satellite, and return the parsed cert dict.

    The returned dict gains a ``local_path`` key pointing at the local
    temp file.

    :raises GCECertNotFoundError: when the uploaded certificate is not
        found at ``settings.gce.cert_path`` on the satellite.
    :return: the certificate as a dict.
    """
    import os  # local import: only needed to close the mkstemp fd

    fd, gce_cert_file = mkstemp(suffix='.json')
    os.close(fd)  # mkstemp hands back an open descriptor; don't leak it
    cert = json.loads(settings.gce.cert)
    cert['local_path'] = gce_cert_file
    with open(gce_cert_file, 'w') as f:
        json.dump(cert, f)
    ssh.upload_file(gce_cert_file, settings.gce.cert_path)
    # sanity check: the upload must have landed on the satellite
    if ssh.command(f'[ -f {settings.gce.cert_path} ]').return_code != 0:
        raise GCECertNotFoundError(
            f"The GCE certificate in path {settings.gce.cert_path} is not found in satellite."
        )
    return cert
def test_pre_performance_tuning_apply(self):
    """In preupgrade scenario we apply the medium tuning size.

    :id: preupgrade-83404326-20b7-11ea-a370-48f17f1fc2e1

    :steps:
        1. Create the custom-hiera.yaml file based on mongodb type and selected tune size.
        2. Run the satellite-installer --disable-system-checks to apply the medium tune size.
        3. Check the satellite-installer command status
        4. Check the applied parameter's value, to make sure the values are set
           successfully or not.
        5. If something gets wrong with updated tune parameter restore the system states
           with default custom-hiera.yaml file.

    :expectedresults: Medium tuning parameter should be applied.
    """
    # Detect the mongodb storage engine; the grep return code (0 when
    # wiredTiger is configured) doubles as the "mongodb type" flag.
    cmd = ('grep "mongodb::server::storage_engine: \'wiredTiger\'" '
           '/etc/foreman-installer/custom-hiera.yaml')
    mongodb_type = ssh.command(cmd).return_code
    # Render the medium-tune custom-hiera.yaml locally
    self._create_custom_hiera_file(mongodb_type, "medium")
    try:
        ssh.upload_file(
            local_file='custom-hiera.yaml',
            remote_file='/etc/foreman-installer/custom-hiera.yaml',
        )
        command_output = ssh.command(
            'satellite-installer -s --disable-system-checks', timeout=1000)
        # Installer emits 'Success!' on a clean run
        command_status = [
            status.strip() for status in command_output.stdout
        ]
        assert 'Success!' in command_status
        # Compare the applied tuning values against the expected set
        expected_tune_size, actual_tune_size = self._data_creation_of_set_tune_params(
            MEDIUM_TUNING_DATA, TUNE_DATA_COLLECTION_REGEX, MEDIUM_TUNE_PARAM_GROUPS)
        for key, value in actual_tune_size.items():
            assert expected_tune_size[key] == value
    except Exception:
        # Any failure: restore default tuning and rerun the installer so
        # the system is left in its original state, then re-raise.
        self._create_custom_hiera_file(mongodb_type, "default")
        ssh.upload_file(
            local_file='custom-hiera.yaml',
            remote_file='/etc/foreman-installer/custom-hiera.yaml',
        )
        command_output = ssh.command(
            'satellite-installer -s --disable-system-checks', timeout=1000)
        command_status = [
            status.strip() for status in command_output.stdout
        ]
        assert 'Success!' in command_status
        raise
def generate_certs(self):
    """Push the cert-generation helper files to the server and run the
    script, asserting a clean exit."""
    # Stage both helper files next to each other on the remote host
    for helper in ('certs.sh', 'extensions.txt'):
        upload_file(
            local_file=get_data_file(helper),
            remote_file=helper,
        )
    with get_connection(timeout=300) as connection:
        result = connection.run("bash certs.sh")
        assert result.return_code == 0
def test_positive_disable_by_id(self):
    """Disable repo from reposet by IDs of reposet, org and product

    :id: 0d6102ba-3fb9-4eb8-972e-d537e252a8e6

    :expectedresults: Repository was disabled

    :CaseImportance: Critical
    """
    org = make_org()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        u'organization-id': org['id'],
    })
    product_id = Product.info({
        u'name': PRDS['rhel'],
        u'organization-id': org['id'],
    })['id']
    reposet_id = RepositorySet.info({
        u'name': REPOSET['rhva6'],
        u'organization-id': org['id'],
        u'product-id': product_id,
    })['id']
    # The same selector pinpoints the 6Server/x86_64 repository for
    # both the enable and the disable call.
    selector = {
        u'basearch': 'x86_64',
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
        u'releasever': '6Server',
    }
    RepositorySet.enable(dict(selector))
    RepositorySet.disable(dict(selector))
    result = RepositorySet.available_repositories({
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
    })
    enabled_flags = [
        repo['enabled']
        for repo in result
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ]
    self.assertEqual(enabled_flags[0], 'false')
def _upload_manifest(self, org_id, manifest=None):
    """Upload a manifest into an organization.

    A freshly cloned manifest is used when ``manifest`` is None.
    """
    manifest = manifests.clone() if manifest is None else manifest
    upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        u'file': manifest.filename,
        'organization-id': org_id,
    })
    # release the manifest's file handle once hammer has consumed it
    manifest.content.close()
def test_positive_delete_subscription(self):
    """Check if deleting a subscription removes it from Activation key

    :id: bbbe4641-bfb0-48d6-acfc-de4294b18c15

    :expectedresults: Deleting subscription removes it from the
        Activation key

    :CaseLevel: Integration
    """
    activation_key = self._make_activation_key()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': self.org['id'],
    })
    # newest subscription first from the tail of the id-descending list
    subscriptions = Subscription.list(
        {
            'organization-id': self.org['id'],
            'order': 'id desc'
        },
        per_page=False)
    chosen_sub_id = subscriptions[-1]['id']
    add_output = ActivationKey.add_subscription({
        u'id': activation_key['id'],
        u'subscription-id': chosen_sub_id,
    })
    self.assertIn('Subscription added to activation key', add_output)
    subs_after_add = ActivationKey.subscriptions({
        u'id': activation_key['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(len(subs_after_add), 6)
    remove_output = ActivationKey.remove_subscription({
        u'id': activation_key['id'],
        u'subscription-id': chosen_sub_id,
    })
    self.assertIn('Subscription removed from activation key', remove_output)
    subs_after_remove = ActivationKey.subscriptions({
        u'id': activation_key['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(len(subs_after_remove), 4)
def test_repositoryset_disable_by_id(self):
    """@Test: Disable repo from reposet by IDs of reposet, org and product

    @Feature: Repository-set

    @Assert: Repository was disabled
    """
    org = make_org()
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    Subscription.upload({
        u'file': manifest,
        u'organization-id': org['id'],
    })
    product_id = Product.info({
        u'name': PRDS['rhel'],
        u'organization-id': org['id'],
    })['id']
    reposet_id = RepositorySet.info({
        u'name': REPOSET['rhva6'],
        u'organization-id': org['id'],
        u'product-id': product_id,
    })['id']
    # Flip the 6Server/x86_64 repository on, then straight back off
    for action in (RepositorySet.enable, RepositorySet.disable):
        action({
            u'basearch': 'x86_64',
            u'id': reposet_id,
            u'organization-id': org['id'],
            u'product-id': product_id,
            u'releasever': '6Server',
        })
    available = RepositorySet.available_repositories({
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
    })
    matching = [
        repo['enabled']
        for repo in available
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ]
    self.assertEqual(matching[0], 'false')
def upload_manifest_locked(org_id, manifest, interface=INTERFACE_API):
    """Upload a manifest with locking, using the requested interface.

    :type org_id: int
    :type manifest: robottelo.manifests.Manifest
    :type interface: str
    :returns: the upload result

    Note: The manifest uploading is strictly locked only when using this
        function

    Usage::

        # for API interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_API)

        # for CLI interface
        manifest = manifests.clone()
        upload_manifest_locked(org_id, manifest, interface=INTERFACE_CLI)

        # or in one line with default interface
        result = upload_manifest_locked(org_id, manifests.clone())
        subscription_id = result['id']
    """
    # Reject unknown interfaces up front rather than silently falling
    # through to the CLI branch.
    if interface not in [INTERFACE_API, INTERFACE_CLI]:
        raise ValueError(
            'upload manifest with interface "{0}" not supported'
            .format(interface)
        )
    if interface == INTERFACE_API:
        # API path: stream the manifest content directly to the endpoint.
        with manifest:
            result = entities.Subscription().upload(
                data={'organization_id': org_id},
                files={'content': manifest.content},
            )
    else:
        # interface is INTERFACE_CLI: stage the file remotely, then ask
        # hammer to consume it.
        with manifest:
            upload_file(manifest.content, manifest.filename)
            result = Subscription.upload({
                'file': manifest.filename,
                'organization-id': org_id,
            })
    return result
def test_positive_copy_subscription(self):
    """@Test: Copy Activation key and verify contents

    @Feature: Activation key copy

    @Steps:
        1. Create parent key and add content
        2. Copy Activation key by passing id of parent
        3. Verify content was sucessfully copied

    @Assert: Activation key is sucessfully copied
    """
    # --- setup: a parent key that owns one subscription ---
    org_id = make_org()['id']
    parent_id = make_activation_key({
        u'organization-id': org_id
    })['id']
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    Subscription.upload({
        'file': manifest,
        'organization-id': org_id,
    })
    subscription_result = Subscription.list(
        {'organization-id': org_id}, per_page=False)
    first_sub = subscription_result[0]
    ActivationKey.add_subscription({
        u'id': parent_id,
        u'subscription-id': first_sub['id'],
    })
    # --- exercise: copy the key under a fresh utf8 name ---
    new_name = gen_string('utf8')
    copy_output = ActivationKey.copy({
        u'id': parent_id,
        u'new-name': new_name,
        u'organization-id': org_id,
    })
    self.assertEqual(copy_output[0], u'Activation key copied')
    # --- verify: the copy carries the same subscription ---
    copied_subs = ActivationKey.subscriptions({
        u'name': new_name,
        u'organization-id': org_id,
    })
    self.assertIn(
        first_sub['name'],  # subscription name
        copied_subs[3]      # subscription list
    )
def test_positive_upload_basic(self):
    """@Test: Upload a manifest with minimal input parameters

    @Feature: Manifest/Subscription - Positive Create

    @Assert: Manifest is uploaded
    """
    path_to_manifest = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(path_to_manifest, remote_file=path_to_manifest)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.organization.name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(path_to_manifest)
        success_alert = self.subscriptions.wait_until_element(
            common_locators['alert.success'])
        self.assertTrue(success_alert)
def test_positive_copy_subscription(self):
    """Copy Activation key and verify contents

    :id: f4ee8096-4120-4d06-8c9a-57ac1eaa8f68

    :Steps:
        1. Create parent key and add content
        2. Copy Activation key by passing id of parent
        3. Verify content was successfully copied

    :expectedresults: Activation key is successfully copied

    :CaseLevel: Integration
    """
    # --- setup: a parent key that owns one subscription ---
    parent_ak = self._make_activation_key()
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': self.org['id'],
    })
    subscription_result = Subscription.list(
        {'organization-id': self.org['id']}, per_page=False)
    first_sub = subscription_result[0]
    ActivationKey.add_subscription({
        u'id': parent_ak['id'],
        u'subscription-id': first_sub['id'],
    })
    # --- exercise: copy the key under a fresh utf8 name ---
    new_name = gen_string('utf8')
    copy_output = ActivationKey.copy({
        u'id': parent_ak['id'],
        u'new-name': new_name,
        u'organization-id': self.org['id'],
    })
    self.assertEqual(copy_output[0], u'Activation key copied')
    # --- verify: the copy carries the same subscription ---
    copied_subs = ActivationKey.subscriptions({
        u'name': new_name,
        u'organization-id': self.org['id'],
    })
    self.assertIn(
        first_sub['name'],  # subscription name
        copied_subs[3]      # subscription list
    )