def test_manifest_upload(self):
    """
    @test: upload manifest (positive)

    @feature: Subscriptions/Manifest Upload

    @assert: Manifest are uploaded properly
    """
    mdetails = manifest.fetch_manifest()
    try:
        # The manifest must exist on the remote host before hammer can
        # import it, so push the local copy to the same path remotely.
        upload_file(mdetails['path'], remote_file=mdetails['path'])
        result = Subscription.upload({
            'file': mdetails['path'],
            'organization-id': self.org['id']
        })
        self.assertEqual(result.return_code, 0,
                         "Failed to upload manifest")
        self.assertEqual(
            len(result.stderr), 0,
            "There should not be an exception while uploading manifest.")

        result = Subscription.list({'organization-id': self.org['id']},
                                   per_page=False)
        self.assertEqual(result.return_code, 0,
                         "Failed to list manifests in this org.")
        # Check stderr is empty (as the message states) instead of the
        # previous hard-coded ``len(result.stdout) == 8``, which broke
        # whenever the manifest's subscription count changed.
        self.assertEqual(
            len(result.stderr), 0,
            "There should not be an exception while listing the manifest.")
    finally:
        # Always release the distributor so repeated runs do not leak
        # distributors on the portal side.
        manifest.delete_distributor(ds_uuid=mdetails['uuid'])
def default_url_on_new_port(oldport, newport):
    """Creates context where the default smart-proxy is forwarded on a new
    port

    :param int oldport: Port to be forwarded.
    :param int newport: New port to be used to forward `oldport`.

    :return: A string containing the new capsule URL with port.
    :rtype: str

    """
    logger = logging.getLogger('robottelo')
    hostname = conf.properties['main.server.hostname']
    ssh_user = conf.properties['main.server.ssh.username']
    ssh_key = conf.properties['main.server.ssh.key_private']
    key_path = '/tmp/dsa_{0}'.format(newport)
    # Place a private key on the server so it can ssh back to itself.
    ssh.upload_file(ssh_key, key_path)
    ssh.command('chmod 700 /tmp/dsa_{0}'.format(newport))

    with ssh._get_connection() as connection:
        tunnel_cmd = u'ssh -i {0} -L {1}:{2}:{3} {4}@{5}'.format(
            key_path, newport, hostname, oldport, ssh_user, hostname)
        logger.debug('Creating tunnel {0}'.format(tunnel_cmd))
        # Run command and timeout in 30 seconds.
        _, _, stderr = connection.exec_command(tunnel_cmd, 30)
        stderr = stderr.read()
        if len(stderr) > 0:
            logger.debug('Tunnel failed: {0}'.format(stderr))
            # Something failed, so raise an exception.
            raise SSHTunnelError(stderr)
        yield 'https://{0}:{1}'.format(hostname, newport)
def test_upload_content(self):
    """@Test: Create repository and upload content

    @Feature: Repository

    @Assert: upload content is successful

    """
    repo_name = gen_string('alpha', 15)
    try:
        new_repo = self._make_repository({'name': repo_name})
    except CLIFactoryError as err:
        self.fail(err)

    # Stage the RPM on the server where hammer expects to find it.
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD),
                    remote_file=remote_path)
    result = Repository.upload_content({
        'name': new_repo['name'],
        'path': remote_path,
        'product-id': new_repo['product']['id'],
        'organization': new_repo['organization'],
    })
    self.assertEqual(
        result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(result.return_code))
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    self.assertIn(
        "Successfully uploaded file '{0}'"
        ''.format(RPM_TO_UPLOAD),
        result.stdout[0]['message'])
def test_manifest_upload(self):
    """@Test: upload manifest (positive)

    @Feature: Subscriptions/Manifest Upload

    @Assert: Manifest are uploaded properly

    """
    org_id = self.org['id']
    # The manifest file must exist on the server before import.
    upload_file(self.manifest, remote_file=self.manifest)
    upload_result = Subscription.upload(
        {'file': self.manifest, 'organization-id': org_id})
    self.assertEqual(
        upload_result.return_code, 0, "Failed to upload manifest")
    self.assertEqual(
        len(upload_result.stderr), 0,
        "There should not be an exception while uploading manifest.")

    list_result = Subscription.list(
        {'organization-id': org_id}, per_page=False)
    self.assertEqual(
        list_result.return_code, 0,
        "Failed to list manifests in this org.")
    self.assertEqual(
        len(list_result.stderr), 0,
        "There should not be an exception while listing the manifest.")
def test_sync_rhrepos(self):
    """@Test: Create Content RedHat Sync with two repos.

    @Feature: Content RedHat Sync - Positive Create

    @Assert: Whether Syncing RedHat Repos is successful

    """
    repos_tree = self.sync.create_repos_tree(RHCT)
    success_locator = common_locators['alert.success']
    manifest_path = clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(manifest_path, remote_file=manifest_path)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.org_name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(manifest_path)
        self.assertTrue(session.nav.wait_until_element(success_locator))
        session.nav.go_to_red_hat_repositories()
        self.sync.enable_rh_repos(repos_tree)
        session.nav.go_to_sync_status()
        # sync_rh_repos returns boolean values and not objects
        self.assertTrue(self.sync.sync_rh_repos(repos_tree))
def test_upload_content(self):
    """@Test: Create repository and upload content

    @Feature: Repository

    @Assert: upload content is successful

    """
    try:
        new_repo = self._make_repository(
            {'name': gen_string('alpha', 15)})
    except CLIFactoryError as err:
        self.fail(err)

    # Copy the RPM up to the server so hammer can read it locally.
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD),
                    remote_file=remote_path)
    upload = Repository.upload_content({
        'name': new_repo['name'],
        'path': remote_path,
        'product-id': new_repo['product']['id'],
        'organization': new_repo['organization'],
    })
    self.assertEqual(upload.return_code, 0,
                     "return code must be 0, instead got {0}"
                     ''.format(upload.return_code))
    self.assertEqual(
        len(upload.stderr), 0,
        "No error was expected")
    self.assertIn("Successfully uploaded file '{0}'"
                  ''.format(RPM_TO_UPLOAD),
                  upload.stdout[0]['message'])
def test_manifest_upload(self):
    """
    @test: upload manifest (positive)

    @feature: Subscriptions/Manifest Upload

    @assert: Manifest are uploaded properly
    """
    mdetails = manifest.fetch_manifest()
    try:
        # Push the fetched manifest to the server before importing it.
        upload_file(mdetails['path'], remote_file=mdetails['path'])
        result = Subscription.upload(
            {'file': mdetails['path'], 'organization-id': self.org['id']})
        self.assertEqual(result.return_code, 0,
                         "Failed to upload manifest")
        self.assertEqual(
            len(result.stderr), 0,
            "There should not be an exception while uploading manifest.")

        result = Subscription.list({'organization-id': self.org['id']},
                                   per_page=False)
        self.assertEqual(result.return_code, 0,
                         "Failed to list manifests in this org.")
        # Verify stderr is empty (matching the assertion message) rather
        # than the old brittle ``len(result.stdout) == 8`` count, which
        # failed whenever the manifest contents changed.
        self.assertEqual(
            len(result.stderr), 0,
            "There should not be an exception while listing the manifest.")
    finally:
        # Always release the distributor, even if an assertion failed.
        manifest.delete_distributor(ds_uuid=mdetails['uuid'])
def _upload_manifest(self, manifest, org_id):
    """Uploads a manifest file and import it into an organization"""
    # The file must be present server-side before hammer can import it.
    upload_file(manifest, remote_file=manifest)
    upload_result = Subscription.upload(
        {'file': manifest, 'organization-id': org_id})
    self.assertEqual(upload_result.return_code, 0)
    self.assertEqual(len(upload_result.stderr), 0)
def test_repositoryset_disable_by_id(self):
    """@Test: Disable repo from reposet by IDs of reposet, org and product

    @Feature: Repository-set

    @Assert: Repository was disabled

    """
    org = make_org()
    cloned_manifest = manifests.clone()
    upload_file(cloned_manifest, remote_file=cloned_manifest)
    upload_result = Subscription.upload({
        u'file': cloned_manifest,
        u'organization-id': org['id'],
    })
    self.assertEqual(upload_result.return_code, 0)

    product_id = Product.info({
        u'name': PRDS['rhel'],
        u'organization-id': org['id'],
    }).stdout['id']
    reposet_id = RepositorySet.info({
        u'name': REPOSET['rhva6'],
        u'organization-id': org['id'],
        u'product-id': product_id,
    }).stdout['id']

    # Enable, then immediately disable, the same repository.
    self.assertEqual(
        RepositorySet.enable({
            u'id': reposet_id,
            u'organization-id': org['id'],
            u'product-id': product_id,
            u'releasever': '6Server',
            u'basearch': 'x86_64',
        }).return_code,
        0)
    self.assertEqual(
        RepositorySet.disable({
            u'id': reposet_id,
            u'organization-id': org['id'],
            u'product-id': product_id,
            u'releasever': '6Server',
            u'basearch': 'x86_64',
        }).return_code,
        0)

    listing = RepositorySet.available_repositories({
        u'id': reposet_id,
        u'organization-id': org['id'],
        u'product-id': product_id,
    })
    self.assertEqual(listing.return_code, 0)
    # Locate the x86_64/6Server entry and confirm it reports disabled.
    enabled_flags = [
        repo['enabled']
        for repo in listing.stdout
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ]
    self.assertEqual(enabled_flags[0], 'false')
def test_enable_manifest_repository_set(self):
    """@Test: enable repository set (positive)

    @Feature: Subscriptions/Repository Sets

    @Assert: you are able to enable and synchronize repository contained
    in a manifest

    """
    upload_file(self.manifest, remote_file=self.manifest)
    result = Subscription.upload({
        'file': self.manifest,
        'organization-id': self.org['id'],
    })
    self.assertEqual(result.return_code, 0, "Failed to upload manifest")
    self.assertEqual(
        len(result.stderr), 0,
        "There should not be an exception while uploading manifest.")

    result = Subscription.list(
        {'organization-id': self.org['id']}, per_page=False)
    # Also check the return code here; previously only stderr was
    # inspected, so a failed listing could go unnoticed.
    self.assertEqual(result.return_code, 0,
                     "Failed to list manifests in this org.")
    self.assertEqual(
        len(result.stderr), 0,
        "There should not be an exception while listing the manifest.")

    result = RepositorySet.enable({
        'name': (
            'Red Hat Enterprise Virtualization Agents '
            'for RHEL 6 Workstation (RPMs)'
        ),
        'organization-id': self.org['id'],
        'product': 'Red Hat Enterprise Linux Workstation',
        'releasever': '6Workstation',
        'basearch': 'x86_64',
    })
    self.assertEqual(result.return_code, 0, "Repo was not enabled")
    self.assertEqual(len(result.stderr), 0, "No error was expected")

    result = Repository.synchronize({
        'name': (
            'Red Hat Enterprise Virtualization Agents '
            'for RHEL 6 Workstation '
            'RPMs x86_64 6Workstation'
        ),
        'organization-id': self.org['id'],
        'product': 'Red Hat Enterprise Linux Workstation',
    })
    self.assertEqual(result.return_code, 0, "Repo was not synchronized")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
def make_partition_table(options=None):
    """
    Usage::

        hammer partition_table update [OPTIONS]

    Options::

        --file LAYOUT         Path to a file that contains the partition
                              layout
        --os-family OS_FAMILY
        --id ID               resource id
        --name NAME           resource name
        --new-name NEW_NAME   new name for the resource
        -h, --help            print help

    Usage::

        hammer partition_table create [OPTIONS]

    Options::

        --file LAYOUT         Path to a file that contains the partition
                              layout
        --name NAME
        --os-family OS_FAMILY

    """
    if options is None:
        options = {}
    (layout_fd, layout) = mkstemp(text=True)
    os.chmod(layout, 0o700)
    # Write through the descriptor mkstemp returned; previously the fd
    # was discarded and leaked on every call.
    with os.fdopen(layout_fd, "w") as ptable:
        ptable.write(options.get('content', 'default ptable content'))

    args = {
        u'name': generate_name(),
        u'file': "/tmp/%s" % generate_name(),
        u'os-family': random.choice(OPERATING_SYSTEMS)
    }

    # Upload file to server
    ssh.upload_file(local_file=layout, remote_file=args['file'])

    args = update_dictionary(args, options)
    args.update(create_object(PartitionTable, args))

    return args
def test_import_orgs_manifests(self, test_data):
    """@test: Import all organizations from the default data set
    (predefined source) and upload manifests for each of them

    @feature: Import Organizations including Manifests

    @assert: 3 Organizations are created with 3 manifests uploaded

    """
    files = dict(self.default_dataset[1])
    # Rewrite the users CSV so its organization_id column matches the
    # randomized test data for this run.
    files['users'] = update_csv_values(
        files['users'],
        u'organization_id',
        test_data,
        self.default_dataset[0]
    )
    csv_records = csv_to_dataset([files['users']])
    # create number of manifests corresponding to the number of orgs
    manifest_list = []
    # Temp directory on the remote host that will hold one manifest
    # zip per organization.
    man_dir = ssh.command(
        u'mktemp -d -p {}'.format(self.default_dataset[0])
    ).stdout[0]
    for org in set([rec['organization'] for rec in csv_records]):
        # Sanitize the org name so it is safe to use as a file name.
        for char in [' ', '.', '#']:
            org = org.replace(char, '_')
        man_file = manifests.clone()
        ssh.upload_file(man_file, u'{0}/{1}.zip'.format(man_dir, org))
        manifest_list.append(u'{0}/{1}.zip'.format(man_dir, org))
        # The local clone is no longer needed once uploaded.
        os.remove(man_file)
    ssh_import = Import.organization({
        'csv-file': files['users'],
        'upload-manifests-from': man_dir,
    })
    # now to check whether the orgs from csv appeared in satellite
    orgs = set(org['name'] for org in Org.list().stdout)
    imp_orgs = set(
        org['organization'] for org in csv_to_dataset([files['users']])
    )
    self.assertEqual(ssh_import.return_code, 0)
    self.assertTrue(imp_orgs.issubset(orgs))
    for org in imp_orgs:
        # Index 3 of the history output appears to hold the upload
        # status record — presumably; verify against the CLI output
        # format if this ever breaks.
        manifest_history = Subscription.manifest_history(
            {'organization': org}
        ).stdout[3]
        self.assertIn('SUCCESS', manifest_history)
def make_template(options=None):
    """
    Usage::

        hammer template create [OPTIONS]

    Options::

        --file TEMPLATE         Path to a file that contains the template
        --type TYPE             Template type. Eg. snippet, script, provision
        --name NAME             template name
        --audit-comment AUDIT_COMMENT
        --operatingsystem-ids OPERATINGSYSTEM_IDS
                                Array of operating systems ID to associate
                                the template with Comma separated list of
                                values.

    """
    import os

    # Assigning default values for attribute
    args = {
        u'file': "/tmp/%s" % generate_name(),
        u'type': random.choice(TEMPLATE_TYPES),
        u'name': generate_name(6),
        u'audit-comment': None,
        u'operatingsystem-ids': None,
    }

    # Write content to file or random text
    if options is not None and 'content' in options.keys():
        content = options.pop('content')
    else:
        content = generate_name()

    # Special handling for template factory
    (file_handle, layout) = mkstemp(text=True)
    chmod(layout, 0o700)
    # Write via the descriptor mkstemp returned; it was previously
    # discarded and leaked one open fd per call.
    with os.fdopen(file_handle, "w") as ptable:
        ptable.write(content)
    # Upload file to server
    ssh.upload_file(local_file=layout, remote_file=args['file'])
    # End - Special handling for template factory

    args = update_dictionary(args, options)
    args.update(create_object(Template, args))

    return args
def make_gpg_key(options=None):
    """
    Usage::

        hammer gpg create [OPTIONS]

    Options::

        --organization-id ORGANIZATION_ID organization identifier
        --name NAME               identifier of the GPG Key
        --key GPG_KEY_FILE        GPG Key file
        -h, --help                print help

    """
    # Organization ID is a required field.
    if not options or not options.get('organization-id', None):
        raise CLIFactoryError("Please provide a valid ORG ID.")

    # Create a fake gpg key file if none was provided
    if not options.get('key', None):
        (key_fd, key_filename) = mkstemp(text=True)
        os.chmod(key_filename, 0o700)
        # Use the descriptor mkstemp handed back so it is closed
        # instead of leaked.
        with os.fdopen(key_fd, "w") as gpg_key_file:
            gpg_key_file.write(generate_name(minimum=20, maximum=50))
    else:
        # If the key is provided get its local path and remove it from
        # options to not override the remote path
        key_filename = options.pop('key')

    args = {
        u'name': generate_name(),
        u'key': "/tmp/%s" % generate_name(),
        u'organization-id': None,
    }

    # Upload file to server
    ssh.upload_file(local_file=key_filename, remote_file=args['key'])

    args = update_dictionary(args, options)
    # gpg create returns a dict inside a list
    new_obj = create_object(GPGKey, args)
    args.update(new_obj)

    return args
def make_template(options=None):
    """
    Usage:
        hammer template create [OPTIONS]

    Options:
        --file TEMPLATE         Path to a file that contains the template
        --type TYPE             Template type. Eg. snippet, script, provision
        --name NAME             template name
        --audit-comment AUDIT_COMMENT
        --operatingsystem-ids OPERATINGSYSTEM_IDS
                                Array of operating systems ID to associate
                                the template with Comma separated list of
                                values.

    """
    import os

    # Assigning default values for attribute
    args = {
        'file': "/tmp/%s" % generate_name(),
        'type': random.choice(TEMPLATE_TYPES),
        'name': generate_name(6),
        'audit-comment': None,
        'operatingsystem-ids': None,
    }

    # Write content to file or random text
    if options is not None and 'content' in options.keys():
        content = options.pop('content')
    else:
        content = generate_name()

    # Special handling for template factory
    (file_handle, layout) = mkstemp(text=True)
    chmod(layout, 0o700)
    # Close the descriptor mkstemp returned by writing through it;
    # previously it was leaked on every call.
    with os.fdopen(file_handle, "w") as ptable:
        ptable.write(content)
    # Upload file to server
    ssh.upload_file(local_file=layout, remote_file=args['file'])
    # End - Special handling for template factory

    args = update_dictionary(args, options)
    args.update(create_object(Template, args))

    return args
def test_negative_create_1(self, data):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then try to create new one with same name

    @feature: GPG Keys

    @assert: gpg key is not created

    @BZ: 1172009

    """
    # Setup data to pass to the factory
    data = data.copy()
    data['organization-id'] = self.org['id']
    try:
        created = make_gpg_key(data)
    except CLIFactoryError as err:
        self.fail(err)

    # Can we find the new object?
    lookup = GPGKey().exists(
        {'organization-id': self.org['id']},
        (self.search_key, created[self.search_key])
    )
    self.assertEqual(lookup.return_code, 0, "Failed to create object")
    self.assertEqual(
        len(lookup.stderr), 0, "There should not be an exception here")
    self.assertEqual(
        created[self.search_key], lookup.stdout[self.search_key])

    # Setup a new key file
    data['key'] = '/tmp/%s' % gen_alphanumeric()
    gpg_key = self.create_gpg_key_file()
    self.assertIsNotNone(gpg_key, 'GPG Key file must be created')
    ssh.upload_file(local_file=gpg_key, remote_file=data['key'])

    # Try to create a gpg key with the same name
    duplicate = GPGKey().create(data)
    self.assertNotEqual(
        duplicate.return_code, 0, "Object should not be created")
    self.assertGreater(
        len(duplicate.stderr), 0, "Should have raised an exception")
def make_partition_table(options=None):
    """
    Usage:
        hammer partition_table update [OPTIONS]

    Options:
        --file LAYOUT         Path to a file that contains the partition
                              layout
        --os-family OS_FAMILY
        --id ID               resource id
        --name NAME           resource name
        --new-name NEW_NAME   new name for the resource
        -h, --help            print help

    [root@qe-blade-04 ~]# hammer partition_table create --help
    Usage:
        hammer partition_table create [OPTIONS]

    Options:
        --file LAYOUT         Path to a file that contains the partition
                              layout
        --name NAME
        --os-family OS_FAMILY

    """
    if options is None:
        options = {}
    (layout_fd, layout) = mkstemp(text=True)
    os.chmod(layout, 0o700)
    # Write via the descriptor mkstemp returned; previously the fd was
    # discarded and leaked on every call.
    with os.fdopen(layout_fd, "w") as ptable:
        ptable.write(options.get('content', 'default ptable content'))

    args = {
        'name': generate_name(),
        'file': "/tmp/%s" % generate_name(),
        'os-family': random.choice(OPERATING_SYSTEMS)
    }

    # Upload file to server
    ssh.upload_file(local_file=layout, remote_file=args['file'])

    args = update_dictionary(args, options)
    args.update(create_object(PartitionTable, args))

    return args
def default_url_on_new_port(oldport, newport):
    """
    Creates context where the default smart-proxy is forwarded on a new port

    :param int oldport: Port to be forwarded.
    :param int newport: New port used to forward ``oldport``.

    :return: Yields the new capsule URL (``https://host:newport``).

    """
    logger = logging.getLogger("robottelo")
    domain = conf.properties['main.server.hostname']
    user = conf.properties['main.server.ssh.username']
    key = conf.properties['main.server.ssh.key_private']
    # The server needs a copy of its own key so it can ssh to itself.
    ssh.upload_file(key, "/tmp/dsa_%s" % newport)
    ssh.command("chmod 700 /tmp/dsa_%s" % newport)

    with ssh._get_connection() as connection:
        command = "ssh -i %s -L %s:%s:%s %s@%s" % (
            "/tmp/dsa_%s" % newport,
            newport, domain, oldport, user, domain
        )
        # Typo fix: log messages previously read "tunell".
        logger.debug("Creating tunnel %s", command)
        _, _, stderr = connection.exec_command(command, 1000)
        if len(stderr) > 0:
            # NOTE(review): failure is only logged, not raised — the URL
            # is still yielded; confirm whether callers expect an error.
            logger.debug("Tunnel failed %s", stderr)
        yield "https://%s:%s" % (domain, newport)
def make_gpg_key(options=None):
    """
    Usage:
        hammer gpg create [OPTIONS]

    Options:
        --organization-id ORGANIZATION_ID organization identifier
        --name NAME               identifier of the GPG Key
        --key GPG_KEY_FILE        GPG Key file
        -h, --help                print help

    """
    # Organization ID is a required field.
    if not options or not options.get('organization-id', None):
        raise Exception("Please provide a valid ORG ID.")

    # Create a fake gpg key file if none was provided
    if not options.get('key', None):
        (file_handle, key_filename) = mkstemp(text=True)
        os.chmod(key_filename, 0o700)
        # Write through the descriptor mkstemp returned; previously it
        # was never closed, leaking one fd per call.
        with os.fdopen(file_handle, "w") as gpg_key_file:
            gpg_key_file.write(generate_name(minimum=20, maximum=50))
    else:
        # If the key is provided get its local path and remove it from
        # options to not override the remote path
        key_filename = options.pop('key')

    args = {
        'name': generate_name(),
        'key': "/tmp/%s" % generate_name(),
        'organization-id': None,
    }

    # Upload file to server
    ssh.upload_file(local_file=key_filename, remote_file=args['key'])

    args = update_dictionary(args, options)
    # gpg create returns a dict inside a list
    new_obj = create_object(GPGKey, args)
    args.update(new_obj)

    return args
def test_positive_upload_1(self):
    """@Test: Upload a manifest with minimal input parameters

    @Feature: Manifest/Subscription - Positive Create

    @Assert: Manifest is uploaded

    """
    success_locator = common_locators['alert.success']
    manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(manifest_path, remote_file=manifest_path)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.org_name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(manifest_path)
        self.assertTrue(
            self.subscriptions.wait_until_element(success_locator))
def test_manifest_refresh(self):
    """@Test: upload manifest (positive) and refresh

    @Feature: Subscriptions/Manifest refresh

    @Assert: Manifests can be refreshed

    @BZ 1147559

    """
    org_id = self.org['id']
    upload_file(self.manifest, remote_file=self.manifest)
    upload_result = Subscription.upload(
        {'file': self.manifest, 'organization-id': org_id})
    self.assertEqual(
        upload_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(upload_result.return_code))
    self.assertEqual(
        len(upload_result.stderr), 0,
        "There should not be an exception while uploading manifest.")

    list_result = Subscription.list(
        {'organization-id': org_id}, per_page=False)
    self.assertEqual(
        list_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(list_result.return_code))
    self.assertEqual(
        len(list_result.stderr), 0,
        "There should not be an exception while listing the manifest.")
    self.assertGreater(len(list_result.stdout), 0)

    refresh_result = Subscription.refresh_manifest(
        {'organization-id': org_id})
    self.assertEqual(
        refresh_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(refresh_result.return_code))
    self.assertEqual(
        len(refresh_result.stderr), 0,
        "There should not be an exception while refreshing the manifest .")
def test_manifest_history(self):
    """@Test: upload manifest (positive) and check history

    @Feature: Subscriptions/Manifest History

    @Assert: Manifest history is shown properly

    """
    org_id = self.org['id']
    upload_file(self.manifest, remote_file=self.manifest)
    upload_result = Subscription.upload(
        {'file': self.manifest, 'organization-id': org_id})
    self.assertEqual(
        upload_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(upload_result.return_code))
    self.assertEqual(
        len(upload_result.stderr), 0,
        "There should not be an exception while uploading manifest.")

    # Note: this variant deliberately passes per_page=None.
    list_result = Subscription.list(
        {'organization-id': org_id}, per_page=None)
    self.assertEqual(
        list_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(list_result.return_code))
    self.assertEqual(
        len(list_result.stderr), 0,
        "There should not be an exception while listing the manifest.")
    self.assertGreater(len(list_result.stdout), 0)

    history_result = Subscription.manifest_history(
        {'organization-id': org_id})
    self.assertEqual(
        history_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(history_result.return_code))
    self.assertEqual(
        len(history_result.stderr), 0,
        "There should not be an exception for manifest history.")
    self.assertIn('{0} file imported successfully.'
                  ''.format(self.org['name']),
                  ''.join(history_result.stdout))
def test_negative_create_1(self, data):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then try to create new one with same name

    @feature: GPG Keys

    @assert: gpg key is not created

    @BZ: 1172009

    """
    # Setup data to pass to the factory
    data = data.copy()
    data['organization-id'] = self.org['id']
    try:
        first_key = make_gpg_key(data)
    except CLIFactoryError as err:
        self.fail(err)

    # Can we find the new object?
    lookup = GPGKey().exists(
        {'organization-id': self.org['id']},
        (self.search_key, first_key[self.search_key]))
    self.assertEqual(lookup.return_code, 0, "Failed to create object")
    self.assertEqual(len(lookup.stderr), 0,
                     "There should not be an exception here")
    self.assertEqual(first_key[self.search_key],
                     lookup.stdout[self.search_key])

    # Setup a new key file
    data['key'] = '/tmp/%s' % gen_alphanumeric()
    gpg_key = self.create_gpg_key_file()
    self.assertIsNotNone(gpg_key, 'GPG Key file must be created')
    ssh.upload_file(local_file=gpg_key, remote_file=data['key'])

    # Try to create a gpg key with the same name
    second_key = GPGKey().create(data)
    self.assertNotEqual(second_key.return_code, 0,
                        "Object should not be created")
    self.assertGreater(len(second_key.stderr), 0,
                       "Should have raised an exception")
def test_add_subscription(self):
    """@Test: Test that subscription can be added to activation key

    @Feature: Activation key - Host

    @Steps:

    1. Create Activation key
    2. Upload manifest and add subscription
    3. Associate the activation key to subscription

    @Assert: Subscription successfully added to activation key

    """
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    try:
        org = make_org(cached=True)
        activation_key = self._make_activation_key({
            u'organization-id': org['id'],
        })
        # Upload the manifest into the same org the activation key
        # belongs to. The original mixed org['id'] with self.org['id'],
        # so the subscription came from a different organization than
        # the key it was attached to.
        result = Subscription.upload({
            'file': manifest,
            'organization-id': org['id'],
        })
    except CLIFactoryError as err:
        self.fail(err)

    subs_id = Subscription.list(
        {'organization-id': org['id']}, per_page=False)
    result = ActivationKey.add_subscription({
        u'id': activation_key['id'],
        u'subscription-id': subs_id.stdout[0]['id'],
    })
    self.assertEqual(result.return_code, 0,
                     "return code must be 0, instead got {0}"
                     ''.format(result.return_code))
    self.assertEqual(
        len(result.stderr), 0, 'There should not be an error here')
    self.assertIn("Subscription added to activation key", result.stdout)
def test_positive_delete_1(self):
    """@Test: Upload a manifest and delete the manifest.

    @Feature: Manifest/Subscription - Positive Delete

    @Assert: Manifest is Deleted successfully

    """
    success_locator = common_locators["alert.success"]
    manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(manifest_path, remote_file=manifest_path)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.org_name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(manifest_path)
        self.subscriptions.delete()
        self.assertTrue(
            self.subscriptions.wait_until_element(success_locator))
def _create_ptable(self, layout=None, name=None, os_family=None,
                   content=''):
    """Create a partition table via hammer and assert it exists.

    :param layout: Optional path to a local layout file. When omitted,
        a temporary file containing ``content`` is generated.
    :param name: Optional partition table name (random when omitted).
    :param os_family: Optional OS family (random pick when omitted).
    :param content: Layout text written when no ``layout`` is given.
    """
    if not layout:
        (layout_fd, layout) = mkstemp(text=True)
        os.chmod(layout, 0o700)
        # Write through the descriptor mkstemp returned; previously it
        # was discarded and leaked on every call.
        with os.fdopen(layout_fd, "w") as ptable:
            ptable.write(content)

    args = {
        'file': "/tmp/%s" % generate_name(),
        'name': name or generate_name(),
        'os-family': os_family or OSES[random.randint(0, len(OSES) - 1)],
    }

    # Upload file to server
    ssh.upload_file(local_file=layout, remote_file=args['file'])

    PartitionTable().create(args)
    self.assertTrue(PartitionTable().exists(('name', args['name'])))
def test_positive_upload_1(self):
    """
    @Feature: Manifest/Subscription - Positive Create

    @Test: Upload a manifest with minimal input parameters

    @Assert: Manifest is uploaded
    """
    success_locator = common_locators['alert.success']
    mdetails = manifest.fetch_manifest()
    manifest_path = mdetails['path']
    try:
        # upload_file function should take care of uploading to sauce labs.
        upload_file(mdetails['path'], remote_file=mdetails['path'])
        with Session(self.browser) as session:
            session.nav.go_to_select_org(self.org_name)
            session.nav.go_to_red_hat_subscriptions()
            self.subscriptions.upload(manifest_path)
            self.assertTrue(
                self.subscriptions.wait_until_element(success_locator))
    finally:
        # Release the distributor so repeated runs do not leak them.
        manifest.delete_distributor(ds_uuid=mdetails['uuid'])
def test_manifest_refresh(self):
    """@Test: upload manifest (positive) and refresh

    @Feature: Subscriptions/Manifest refresh

    @Assert: Manifests can be refreshed

    """
    org_id = self.org['id']
    upload_file(self.manifest, remote_file=self.manifest)
    upload_result = Subscription.upload(
        {'file': self.manifest, 'organization-id': org_id})
    self.assertEqual(
        upload_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(upload_result.return_code))
    self.assertEqual(
        len(upload_result.stderr), 0,
        "There should not be an exception while uploading manifest.")

    list_result = Subscription.list(
        {'organization-id': org_id}, per_page=False)
    self.assertEqual(
        list_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(list_result.return_code))
    self.assertEqual(
        len(list_result.stderr), 0,
        "There should not be an exception while listing the manifest.")
    self.assertGreater(len(list_result.stdout), 0)

    refresh_result = Subscription.refresh_manifest(
        {'organization-id': org_id})
    self.assertEqual(
        refresh_result.return_code, 0,
        "return code must be 0, instead got {0}"
        ''.format(refresh_result.return_code))
    self.assertEqual(
        len(refresh_result.stderr), 0,
        "There should not be an exception while refreshing the manifest .")
def make_template(options=None):
    """
    Usage:
        hammer template create [OPTIONS]

    Options:
        --file TEMPLATE         Path to a file that contains the template
        --type TYPE             Template type. Eg. snippet, script, provision
        --name NAME             template name
        --audit-comment AUDIT_COMMENT
        --operatingsystem-ids OPERATINGSYSTEM_IDS
                                Array of operating systems ID to associate
                                the template with Comma separated list of
                                values.

    """
    import os

    # Assigning default values for attribute
    args = {
        'file': "/tmp/%s" % generate_name(),
        'type': random.choice(TEMPLATE_TYPES),
        'name': generate_name(6),
        'audit-comment': '',
        'operatingsystem-ids': '',
        # TODO: Change '' to None when base is coded with disregarding None
        # TODO: Fix other methods above for this change too
    }

    # Special handling for template factory
    (file_handle, layout) = mkstemp(text=True)
    chmod(layout, 0o700)
    # Write via the descriptor mkstemp returned; previously it was
    # never closed, leaking one fd per call.
    with os.fdopen(file_handle, "w") as ptable:
        ptable.write(generate_name())
    # Upload file to server
    ssh.upload_file(local_file=layout, remote_file=args['file'])
    # End - Special handling for template factory

    args = update_dictionary(args, options)
    create_object(Template, args)

    return args
def test_assert_delete_button(self):
    """@Test: Upload and delete a manifest

    @Feature: Manifest/Subscription - Positive Delete

    @Assert: Manifest is Deleted. Delete button is asserted .
    Subscriptions is asserted

    """
    success_locator = common_locators["alert.success"]
    delete_locator = locators["subs.delete_manifest"]
    manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(manifest_path, remote_file=manifest_path)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.org_name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(manifest_path)
        self.subscriptions.delete()
        self.assertTrue(
            self.subscriptions.wait_until_element(success_locator))
        self.assertTrue(
            self.subscriptions.wait_until_element(delete_locator))
        self.assertIsNone(
            self.subscriptions.search(DEFAULT_SUBSCRIPTION_NAME))
def test_negative_create_3(self, data):
    """
    @test: Create gpg key with invalid name and valid gpg key via
    file import

    @feature: GPG Keys

    @assert: gpg key is not created
    """
    # Setup data to pass to create
    data = data.copy()
    data['key'] = '/tmp/%s' % generate_name()
    data['organization-id'] = self.org['id']
    ssh.upload_file(
        local_file=VALID_GPG_KEY_FILE_PATH, remote_file=data['key'])

    # Try to create a new object passing @data to factory method
    creation = GPGKey().create(data)
    self.assertNotEqual(
        creation.return_code, 0, "Object should not be created")
    self.assertGreater(
        len(creation.stderr), 0, "Should have raised an exception")
def test_negative_create_3(self, data):
    """
    @test: Create gpg key with invalid name and valid gpg key via
    file import

    @feature: GPG Keys

    @assert: gpg key is not created
    """
    # Setup data to pass to create
    data = data.copy()
    data['key'] = '/tmp/%s' % generate_name()
    data['organization-id'] = self.org['id']
    ssh.upload_file(local_file=VALID_GPG_KEY_FILE_PATH,
                    remote_file=data['key'])

    # Try to create a new object passing @data to factory method
    creation = GPGKey().create(data)
    self.assertNotEqual(creation.return_code, 0,
                        "Object should not be created")
    self.assertGreater(len(creation.stderr), 0,
                       "Should have raised an exception")
def test_repositoryset_enable_by_label(self):
    """@Test: Enable repo from reposet by org label, reposet and product
    names

    @Feature: Repository-set

    @Assert: Repository was enabled

    """
    org = make_org()
    cloned_manifest = manifests.clone()
    upload_file(cloned_manifest, remote_file=cloned_manifest)
    upload_result = Subscription.upload({
        u'file': cloned_manifest,
        u'organization-id': org['id'],
    })
    self.assertEqual(upload_result.return_code, 0)

    enable_result = RepositorySet.enable({
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
        u'releasever': '6Server',
        u'basearch': 'x86_64',
    })
    self.assertEqual(enable_result.return_code, 0)

    listing = RepositorySet.available_repositories({
        u'name': REPOSET['rhva6'],
        u'organization-label': org['label'],
        u'product': PRDS['rhel'],
    })
    self.assertEqual(listing.return_code, 0)
    # Locate the x86_64/6Server entry and confirm it reports enabled.
    enabled_flags = [
        repo['enabled']
        for repo in listing.stdout
        if repo['arch'] == 'x86_64' and repo['release'] == '6Server'
    ]
    self.assertEqual(enabled_flags[0], 'true')
def test_assert_delete_button(self):
    """@Test: Upload and delete a manifest

    @Feature: Manifest/Subscription - Positive Delete

    @Assert: Manifest is Deleted. Delete button is asserted .
    Subscriptions is asserted

    """
    success_locator = common_locators['alert.success']
    delete_locator = locators['subs.delete_manifest']
    manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(manifest_path, remote_file=manifest_path)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.org_name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(manifest_path)
        self.subscriptions.delete()
        self.assertTrue(
            self.subscriptions.wait_until_element(success_locator))
        self.assertTrue(
            self.subscriptions.wait_until_element(delete_locator))
        self.assertIsNone(
            self.subscriptions.search("Red Hat Employee Subscription"))
def test_sync_rh_repos(self):
    """@Test: Create Content RedHat Sync with two repos.

    @Feature: Content RedHat Sync - Positive Create

    @Assert: Whether Syncing RedHat Repos is successful
    """
    repo_tree = self.sync.create_repos_tree(RHCT)
    manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(manifest_path, remote_file=manifest_path)
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.org_name)
        session.nav.go_to_red_hat_subscriptions()
        self.subscriptions.upload(manifest_path)
        session.nav.go_to_red_hat_repositories()
        self.sync.enable_rh_repos(repo_tree)
        session.nav.go_to_sync_status()
        # sync_rh_repos returns a plain boolean, not an object.
        self.assertTrue(self.sync.sync_rh_repos(repo_tree))
def _create_template(self, template=None, template_type=None, name=None, audit_comment=None, operatingsystem_ids=None, content=None): if not template: (file_handle, layout) = mkstemp(text=True) chmod(layout, 0700) with open(layout, "w") as ptable: ptable.write(content) args = { 'file': "/tmp/%s" % generate_name(), 'name': name or generate_name(), 'type': template_type or random.choice(TEMPLATE_TYPES), 'audit-comment': audit_comment, 'operatingsystem-ids': operatingsystem_ids, } # Upload file to server ssh.upload_file(local_file=layout, remote_file=args['file']) Template().create(args) self.assertTrue(Template().exists(('name', args['name'])))
def build_csv_file(rows=None, dirname=None):
    """Generate a csv file, feed it by the provided data
    (a list of dictionary objects) and return a path to it.

    :param rows: list of dicts; the keys of the first row become the
        CSV header, so every row should share them. Defaults to a
        single empty row.
    :param dirname: optional remote directory for the uploaded file;
        when None the local temp path is reused as the remote path.
    :return: path of the uploaded file on the remote server.
    """
    if rows is None:
        rows = [{}]
    file_descriptor, file_name = tempfile.mkstemp()
    # mkstemp hands back an already-open descriptor; close it because
    # the file is re-opened below - not closing it leaks one fd per call.
    os.close(file_descriptor)
    with open(file_name, 'wb') as csv_file:
        csv_writer = csv.DictWriter(
            csv_file,
            fieldnames=rows[0].keys(),
            lineterminator='\n'
        )
        csv_writer.writeheader()
        for row in rows:
            # Encode values explicitly; the csv module here writes bytes.
            csv_writer.writerow({
                key: val.encode('utf8')
                for key, val in row.items()
            })
    if dirname is None:
        remote_file = file_name
    else:
        remote_file = os.path.join(dirname, os.path.basename(file_name))
    ssh.upload_file(file_name, remote_file)
    # Local copy is no longer needed once it lives on the server.
    os.remove(file_name)
    return remote_file
# Ad-hoc timing script: upload a large (10k) manifest to a Satellite
# server over SSH and time the Subscription.upload CLI call.
import time
# NOTE(review): `os` is used here but no `import os` is visible in this
# chunk - presumably imported earlier in the file; confirm.
os.getcwd()
#sys.stdout = open('timing-upload','w')
#os.chdir('/home/chozhang/Documents/robottelo/robottelo/')
from robottelo.common import ssh
from robottelo.cli.subscription import Subscription
# open log file
print 'Timing uploading manfest to server:'
# Show which server we are talking to.
r = ssh.command('hostname')
print r.stdout[0]
# Remove any stale manifest from a previous run, then push the new one.
ssh.command('rm test_manifest.zip')
ssh.upload_file('/home/chozhang/Documents/satellite6/20150526-10k-RHEL-Manifest.zip','test_manifest.zip')
# NOTE(review): `start` is captured but never read in this chunk - the
# elapsed-time print presumably lives further down the file; confirm.
start = time.time()
result = Subscription.upload({
    'file':'./test_manifest.zip',
    'organization-id':'1'
})
if result.return_code != 0:
    print "Failed to upload manifest: {0} and return code: {1}" \
        .format(result.stderr, result.return_code)
else:
    # print subscription list
    result = Subscription.list({'organization-id':'1'}, per_page=False)
    if result.return_code == 0:
        print "Subscription name: ",result.stdout[0]['name']
        print "Subscription id: ",result.stdout[0]['id']
def test_puppet_install(self):
    """@Test: Perform puppet end to end smoke tests using RH repos.

    Flow: org -> lifecycle env -> manifest upload -> RH + custom repos
    -> sync -> content view (YUM repo + puppet module) -> publish ->
    promote -> activation key -> VM registration -> puppet run.

    @Feature: Smoke test puppet install and configure on client

    @Assert: Client should get configured by puppet-module.
    """
    activation_key_name = gen_string('alpha')
    cloned_manifest_path = manifests.clone()
    cv_name = gen_string('alpha')
    env_name = gen_string('alpha')
    org_name = gen_string('alpha')
    product_name = gen_string('alpha')
    # The 'motd' puppet module rewrites /etc/motd - asserted at the end.
    puppet_module = 'motd'
    puppet_repository_name = gen_string('alpha')
    repos = self.sync.create_repos_tree(SAT6_TOOLS_TREE)
    rhel_prd = DEFAULT_SUBSCRIPTION_NAME
    rhel6_repo = conf.properties['clients.rhel6_repo']
    # upload_file ships the manifest to where the browser host can see it.
    upload_file(cloned_manifest_path)
    with Session(self.browser) as session:
        # Create New organization
        make_org(session, org_name=org_name)
        self.assertIsNotNone(self.org.search(org_name))
        # Create New Lifecycle environment
        make_lifecycle_environment(session, org=org_name, name=env_name)
        strategy, value = locators['content_env.select_name']
        self.assertIsNotNone(self.contentenv.wait_until_element(
            (strategy, value % env_name)
        ))
        session.nav.go_to_red_hat_subscriptions()
        # Upload manifest from webui
        self.subscriptions.upload(cloned_manifest_path)
        self.assertTrue(session.nav.wait_until_element(
            common_locators['alert.success']
        ))
        session.nav.go_to_red_hat_repositories()
        # List of dictionary passed to enable the redhat repos
        # It selects Product->Reposet-> Repo
        self.sync.enable_rh_repos(repos)
        session.nav.go_to_sync_status()
        # Sync the repos
        # syn.sync_rh_repos returns boolean values and not objects
        self.assertTrue(self.sync.sync_noversion_rh_repos(
            PRDS['rhel'], [REPOS['rhst6']['name']]
        ))
        # Create custom product
        make_product(session, org=org_name, name=product_name)
        self.assertIsNotNone(self.products.search(product_name))
        # Create a puppet Repository
        make_repository(
            session,
            org=org_name,
            name=puppet_repository_name,
            product=product_name,
            url=FAKE_6_PUPPET_REPO,
            repo_type=REPO_TYPE['puppet']
        )
        self.assertIsNotNone(self.repository.search(
            puppet_repository_name
        ))
        # Sync the repos
        # syn.sync_rh_repos returns boolean values and not objects
        session.nav.go_to_sync_status()
        self.assertIsNotNone(self.sync.sync_custom_repos(
            product_name, [puppet_repository_name]
        ))
        # Create new content-view
        make_contentview(session, org=org_name, name=cv_name)
        self.assertIsNotNone(self.content_views.search(cv_name))
        # Add YUM repository to content-view
        self.content_views.add_remove_repos(
            cv_name, [REPOS['rhst6']['name']],
        )
        # Success-alert checks are skipped while BZ 1191422 is open.
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(self.content_views.wait_until_element(
                common_locators['alert.success']
            ))
        # Add puppet-module to content-view
        self.content_views.add_puppet_module(
            cv_name, puppet_module, filter_term='Latest')
        # Publish content-view
        self.content_views.publish(cv_name)
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(self.content_views.wait_until_element(
                common_locators['alert.success']
            ))
        # Promote content-view to life-cycle environment.
        self.content_views.promote(
            cv_name, version='Version 1', env=env_name)
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(self.content_views.wait_until_element(
                common_locators['alert.success']
            ))
        # Create Activation-Key
        make_activationkey(
            session, org=org_name, name=activation_key_name,
            env=env_name, content_view=cv_name
        )
        self.activationkey.associate_product(
            activation_key_name, [product_name, rhel_prd])
        self.activationkey.enable_repos(
            activation_key_name, [REPOSET['rhst6']]
        )
        if not bz_bug_is_open(1191541):
            self.assertIsNotNone(self.activationkey.wait_until_element(
                common_locators['alert.success']
            ))
        # Create VM
        with VirtualMachine(distro='rhel67') as vm:
            vm.install_katello_cert()
            # NOTE(review): register_contenthost's result is not
            # checked here, unlike in sibling tests - confirm intended.
            vm.register_contenthost(activation_key_name, org_name)
            vm.configure_puppet(rhel6_repo)
            host = vm.fetch_hostname()
            session.nav.go_to_hosts()
            set_context(session, org=ANY_CONTEXT['org'])
            self.hosts.update_host_bulkactions(host=host, org=org_name)
            # First pass: move the host to the CV/env with a puppet
            # environment reset, then assign the puppet module.
            self.hosts.update(
                name=host,
                lifecycle_env=env_name,
                cv=cv_name,
                reset_puppetenv=True,
            )
            session.nav.go_to_hosts()
            self.hosts.update(
                name=host,
                reset_puppetenv=False,
                puppet_module=puppet_module
            )
            vm.run(u'puppet agent -t')
            # motd module should have stamped the FQDN into /etc/motd.
            result = vm.run(u'cat /etc/motd | grep FQDN')
            self.assertEqual(result.return_code, 0)
def test_end_to_end(self):
    """@Test: Perform end to end smoke tests using RH repos.

    Flow: org -> lifecycle env -> manifest upload -> enable + sync RH
    repos -> content view -> publish -> promote -> activation key ->
    VM registration -> package install.

    @Feature: Smoke test

    @Assert: All tests should succeed and Content should be successfully
    fetched by client
    """
    org_name = gen_string('alpha', 6)
    cv_name = gen_string('alpha', 6)
    activation_key_name = gen_string('alpha', 6)
    env_name = gen_string('alpha', 6)
    repos = self.sync.create_repos_tree(RHVA_REPO_TREE)
    cloned_manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(cloned_manifest_path)
    with Session(self.browser) as session:
        # Create New organization
        make_org(session, org_name=org_name)
        self.assertIsNotNone(self.org.search(org_name))
        # Create New Lifecycle environment
        make_lifecycle_environment(session, org=org_name, name=env_name)
        strategy, value = locators['content_env.select_name']
        self.assertIsNotNone(self.contentenv.wait_until_element(
            (strategy, value % env_name)
        ))
        # Navigate UI to select org and redhat subscription page
        session.nav.go_to_select_org(org_name)
        session.nav.go_to_red_hat_subscriptions()
        # Upload manifest from webui
        self.subscriptions.upload(cloned_manifest_path)
        self.assertTrue(session.nav.wait_until_element(
            common_locators['alert.success']
        ))
        session.nav.go_to_red_hat_repositories()
        # List of dictionary passed to enable the redhat repos
        # It selects Product->Reposet-> Repo
        self.sync.enable_rh_repos(repos)
        session.nav.go_to_sync_status()
        # Sync the repos
        # syn.sync_rh_repos returns boolean values and not objects
        self.assertTrue(self.sync.sync_rh_repos(repos))
        # Create new content-view
        make_contentview(session, org=org_name, name=cv_name)
        self.assertIsNotNone(self.content_views.search(cv_name))
        # Add YUM repository to content-view
        self.content_views.add_remove_repos(
            cv_name, [REPOS['rhva65']['name'], REPOS['rhva6']['name']]
        )
        # Success-alert checks are skipped while BZ 1191422 is open.
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(self.content_views.wait_until_element(
                common_locators['alert.success']
            ))
        # Publish content-view
        self.content_views.publish(cv_name)
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(self.content_views.wait_until_element(
                common_locators['alert.success']
            ))
        # Promote content-view to life-cycle environment 1
        self.content_views.promote(
            cv_name, version='Version 1', env=env_name)
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(self.content_views.wait_until_element(
                common_locators['alert.success']
            ))
        # Create Activation-Key
        make_activationkey(
            session, org=org_name, name=activation_key_name,
            env=env_name, content_view=cv_name
        )
        self.activationkey.associate_product(
            activation_key_name, [DEFAULT_SUBSCRIPTION_NAME])
        self.activationkey.enable_repos(
            activation_key_name, [REPOSET['rhva6']])
        if not bz_bug_is_open(1191541):
            self.assertIsNotNone(self.activationkey.wait_until_element(
                common_locators['alert.success']
            ))
        # Create VM
        with VirtualMachine(distro='rhel66') as vm:
            vm.install_katello_cert()
            result = vm.register_contenthost(activation_key_name, org_name)
            self.assertEqual(result.return_code, 0)
            # Install contents from sat6 server
            package_name = 'python-kitchen'
            result = vm.run(u'yum install -y {0}'.format(package_name))
            self.assertEqual(result.return_code, 0)
            # Verify if package is installed by query it
            result = vm.run(u'rpm -q {0}'.format(package_name))
            self.assertEqual(result.return_code, 0)
def test_end_to_end(self):
    """@Test: Perform end to end smoke tests using RH repos.

    Flow: org -> lifecycle env -> manifest upload -> enable + sync RH
    repos -> content view -> publish -> promote -> activation key ->
    manual katello-ca install + registration on a VM -> package install.

    @Feature: Smoke test

    @Assert: All tests should succeed and Content should be successfully
    fetched by client
    """
    org_name = gen_string("alpha", 6)
    cv_name = gen_string("alpha", 6)
    activation_key_name = gen_string("alpha", 6)
    env_name = gen_string("alpha", 6)
    product_name = "Red Hat Employee Subscription"
    repo_names = [
        "Red Hat Enterprise Virtualization Agents for RHEL 6 Server "
        "RPMs x86_64 6.5",
        "Red Hat Enterprise Virtualization Agents for RHEL 6 Server "
        "RPMs x86_64 6Server",
    ]
    repos = self.sync.create_repos_tree(RHVA_REPO_TREE)
    package_name = "python-kitchen"
    cloned_manifest_path = manifests.clone()
    # upload_file function should take care of uploading to sauce labs.
    upload_file(cloned_manifest_path, remote_file=cloned_manifest_path)
    with Session(self.browser) as session:
        # Create New organization
        make_org(session, org_name=org_name)
        self.assertIsNotNone(self.org.search(org_name))
        # Create New Lifecycle environment
        make_lifecycle_environment(session, org=org_name, name=env_name)
        strategy, value = locators["content_env.select_name"]
        self.assertIsNotNone(
            self.contentenv.wait_until_element(
                (strategy, value % env_name)))
        # Navigate UI to select org and redhat subscription page
        session.nav.go_to_select_org(org_name)
        session.nav.go_to_red_hat_subscriptions()
        # Upload manifest from webui
        self.subscriptions.upload(cloned_manifest_path)
        self.assertTrue(
            session.nav.wait_until_element(
                common_locators['alert.success']))
        session.nav.go_to_red_hat_repositories()
        # List of dictionary passed to enable the redhat repos
        # It selects Product->Reposet-> Repo
        self.sync.enable_rh_repos(repos)
        session.nav.go_to_sync_status()
        # Sync the repos
        sync = self.sync.sync_rh_repos(repos)
        # syn.sync_rh_repos returns boolean values and not objects
        self.assertTrue(sync)
        # Create new content-view
        make_contentview(session, org=org_name, name=cv_name)
        self.assertIsNotNone(self.content_views.search(cv_name))
        # Add YUM repository to content-view
        self.content_views.add_remove_repos(cv_name, repo_names)
        # Success-alert checks are skipped while BZ 1191422 is open.
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(
                self.content_views.wait_until_element(
                    common_locators["alert.success"]))
        # Publish content-view
        self.content_views.publish(cv_name)
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(
                self.content_views.wait_until_element(
                    common_locators["alert.success"]))
        # Promote content-view to life-cycle environment 1
        self.content_views.promote(cv_name, version="Version 1",
                                   env=env_name)
        if not bz_bug_is_open(1191422):
            self.assertIsNotNone(
                self.content_views.wait_until_element(
                    common_locators["alert.success"]))
        # Create Activation-Key
        make_activationkey(session, org=org_name, name=activation_key_name,
                           env=env_name, content_view=cv_name)
        self.activationkey.associate_product(activation_key_name,
                                             [product_name])
        self.activationkey.enable_repos(activation_key_name,
                                        [REPOSET['rhva6']])
        if not bz_bug_is_open(1191541):
            self.assertIsNotNone(
                self.activationkey.wait_until_element(
                    common_locators["alert.success"]))
        # Create VM
        with VirtualMachine(distro='rhel66') as vm:
            # Download and Install rpm
            result = vm.run(
                "wget -nd -r -l1 --no-parent -A '*.noarch.rpm' "
                "http://{0}/pub/".format(self.server_name))
            self.assertEqual(
                result.return_code, 0,
                "failed to fetch katello-ca rpm: {0}, return code: {1}".
                format(result.stderr, result.return_code))
            result = vm.run('rpm -i katello-ca-consumer*.noarch.rpm')
            self.assertEqual(
                result.return_code, 0,
                "failed to install katello-ca rpm: {0}, return code: {1}".
                format(result.stderr, result.return_code))
            # Register client with foreman server using activation-key
            result = vm.run(
                'subscription-manager register --activationkey {0} '
                '--org {1} --force'.format(activation_key_name, org_name))
            self.assertEqual(
                result.return_code, 0,
                "failed to register client:: {0} and return code: {1}".
                format(result.stderr, result.return_code))
            # Install contents from sat6 server
            result = vm.run('yum install -y {0}'.format(package_name))
            self.assertEqual(
                result.return_code, 0,
                "Package install failed: {0} and return code: {1}".format(
                    result.stderr, result.return_code))
            # Verify if package is installed by query it
            result = vm.run('rpm -q {0}'.format(package_name))
            self.assertIn(package_name, result.stdout[0])
def test_end_to_end(self):
    """@Test: Perform end to end smoke tests using RH repos.

    1. Create new organization and environment
    2. Upload manifest
    3. Sync a RedHat repository
    4. Create content-view
    5. Add repository to contet-view
    6. Promote/publish content-view
    7. Create an activation-key
    8. Add product to activation-key
    9. Create new virtualmachine
    10. Pull rpm from Foreman server and install on client
    11. Register client with foreman server using activation-key
    12. Install rpm on client

    @Feature: Smoke test

    @Assert: All tests should succeed and Content should be successfully
    fetched by client
    """
    # Product, RepoSet and repository variables
    rhel_product_name = 'Red Hat Enterprise Linux Server'
    rhel_repo_set = (
        'Red Hat Enterprise Virtualization Agents '
        'for RHEL 6 Server (RPMs)'
    )
    rhel_repo_name = (
        'Red Hat Enterprise Virtualization Agents '
        'for RHEL 6 Server '
        'RPMs x86_64 6Server'
    )
    org_name = random.choice(generate_strings_list())
    # Create new org and environment
    new_org = make_org({u'name': org_name})
    new_env = make_lifecycle_environment({
        u'organization-id': new_org['id'],
        u'name': gen_alphanumeric(),
    })
    # Clone manifest and upload it
    manifest = manifests.clone()
    upload_file(manifest, remote_file=manifest)
    result = Subscription.upload({
        u'file': manifest,
        u'organization-id': new_org['id'],
    })
    self.assertEqual(
        result.return_code, 0,
        "Failed to upload manifest: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Enable repo from Repository Set
    result = RepositorySet.enable({
        u'name': rhel_repo_set,
        u'organization-id': new_org['id'],
        u'product': rhel_product_name,
        u'releasever': '6Server',
        u'basearch': 'x86_64',
    })
    self.assertEqual(
        result.return_code, 0,
        "Repo was not enabled: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Fetch repository info
    result = Repository.info({
        u'name': rhel_repo_name,
        u'product': rhel_product_name,
        u'organization-id': new_org['id'],
    })
    rhel_repo = result.stdout
    # Synchronize the repository
    result = Repository.synchronize({
        u'name': rhel_repo_name,
        u'organization-id': new_org['id'],
        u'product': rhel_product_name,
    })
    self.assertEqual(
        result.return_code, 0,
        "Repo was not synchronized: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Create CV and associate repo to it
    new_cv = make_content_view({u'organization-id': new_org['id']})
    result = ContentView.add_repository({
        u'id': new_cv['id'],
        u'repository-id': rhel_repo['id'],
        u'organization-id': new_org['id'],
    })
    self.assertEqual(
        result.return_code, 0,
        "Failed repository association: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Publish a version1 of CV
    result = ContentView.publish({u'id': new_cv['id']})
    self.assertEqual(
        result.return_code, 0,
        "Version1 publishing failed: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Get the CV info
    result = ContentView.info({u'id': new_cv['id']})
    self.assertEqual(
        result.return_code, 0,
        "ContentView was not found: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Store the version1 id
    version1_id = result.stdout['versions'][0]['id']
    # Promotion of version1 to next env
    result = ContentView.version_promote({
        u'id': version1_id,
        u'to-lifecycle-environment-id': new_env['id'],
    })
    self.assertEqual(
        result.return_code, 0,
        "version1 promotion failed: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Create activation key
    activation_key = make_activation_key({
        u'name': gen_alphanumeric(),
        u'lifecycle-environment-id': new_env['id'],
        u'organization-id': new_org['id'],
        u'content-view': new_cv['name'],
    })
    # List the subscriptions in given org
    result = Subscription.list(
        {u'organization-id': new_org['id']},
        per_page=False
    )
    self.assertEqual(
        result.return_code, 0,
        "Failed to list subscriptions: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Get the subscription ID from subscriptions list
    # NOTE(review): if no subscription matches, subscription_id /
    # subscription_quantity stay unbound and the code below raises
    # NameError rather than a clear assertion - confirm acceptable.
    for subscription in result.stdout:
        if subscription['name'] == "Red Hat Employee Subscription":
            subscription_id = subscription['id']
            subscription_quantity = int(subscription['quantity'])
    self.assertGreater(
        int(subscription_quantity), 0,
        'Unexpected subscription quantity {0}'
        .format(subscription_quantity)
    )
    # Add the subscriptions to activation-key
    result = ActivationKey.add_subscription({
        u'id': activation_key['id'],
        u'subscription-id': subscription_id,
        u'quantity': 1,
    })
    self.assertEqual(
        result.return_code, 0,
        "Failed to add subscription: {0} and return code: {1}"
        .format(result.stderr, result.return_code)
    )
    # Create VM
    package_name = "python-kitchen"
    server_name = conf.properties['main.server.hostname']
    with VirtualMachine(distro='rhel66') as vm:
        # Download and Install rpm
        result = vm.run(
            "wget -nd -r -l1 --no-parent -A '*.noarch.rpm' http://{0}/pub/"
            .format(server_name)
        )
        self.assertEqual(
            result.return_code, 0,
            "failed to fetch katello-ca rpm: {0}, return code: {1}"
            .format(result.stderr, result.return_code)
        )
        result = vm.run(
            'rpm -i katello-ca-consumer*.noarch.rpm'
        )
        self.assertEqual(
            result.return_code, 0,
            "failed to install katello-ca rpm: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Register client with foreman server using activation-key
        result = vm.run(
            u'subscription-manager register --activationkey {0} '
            '--org {1} --force'
            .format(activation_key['name'], new_org['label'])
        )
        self.assertEqual(
            result.return_code, 0,
            "failed to register client:: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Enable Red Hat Enterprise Virtualization Agents repo via cli
        # As the below repo is disabled by default under ak's prd-content
        result = vm.run(
            'subscription-manager repos --enable '
            'rhel-6-server-rhev-agent-rpms'
        )
        self.assertEqual(
            result.return_code, 0,
            "Enabling repo failed: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Install contents from sat6 server
        result = vm.run('yum install -y {0}'.format(package_name))
        self.assertEqual(
            result.return_code, 0,
            "Package install failed: {0} and return code: {1}"
            .format(result.stderr, result.return_code)
        )
        # Verify if package is installed by query it
        result = vm.run('rpm -q {0}'.format(package_name))
        self.assertIn(package_name, result.stdout[0])
# Can we find the new object? result = GPGKey().exists({'organization-id': self.org['id']}, (self.search_key, new_obj[self.search_key])) self.assertEqual(result.return_code, 0, "Failed to create object") self.assertEqual(len(result.stderr), 0, "There should not be an exception here") self.assertEqual(new_obj[self.search_key], result.stdout[self.search_key]) # Setup a new key file data['key'] = '/tmp/%s' % generate_name() gpg_key = self.create_gpg_key_file() self.assertIsNotNone(gpg_key, 'GPG Key file must be created') ssh.upload_file(local_file=gpg_key, remote_file=data['key']) # Try to create a gpg key with the same name new_obj = GPGKey().create(data) self.assertNotEqual(new_obj.return_code, 0, "Object should not be created") self.assertGreater(len(new_obj.stderr), 0, "Should have raised an exception") @data(*positive_create_data()) def test_negative_create_2(self, data): """ @test: Create gpg key with valid name and no gpg key @feature: GPG Keys @assert: gpg key is not created """