def cert_setup(self, cert_data):
    """Generate the CA cert and the Satellite cert on the remote host."""
    # ssl-build needs a subdirectory named after the Capsule.
    with get_connection(timeout=100) as connection:
        connection.run('mkdir ssl-build/{0}'.format(cert_data['capsule_hostname']))
        # Creation may fail when the directory already exists; just check
        # it is present afterwards.
        # NOTE(review): connection.run() return objects are usually always
        # truthy, so this assert may not really verify existence — confirm.
        assert connection.run('test -e ssl-build/{0}'.format(cert_data['capsule_hostname']))
    # Push the helper scripts and the openssl config to the server.
    for helper in ('generate-ca.sh', 'generate-crt.sh', 'openssl.cnf'):
        upload_file(local_file=get_data_file(helper), remote_file=helper)
    # Create the CA cert.
    with get_connection(timeout=300) as connection:
        connection.run('echo 100001 > serial')
        result = connection.run("bash generate-ca.sh")
        assert result.return_code == 0
    # Create the Satellite's cert, signed by the CA above.
    with get_connection(timeout=300) as connection:
        result = connection.run(
            "yes | bash {} {}".format('generate-crt.sh', cert_data['sat6_hostname'])
        )
        assert result.return_code == 0
def setUpClass(cls):
    """Get host name, scripts, and create working directory."""
    super(KatelloCertsCheckTestCase, cls).setUpClass()
    _, cls.sat6_hostname = os.path.split(settings.server.hostname)
    cls.capsule_hostname = 'capsule.example.com'
    cls.key_file_name = '{0}/{0}.key'.format(cls.sat6_hostname)
    cls.cert_file_name = '{0}/{0}.crt'.format(cls.sat6_hostname)
    cls.ca_bundle_file_name = 'cacert.crt'
    cls.SUCCESS_MSG = "Validation succeeded"
    # ssl-build needs a subdirectory named after the Capsule.
    with get_connection(timeout=100) as connection:
        connection.run('mkdir ssl-build/{0}'.format(cls.capsule_hostname))
        # Ignore creation error, but assert directory exists
        assert connection.run('test -e ssl-build/{0}'.format(cls.capsule_hostname))
    # Push the helper scripts and the openssl config to the server.
    for helper in ('generate-ca.sh', 'generate-crt.sh', 'openssl.cnf'):
        upload_file(local_file=get_data_file(helper), remote_file=helper)
    # Create the CA cert.
    with get_connection(timeout=300) as connection:
        connection.run('echo 100001 > serial')
        result = connection.run("bash generate-ca.sh")
        assert result.return_code == 0
    # Create the Satellite's cert, signed by the CA above.
    with get_connection(timeout=300) as connection:
        result = connection.run(
            "yes | bash {} {}".format('generate-crt.sh', cls.sat6_hostname)
        )
        assert result.return_code == 0
def tailoring_file_path(session_target_sat):
    """Upload the tailoring file to the satellite and return both paths."""
    local = get_data_file(OSCAP_TAILORING_FILE)
    satellite = f'/tmp/{OSCAP_TAILORING_FILE}'
    session_target_sat.put(local_path=local, remote_path=satellite)
    return {'local': local, 'satellite': satellite}
def setUpClass(cls):
    """Resolve data files and create the org/location used by the tests."""
    super(TailoringFilesTestCase, cls).setUpClass()
    cls.tailoring_path = get_data_file(settings.oscap.tailoring_path)
    # Smart proxy running on the Satellite server itself.
    sat_proxy = entities.SmartProxy().search(
        query={u'search': u'name={0}'.format(settings.server.hostname)}
    )[0]
    location = entities.Location(name=gen_string('alpha')).create()
    cls.loc_name = location.name
    organization = entities.Organization(
        name=gen_string('alpha'), smart_proxy=[sat_proxy]
    ).create()
    cls.org_name = organization.name
    cls.content_path = get_data_file(settings.oscap.content_path)
def generate_certs(self):
    """Upload the cert-generation helpers and run them on the server."""
    for helper in ('certs.sh', 'extensions.txt'):
        upload_file(local_file=get_data_file(helper), remote_file=helper)
    with get_connection(timeout=300) as connection:
        result = connection.run("bash certs.sh")
        assert result.return_code == 0
def setUpClass(cls):
    """Resolve the oscap data files and create org/location for the tests."""
    super(TailoringFilesTestCase, cls).setUpClass()
    cls.tailoring_path = get_data_file(settings.oscap.tailoring_path)
    # The capsule co-located with the Satellite server.
    server_proxy = entities.SmartProxy().search(
        query={u'search': u'name={0}'.format(settings.server.hostname)}
    )[0]
    new_loc = entities.Location(name=gen_string('alpha')).create()
    cls.loc_name = new_loc.name
    new_org = entities.Organization(
        name=gen_string('alpha'), smart_proxy=[server_proxy]
    ).create()
    cls.org_name = new_org.name
    cls.content_path = get_data_file(settings.oscap.content_path)
def test_external_disa_scap_content(session, default_org, default_location):
    """Create OpenScap content with external DISA SCAP content.

    :id: 5f29254e-7c15-45e1-a2ec-4da1d3d8d74d

    :Steps:

        1. Create an openscap content with external DISA SCAP content.
        2. Assert that openscap content has been created.

    :expectedresults: External DISA SCAP content uploaded successfully.

    :BZ: 2053478

    :customerscenario: true

    :CaseImportance: Medium
    """
    content_name = gen_string('alpha')
    with session:
        # Work in the default org/location context.
        session.organization.select(org_name=default_org.name)
        session.location.select(loc_name=default_location.name)
        session.oscapcontent.create(
            {
                'file_upload.title': content_name,
                'file_upload.scap_file': get_data_file(
                    'U_RHEL_7_V3R6_STIG_SCAP_1-2_Benchmark.xml'
                ),
            }
        )
        assert session.oscapcontent.search(content_name)[0]['Title'] == content_name
def test_positive_check_custom_package_details(self):
    """Upload custom rpm package to repository. Search for package
    and then open it. Check that package details are available

    :id: 679622a7-003e-4887-8622-b95b9468da7d

    :expectedresults: Package is present inside of repository and it
        possible to view its details

    :CaseLevel: Integration

    :BZ: 1387766
    """
    package_name = RPM_TO_UPLOAD.split('-')[0]
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        self.yum_repo.upload_content(files={'content': handle})
    with Session(self) as session:
        session.nav.go_to_select_org(self.organization.name)
        self.package.select_repo(self.yum_repo.name)
        self.package.search_and_click(package_name)
        # Opening the package page must not raise an error alert.
        self.assertIsNone(
            self.activationkey.wait_until_element(common_locators['alert.error'])
        )
        self.package.check_package_details(
            package_name, [['Filename', RPM_TO_UPLOAD]]
        )
def test_positive_check_custom_package_details(
        session, module_org, module_yum_repo):
    """Upload custom rpm package to repository. Search for package
    and then open it. Check that package details are available

    :id: 679622a7-003e-4887-8622-b95b9468da7d

    :expectedresults: Package is present inside of repository and it
        possible to view its details

    :CaseLevel: Integration

    :BZ: 1387766, 1394390
    """
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        module_yum_repo.upload_content(files={'content': handle})
    with session:
        session.organization.select(org_name=module_org.name)
        # The uploaded file must be searchable by exact filename.
        search_result = session.package.search(
            'filename = {0}'.format(RPM_TO_UPLOAD),
            repository=module_yum_repo.name,
        )
        assert search_result[0]['RPM'] == RPM_TO_UPLOAD.replace('.rpm', '')
        package_details = session.package.read(
            RPM_TO_UPLOAD.split('-')[0], repository=module_yum_repo.name
        )['details']
        assert package_details['filename'] == RPM_TO_UPLOAD
def test_positive_update_file_for_imported_content(self):
    """Create gpg key with valid name and valid gpg key via file
    import then update its gpg key file

    :id: 9f74b337-3ea5-48a1-af6e-d72ab41c2348

    :expectedresults: gpg key is updated

    :CaseImportance: Critical
    """
    key_name = gen_string('alpha')
    replacement_key = get_data_file(VALID_GPG_KEY_BETA_FILE)
    with Session(self) as session:
        make_gpgkey(
            session,
            key_path=self.key_path,
            name=key_name,
            org=self.organization.name,
            upload_key=True,
        )
        self.assertIsNotNone(self.gpgkey.search(key_name))
        # Swap the key file and expect a success alert.
        self.gpgkey.update(key_name, new_key=replacement_key)
        self.assertIsNotNone(
            self.gpgkey.wait_until_element(common_locators['alert.success_sub_form'])
        )
def make_file_repository_upload_contents(self, options=None):
    """Make a new File repository, upload a file (or directory of files)
    into it and assert the upload succeeded.

    :param options: optional dict of repository-creation options; must
        contain a truthy 'content-type'. A truthy 'multi_upload' entry
        switches to uploading the whole local data directory instead of
        a single file.
    :return: the refreshed repository info dict.
    :raises CLIFactoryError: when no content type is provided.
    """
    if options is None:
        options = {
            'name': self.file_repo_name,
            'product-id': self.product['id'],
            'content-type': 'file',
        }
    if not options.get('content-type'):
        raise CLIFactoryError('Please provide a valid Content Type.')
    file_repo = make_repository(options)
    # Idiomatic truthiness check replaces the original
    # `'multi_upload' not in options or not options['multi_upload']`.
    if options.get('multi_upload'):
        # Upload the whole data directory into a fresh remote directory.
        remote_path = "/tmp/{}/".format(gen_string('alpha'))
        ssh.upload_files(local_dir=os.getcwd() + "/../data/",
                         remote_dir=remote_path)
    else:
        remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
        ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD),
                        remote_file=remote_path)
    result = Repository.upload_content(
        {
            'name': file_repo['name'],
            'organization': file_repo['organization'],
            'path': remote_path,
            'product-id': file_repo['product']['id'],
        }
    )
    self.assertIn(
        "Successfully uploaded file '{0}'".format(RPM_TO_UPLOAD),
        result[0]['message']
    )
    file_repo = Repository.info({'id': file_repo['id']})
    # At least one file must now be counted in the repository.
    self.assertGreater(int(file_repo['content-counts']['files']), 0)
    return file_repo
def test_positive_add_template(self):
    """Add config template by using location name and config template name.

    :id: 8faf60d1-f4d6-4a58-a484-606a42957ce7

    :expectedresults: config template is added.

    :CaseLevel: Integration
    """
    strategy, value = common_locators['all_values_selection']
    with Session(self) as session:
        for template in generate_strings_list():
            with self.subTest(template):
                # Fresh location per template name under test.
                loc_name = gen_string('alpha')
                make_loc(session, name=loc_name)
                self.assertIsNotNone(self.location.search(loc_name))
                make_templates(
                    session,
                    name=template,
                    template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
                    custom_really=True,
                    template_type='Provisioning template',
                )
                self.assertIsNotNone(self.template.search(template))
                # The template must show up on the location's template tab.
                self.location.search_and_click(loc_name)
                session.nav.click(tab_locators['context.tab_template'])
                found = session.nav.wait_until_element((strategy, value % template))
                self.assertIsNotNone(found)
def test_positive_check_custom_package_details(self):
    """Upload custom rpm package to repository. Search for package
    and then open it. Check that package details are available

    :id: 679622a7-003e-4887-8622-b95b9468da7d

    :expectedresults: Package is present inside of repository and it
        possible to view its details

    :CaseLevel: Integration

    :BZ: 1384673
    """
    pkg_name = RPM_TO_UPLOAD.split('-')[0]
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        self.yum_repo.upload_content(files={'content': handle})
    with Session(self.browser) as session:
        session.nav.go_to_select_org(self.organization.name)
        self.package.select_repo(self.yum_repo.name)
        self.package.search_and_click(pkg_name)
        # Opening the package page must not raise an error alert.
        self.assertIsNone(
            self.activationkey.wait_until_element(common_locators['alert.error'])
        )
        self.package.check_package_details(pkg_name, [['Filename', RPM_TO_UPLOAD]])
def test_positive_update_4(self):
    """@test: Create gpg key with valid name and valid gpg key text via
    cut and paste/string then update its gpg key text

    @feature: GPG Keys

    @assert: gpg key is updated

    @bz: 1204602
    """
    key_name = gen_string('alpha', 6)
    replacement_key = get_data_file(VALID_GPG_KEY_BETA_FILE)
    with Session(self.browser) as session:
        make_gpgkey(
            session,
            key_content=self.key_content,
            name=key_name,
            org=self.organization.name,
        )
        self.assertIsNotNone(self.gpgkey.search(key_name))
        # Replace the pasted key text with an imported key file.
        self.gpgkey.update(key_name, new_key=replacement_key)
        self.assertIsNotNone(
            self.gpgkey.wait_until_element(common_locators['alert.success'])
        )
def test_positive_add_template(self):
    """Add config template by using location name and config template name.

    @feature: Locations

    @assert: config template is added.
    """
    strategy, value = common_locators['all_values_selection']
    with Session(self.browser) as session:
        for template in generate_strings_list():
            with self.subTest(template):
                # Fresh location per template name under test.
                loc_name = gen_string('alpha')
                make_loc(session, name=loc_name)
                self.assertIsNotNone(self.location.search(loc_name))
                make_templates(
                    session,
                    name=template,
                    template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
                    custom_really=True,
                    template_type='provision',
                )
                self.assertIsNotNone(self.template.search(template))
                # The template must appear on the location's template tab.
                self.location.search(loc_name).click()
                session.nav.click(tab_locators['context.tab_template'])
                found = session.nav.wait_until_element((strategy, value % template))
                self.assertIsNotNone(found)
def test_update_template_os(self):
    """@Test: Creates new template, along with two OS's and associate list
    of OS's with created template

    @Feature: Template - Positive Update

    @Assert: The template should be updated with newly created OS's
    successfully
    """
    name = gen_string('alpha')
    new_name = gen_string('alpha')
    # Two freshly created operating systems to associate.
    os_list = [entities.OperatingSystem().create().name for _ in range(2)]
    with Session(self.browser) as session:
        make_templates(
            session,
            name=name,
            template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
            custom_really=True,
            template_type='provision',
        )
        self.assertIsNotNone(self.template.search(name))
        # Rename and attach the OS list in a single update.
        self.template.update(name, False, new_name, new_os_list=os_list)
        self.assertIsNotNone(self.template.search(new_name))
def test_clone_template(self):
    """@Test: Assure ability to clone a provisioning template

    @Feature: Template - Clone

    @Steps:
     1. Go to Provisioning template UI
     2. Choose a template and attempt to clone it

    @Assert: template is cloned
    """
    name = gen_string('alpha')
    clone_name = gen_string('alpha')
    # Two freshly created operating systems for the clone.
    os_list = [entities.OperatingSystem().create().name for _ in range(2)]
    with Session(self.browser) as session:
        make_templates(
            session,
            name=name,
            template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
            custom_really=True,
            template_type='provision',
        )
        self.assertIsNotNone(self.template.search(name))
        self.template.clone(
            name,
            custom_really=False,
            clone_name=clone_name,
            os_list=os_list,
        )
        self.assertIsNotNone(self.template.search(clone_name))
def test_positive_add_template(self):
    """Add config template by using location name and config template name.

    :id: 8faf60d1-f4d6-4a58-a484-606a42957ce7

    :expectedresults: config template is added.

    :CaseLevel: Integration
    """
    strategy, value = common_locators['all_values_selection']
    with Session(self.browser) as session:
        for template in generate_strings_list():
            with self.subTest(template):
                # One fresh location per template name under test.
                loc_name = gen_string('alpha')
                make_loc(session, name=loc_name)
                self.assertIsNotNone(self.location.search(loc_name))
                make_templates(
                    session,
                    name=template,
                    template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
                    custom_really=True,
                    template_type='Provisioning template',
                )
                self.assertIsNotNone(self.template.search(template))
                # The template must appear on the location's template tab.
                self.location.search_and_click(loc_name)
                session.nav.click(tab_locators['context.tab_template'])
                found = session.nav.wait_until_element((strategy, value % template))
                self.assertIsNotNone(found)
def test_positive_incremental_update_puppet():
    """Incrementally update a CVV with a puppet module.

    :id: 19b2fe3b-6c91-4713-9910-17517fba661f

    :expectedresults: The incremental update succeeds with no errors, and
        the content view is given an additional version.

    :CaseLevel: Integration
    """
    # Content view with a yum repository, published once.
    product = entities.Product().create()
    yum_repo = entities.Repository(content_type='yum', product=product).create()
    content_view = entities.ContentView(
        organization=product.organization, repository=[yum_repo]
    ).create()
    content_view.publish()
    content_view = content_view.read()
    # Puppet repository with a single uploaded module.
    puppet_repo = entities.Repository(content_type='puppet', product=product).create()
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        puppet_repo.upload_content(files={'content': handle})
    # Uploading content does not seem to create a task we could poll for
    # status, so verify we actually got modules back before proceeding.
    puppet_modules = content_view.available_puppet_modules()['results']
    assert len(puppet_modules) > 0
    puppet_module = entities.PuppetModule(id=puppet_modules[0]['id'])
    # Incrementally update the CVV with the puppet module.
    payload = {
        'content_view_version_environments': [
            {
                'content_view_version_id': content_view.version[0].id,
                'environment_ids': [
                    environment.id
                    for environment in content_view.version[0].read().environment
                ],
            }
        ],
        'add_content': {'puppet_module_ids': [puppet_module.id]},
    }
    content_view.version[0].incremental_update(data=payload)
    content_view = content_view.read()
    # The CV now has two versions: the first has no puppet modules and the
    # second has exactly one — verify that ordering by 'minor'.
    # NOTE: The `read_json` lines should be refactored after the 'minor'
    assert len(content_view.version) == 2
    for i in range(len(content_view.version)):
        content_view.version[i] = content_view.version[i].read()
    content_view.version.sort(key=lambda cvv: cvv.read_json()['minor'])
    assert len(content_view.version[0].puppet_module) == 0
    assert len(content_view.version[1].puppet_module) == 1
    assert content_view.version[1].puppet_module[0].id == puppet_module.id
def setUpClass(cls):
    """Create a product. Make it available as ``cls.product``."""
    super(ContentViewVersionSearchTestCase, cls).setUpClass()
    cls.product = entities.Product().create()
    puppet_repo = entities.Repository(
        content_type="puppet", product=cls.product
    ).create()
    # Seed the repository with one puppet module.
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), "rb") as handle:
        puppet_repo.upload_content(files={"content": handle})
def test_positive_check_custom_package_details(session, module_org, module_yum_repo):
    """Upload custom rpm package to repository. Search for package
    and then open it. Check that package details are available

    :id: 679622a7-003e-4887-8622-b95b9468da7d

    :expectedresults: Package is present inside of repository and it
        possible to view its details

    :CaseLevel: Integration

    :BZ: 1387766, 1394390
    """
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        module_yum_repo.upload_content(files={'content': handle})
    with session:
        session.organization.select(org_name=module_org.name)
        # The uploaded file must be searchable by exact filename.
        matches = session.package.search(
            'filename = {0}'.format(RPM_TO_UPLOAD),
            repository=module_yum_repo.name,
        )
        assert matches[0]['RPM'] == RPM_TO_UPLOAD.replace('.rpm', '')
        details = session.package.read(
            RPM_TO_UPLOAD.split('-')[0], repository=module_yum_repo.name
        )['details']
        assert details['filename'] == RPM_TO_UPLOAD
def create(cls, options=None):
    """Creates a new record using the arguments passed via dictionary.

    The template body is materialized in a temporary file; for known data
    files the content is also uploaded to the remote machine under the
    same path before the create command runs.

    :param options: dict of CLI options; must include a 'file' entry
        holding either template content or ``REPORT_TEMPLATE_FILE``.
    :return: info dict of the created object when it can be fetched,
        otherwise the raw csv output of the create command.
    :raises CLIError: when no file content is supplied, or when a
        Katello object is created without 'organization-id'.
    """
    cls.command_sub = 'create'
    if options is None:
        options = {}
    # Use .get() so a missing 'file' key raises the intended CLIError
    # instead of a bare KeyError (original used options['file']).
    if options.get('file') is None:
        tmpl = 'file content is required for {0}.creation'
        raise CLIError(tmpl.format(cls.__name__))
    if options['file'] == REPORT_TEMPLATE_FILE:
        local_path = get_data_file(REPORT_TEMPLATE_FILE)
    else:
        local_path = ''
    # --- create file at remote machine --- #
    (_, layout) = mkstemp(text=True)
    chmod(layout, 0o700)
    if not local_path:
        # Inline content: write it straight into the temp file.
        with open(layout, 'w') as rt:
            rt.write(options['file'])
        # End - Special handling of temporary file
    else:
        # Known data file: copy its content and upload to the remote
        # machine under the identical path.
        with open(local_path, 'r') as file:
            file_data = file.read()
        with open(layout, 'w') as rt:
            rt.write(file_data)
        ssh.upload_file(local_file=layout, remote_file=layout)
    # --------------------------------------
    options['file'] = layout
    result = cls.execute(cls._construct_command(options), output_format='csv')
    # Extract new object ID if it was successfully created
    if len(result) > 0 and 'id' in result[0]:
        obj_id = result[0]['id']
        # Fetch new object. Some Katello objects require the
        # organization-id for subcommands.
        info_options = {'id': obj_id}
        if cls.command_requires_org:
            if 'organization-id' not in options:
                tmpl = 'organization-id option is required for {0}.create'
                raise CLIError(tmpl.format(cls.__name__))
            info_options['organization-id'] = options['organization-id']
        new_obj = cls.info(info_options)
        # stdout should be a dictionary containing the object
        if len(new_obj) > 0:
            result = new_obj
    return result
def setUpClass(cls):
    """Create a product. Make it available as ``cls.product``."""
    super().setUpClass()
    cls.product = entities.Product().create()
    module_repo = entities.Repository(
        content_type='puppet', product=cls.product
    ).create()
    # Seed the repository with one puppet module.
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        module_repo.upload_content(files={'content': handle})
def setUpClass(cls):
    """Create a puppet repository pre-loaded with the ntp module."""
    super(PuppetModuleTestCase, cls).setUpClass()
    session_product = entities.Product(organization=cls.session_org).create()
    cls.repo = entities.Repository(
        product=session_product,
        content_type='puppet',
    ).create()
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        cls.repo.upload_content(files={'content': handle})
def setUpClass(cls):
    """Resolve the oscap content file and bind a fresh org to the proxy."""
    super(OpenScapContentTestCase, cls).setUpClass()
    cls.content_path = get_data_file(settings.oscap.content_path)
    new_org = entities.Organization(name=gen_string('alpha')).create()
    cls.org_name = new_org.name
    sat_proxy = entities.SmartProxy().search(
        query={u'search': u'name={0}'.format(settings.server.hostname)}
    )[0]
    sat_proxy.organization = [new_org]
def setUpClass(cls):
    """Create a product. Make it available as ``cls.product``."""
    cls.product = entities.Product().create()
    puppet_repository = entities.Repository(
        content_type='puppet',
        product=cls.product,
    ).create()
    # Seed the repository with one puppet module.
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        puppet_repository.upload_content(files={'content': handle})
def test_cv_preupgrade_scenario(self, request):
    """Pre-upgrade scenario that creates content-view with various
    repositories.

    :id: a4ebbfa1-106a-4962-9c7c-082833879ae8

    :steps:
        1. Create custom repositories of yum, puppet and file type.
        2. Create content-view.
        3. Add yum, file repositories and puppet module in the content view.
        4. Publish the content-view.

    :expectedresults: Content-view created with various repositories.
    """
    test_name = request.node.name
    puppet_module = {'name': 'versioned', 'version': '3.3.3'}
    org = entities.Organization(name=f'{request.node.name}_org').create()
    product = entities.Product(
        organization=org, name=f'{request.node.name}_prod'
    ).create()
    # Yum repository, synced from the fake repo.
    yum_repository = entities.Repository(
        product=product, name=f'{test_name}_yum_repo', url=FAKE_1_YUM_REPO
    ).create()
    entities.Repository.sync(yum_repository)
    # Puppet repository, synced from the custom puppet repo.
    puppet_repository = entities.Repository(
        product=product,
        name=f'{request.node.name}_puppet_repo',
        content_type="puppet",
        url=CUSTOM_PUPPET_REPO,
    ).create()
    entities.Repository.sync(puppet_repository)
    puppet_module_list = PuppetModule.list(
        {'search': 'name={name} and version={version}'.format(**puppet_module)}
    )[0]
    # File repository with one manually uploaded file.
    file_repository = entities.Repository(
        product=product, name=f'{test_name}_file_repo', content_type="file"
    ).create()
    remote_file_path = f"/tmp/{RPM_TO_UPLOAD}"
    ssh.upload_file(
        local_file=get_data_file(RPM_TO_UPLOAD), remote_file=remote_file_path
    )
    with open(f'{get_data_file(RPM_TO_UPLOAD)}', "rb") as content:
        file_repository.upload_content(files={'content': content})
    assert RPM_TO_UPLOAD in file_repository.files()["results"][0]['name']
    # Content view gathering yum + file repos and the puppet module.
    cv = entities.ContentView(name=f"{test_name}_cv", organization=org).create()
    cv.repository = [yum_repository, file_repository]
    cv.update(['repository'])
    ContentView.puppet_module_add(
        {
            'content-view-id': cv.id,
            'name': puppet_module_list['name'],
            'author': puppet_module_list['author'],
        }
    )
    cv.publish()
    assert len(cv.puppet_module) == 0
    assert len(cv.read_json()['versions']) == 1
def test_positive_uploaded_content_library_sync(
    self, module_capsule_configured, function_org, function_product, function_lce_library
):
    """Ensure custom repo with no upstream url and manually uploaded
    content after publishing to Library is synchronized to capsule
    automatically

    :id: f5406312-dd31-4551-9f03-84eb9c3415f5

    :customerscenario: true

    :BZ: 1340686

    :expectedresults: custom content is present on external capsule

    :CaseLevel: System
    """
    # Repo without an upstream URL — content only arrives via upload.
    repo = entities.Repository(product=function_product, url=None).create()
    # Associate the lifecycle environment with the capsule.
    module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment(
        data={'environment_id': function_lce_library.id}
    )
    result = module_capsule_configured.nailgun_capsule.content_lifecycle_environments()
    assert len(result['results']) >= 1
    assert function_lce_library.id in [
        capsule_lce['id'] for capsule_lce in result['results']
    ]
    # Content view carrying the repository.
    cv = entities.ContentView(organization=function_org, repository=[repo]).create()
    # Upload custom content into the repo.
    with open(get_data_file(constants.RPM_TO_UPLOAD), 'rb') as handle:
        repo.upload_content(files={'content': handle})
    assert repo.read().content_counts['rpm'] == 1
    # Publish a new version of the content view.
    cv.publish()
    cv = cv.read()
    assert len(cv.version) == 1
    self.wait_for_sync(module_capsule_configured)
    # Verify the RPM was published on the Capsule.
    caps_repo_url = form_repo_url(
        module_capsule_configured,
        org=function_org.label,
        lce=function_lce_library.label,
        cv=cv.label,
        prod=function_product.label,
        repo=repo.label,
    )
    caps_files = get_repo_files_by_url(caps_repo_url)
    assert len(caps_files) == 1
    assert caps_files[0] == constants.RPM_TO_UPLOAD
def test_positive_search_single_result(self):
    """Search for puppet modules in a non-empty repository.

    @Assert: Only the modules in that repository are returned.

    @Feature: PuppetModule
    """
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        self.repository.upload_content(files={'content': handle})
    # Exactly one module was uploaded, so the scoped search returns one.
    modules = entities.PuppetModule().search(
        query={'repository_id': self.repository.id}
    )
    self.assertEqual(len(modules), 1)
def test_positive_search_single_result(self):
    """Search for puppet modules in a non-empty repository.

    @id: 5337b2be-e207-4580-8407-19b88cb40403

    @Assert: Only the modules in that repository are returned.
    """
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), "rb") as handle:
        self.repository.upload_content(files={"content": handle})
    # One module uploaded → the repo-scoped search returns exactly one.
    found = entities.PuppetModule().search(
        query={"repository_id": self.repository.id}
    )
    self.assertEqual(len(found), 1)
def test_positive_mirroring_policy(self, target_sat):
    """Assert that the content of a repository with 'Mirror Policy' enabled
    is restored properly after resync.

    :id: cbf1c781-cb96-4b4a-bae2-15c9f5be5e50

    :steps:
        1. Create and sync a repo with 'Mirror Policy - mirror complete' enabled.
        2. Remove all packages from the repo and upload another one.
        3. Resync the repo again.
        4. Check the content was restored properly.

    :expectedresults:
        1. The resync restores the original content properly.

    :CaseLevel: System
    """
    repo_url = settings.repos.yum_0.url
    packages_count = constants.FAKE_0_YUM_REPO_PACKAGES_COUNT
    org = entities.Organization().create()
    prod = entities.Product(organization=org).create()
    repo = entities.Repository(
        download_policy='immediate',
        mirroring_policy='mirror_complete',
        product=prod,
        url=repo_url,
    ).create()
    repo.sync()
    repo = repo.read()
    assert repo.content_counts['rpm'] == packages_count
    # Remove all packages from the repo and upload a different one.
    packages = entities.Package(repository=repo).search(query={'per_page': '1000'})
    repo.remove_content(data={'ids': [package.id for package in packages]})
    with open(get_data_file(constants.RPM_TO_UPLOAD), 'rb') as handle:
        repo.upload_content(files={'content': handle})
    repo = repo.read()
    assert repo.content_counts['rpm'] == 1
    files = get_repo_files_by_url(repo.full_path)
    assert len(files) == 1
    assert constants.RPM_TO_UPLOAD in files
    # Resync: the mirror policy must restore the upstream content exactly,
    # dropping the manually uploaded package.
    repo.sync()
    repo = repo.read()
    assert repo.content_counts['rpm'] == packages_count
    files = get_repo_files_by_url(repo.full_path)
    assert len(files) == packages_count
    assert constants.RPM_TO_UPLOAD not in files
def test_positive_update_contents(self):
    """Create a repository and upload RPM contents.

    @Assert: The repository's contents include one RPM.

    @Feature: Repository
    """
    # Create a repository and upload RPM content into it.
    new_repo = entities.Repository(product=self.product).create()
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        new_repo.upload_content(files={'content': handle})
    # The refreshed repo must count exactly one rpm.
    self.assertEqual(new_repo.read().content_counts['rpm'], 1)
def test_positive_search_single_result(self):
    """Search for puppet modules in a non-empty repository.

    :id: 5337b2be-e207-4580-8407-19b88cb40403

    :expectedresults: Only the modules in that repository are returned.

    :CaseImportance: Critical
    """
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        self.repository.upload_content(files={'content': handle})
    # One module uploaded → the repo-scoped search returns exactly one.
    results = entities.PuppetModule().search(
        query={'repository_id': self.repository.id}
    )
    self.assertEqual(len(results), 1)
def test_positive_upload_contents_srpm(self):
    """Create a repository and upload SRPM contents.

    @id: e091a725-048f-44ca-90cc-c016c450ced9

    @Assert: The repository's contents include one SRPM.
    """
    # Create a repository and upload source RPM content into it.
    new_repo = entities.Repository(product=self.product).create()
    with open(get_data_file(SRPM_TO_UPLOAD), 'rb') as handle:
        new_repo.upload_content(files={'content': handle})
    # NOTE(review): the docstring promises an SRPM, yet the assertion
    # inspects the 'rpm' content count — confirm whether SRPM uploads are
    # really tallied under 'rpm' or should be checked under a dedicated key.
    self.assertEqual(new_repo.read().content_counts['rpm'], 1)
def setUpClass(cls):
    """Resolve the oscap content file and bind a new org to the proxy."""
    super(OpenScapContentTestCase, cls).setUpClass()
    cls.content_path = get_data_file(settings.oscap.content_path)
    scap_org = entities.Organization(name=gen_string('alpha')).create()
    cls.org_name = scap_org.name
    server_proxy = entities.SmartProxy().search(
        query={u'search': u'name={0}'.format(settings.server.hostname)}
    )[0]
    server_proxy.organization = [scap_org]
def test_positive_update_contents(self):
    """Create a repository and upload RPM contents.

    @Assert: The repository's contents include one RPM.

    @Feature: Repository
    """
    # Create a repository and upload RPM content into it.
    target_repo = entities.Repository(product=self.product).create()
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        target_repo.upload_content(files={'content': handle})
    # The server-side counts must now report exactly one rpm.
    self.assertEqual(target_repo.read_json()[u'content_counts'][u'rpm'], 1)
def setUpClass(cls):  # noqa
    """Set up organization, product and repositories for tests."""
    super(ContentViewPublishPromoteTestCase, cls).setUpClass()
    cls.org = entities.Organization().create()
    cls.product = entities.Product(organization=cls.org).create()
    # Synced yum repository.
    cls.yum_repo = entities.Repository(product=cls.product).create()
    cls.yum_repo.sync()
    # Synced puppet repository, plus one extra uploaded module.
    cls.puppet_repo = entities.Repository(
        content_type='puppet',
        product=cls.product.id,
        url=FAKE_0_PUPPET_REPO,
    ).create()
    cls.puppet_repo.sync()
    with open(get_data_file(PUPPET_MODULE_NTP_PUPPETLABS), 'rb') as handle:
        cls.puppet_repo.upload_content(files={'content': handle})
def test_positive_update_file_for_pasted_content(self):
    """Create gpg key with valid name and valid gpg key text via
    cut and paste/string then update its gpg key text

    @id: 07902ef6-a918-433a-9dad-d5376c3dd001

    @assert: gpg key is updated
    """
    key_name = gen_string("alpha")
    replacement_key = get_data_file(VALID_GPG_KEY_BETA_FILE)
    with Session(self.browser) as session:
        make_gpgkey(
            session,
            key_content=self.key_content,
            name=key_name,
            org=self.organization.name,
        )
        self.assertIsNotNone(self.gpgkey.search(key_name))
        # Replace the pasted key text and expect the success alert.
        self.gpgkey.update(key_name, new_key=replacement_key)
        self.assertIsNotNone(
            self.gpgkey.wait_until_element(
                common_locators["alert.success_sub_form"]
            )
        )
def test_positive_update_contents(self):
    """Create a repository and upload RPM contents.

    @id: 8faa64f9-b620-4c0a-8c80-801e8e6436f1

    @Assert: The repository's contents include one RPM.

    @CaseLevel: Integration
    """
    # Create a repository and upload RPM content into it.
    fresh_repo = entities.Repository(product=self.product).create()
    with open(get_data_file(RPM_TO_UPLOAD), 'rb') as handle:
        fresh_repo.upload_content(files={'content': handle})
    # The refreshed repo must count exactly one rpm.
    self.assertEqual(fresh_repo.read().content_counts['rpm'], 1)
def test_positive_update_file_for_imported_content(self):
    """Create gpg key with valid name and valid gpg key via file
    import then update its gpg key file

    @id: 9f74b337-3ea5-48a1-af6e-d72ab41c2348

    @assert: gpg key is updated
    """
    key_name = gen_string("alpha")
    replacement_key = get_data_file(VALID_GPG_KEY_BETA_FILE)
    with Session(self.browser) as session:
        make_gpgkey(
            session,
            key_path=self.key_path,
            name=key_name,
            org=self.organization.name,
            upload_key=True,
        )
        self.assertIsNotNone(self.gpgkey.search(key_name))
        # Swap the key file and expect the success alert.
        self.gpgkey.update(key_name, new_key=replacement_key)
        self.assertIsNotNone(
            self.gpgkey.wait_until_element(
                common_locators["alert.success_sub_form"]
            )
        )
def test_positive_upload_content_srpm(self):
    """Create repository and upload a SRPM content

    @id: 706dc3e2-dacb-4fdd-8eef-5715ce498888

    @Assert: File successfully uploaded
    """
    new_repo = self._make_repository({"name": gen_string("alpha", 15)})
    # Stage the SRPM on the remote machine, then point the CLI at it.
    remote_path = "/tmp/{0}".format(SRPM_TO_UPLOAD)
    ssh.upload_file(
        local_file=get_data_file(SRPM_TO_UPLOAD), remote_file=remote_path
    )
    result = Repository.upload_content(
        {
            "name": new_repo["name"],
            "organization": new_repo["organization"],
            "path": remote_path,
            "product-id": new_repo["product"]["id"],
        }
    )
    self.assertIn(
        "Successfully uploaded file '{0}'".format(SRPM_TO_UPLOAD),
        result[0]["message"],
    )
def test_negative_create_template_with_too_long_name(self):
    """@Test: Template - Create a new template with 256 characters in name

    @Feature: Template - Negative Create

    @Assert: Template is not created
    """
    overlong_name = gen_string('alpha', 256)
    with Session(self.browser) as session:
        # Creation must be rejected by the name-length validation, which
        # surfaces as an inline name error in the form.
        make_templates(
            session,
            name=overlong_name,
            template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
            custom_really=True,
            template_type='provision',
        )
        name_error = self.template.wait_until_element(
            common_locators['name_haserror'])
        self.assertIsNotNone(name_error)
def test_negative_create_template_with_blank_name(self, name):
    """@Test: Create a new template with blank and whitespace in name

    @Feature: Template - Negative Create

    @Assert: Template is not created
    """
    with Session(self.browser) as session:
        # Blank/whitespace names must be rejected by the form's name
        # validation (inline error, no template created).
        make_templates(
            session,
            name=name,
            template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
            custom_really=True,
            template_type='provision',
        )
        name_error = self.template.wait_until_element(
            common_locators['name_haserror'])
        self.assertIsNotNone(name_error)
def test_positive_upload_content(self):
    """Create repository and upload content

    @id: eb0ec599-2bf1-483a-8215-66652f948d67

    @Assert: upload content is successful
    """
    repo = self._make_repository({"name": gen_string("alpha", 15)})
    # Copy the RPM to the Satellite host, then import it via hammer.
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    ssh.upload_file(
        local_file=get_data_file(RPM_TO_UPLOAD),
        remote_file=remote_path,
    )
    upload = Repository.upload_content({
        "name": repo["name"],
        "organization": repo["organization"],
        "path": remote_path,
        "product-id": repo["product"]["id"],
    })
    self.assertIn(
        "Successfully uploaded file '{0}'".format(RPM_TO_UPLOAD),
        upload[0]["message"],
    )
def test_positive_create_template(self, name):
    """@Test: Create new template

    @Feature: Template - Positive Create

    @Assert: New provisioning template of type 'provision' should be
    created successfully
    """
    with Session(self.browser) as session:
        # Create a custom template of type 'provision' and verify it
        # appears in search results afterwards.
        make_templates(
            session,
            name=name,
            template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
            custom_really=True,
            template_type='provision',
        )
        created = self.template.search(name)
        self.assertIsNotNone(created)
def test_negative_create_with_invalid_file(self):
    """Create Tailoring files with invalid file

    :id: 86f5ce13-856c-4e58-997f-fa21093edd04

    :steps:

        1. Attempt to create tailoring file with invalid file

    :expectedresults: Tailoring file will not be added to satellite

    :CaseImportance: Critical
    """
    tailoring_name = gen_string('alphanumeric')
    # Stage a snippet (NOT a valid tailoring file) on the server.
    ssh.upload_file(
        local_file=get_data_file(SNIPPET_DATA_FILE),
        remote_file=f'/tmp/{SNIPPET_DATA_FILE}',
    )
    # Creation must fail because the staged file content is invalid.
    with pytest.raises(CLIFactoryError):
        make_tailoringfile(
            {'name': tailoring_name, 'scap-file': f'/tmp/{SNIPPET_DATA_FILE}'}
        )
def test_negative_create_template_with_same_name(self):
    """@Test: Template - Create a new template with same name

    @Feature: Template - Negative Create

    @Assert: Template is not created
    """
    duplicate_name = gen_string('alpha')
    # Both creation attempts use exactly the same arguments.
    creation_kwargs = dict(
        name=duplicate_name,
        template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
        custom_really=True,
        template_type='provision',
    )
    with Session(self.browser) as session:
        # First creation succeeds ...
        make_templates(session, **creation_kwargs)
        self.assertIsNotNone(self.template.search(duplicate_name))
        # ... the second, same-named creation must raise a name error.
        make_templates(session, **creation_kwargs)
        self.assertIsNotNone(
            self.template.wait_until_element(
                common_locators['name_haserror']))
def test_positive_remove_template(self):
    """Remove config template

    :id: f510eb04-6bbb-4153-bda0-a183d070b9f2

    :expectedresults: config template is added and then removed

    :CaseLevel: Integration
    """
    # Locator pair used to find an entry in the location's template
    # multi-select widget; `value` is a template-name format string.
    strategy, value = common_locators['all_values_selection']
    with Session(self.browser) as session:
        for template_name in generate_strings_list(length=8):
            with self.subTest(template_name):
                loc_name = gen_string('alpha')
                # Create the provisioning template and a location that
                # will (implicitly) include it.
                make_templates(
                    session,
                    name=template_name,
                    template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
                    template_type='Provisioning template',
                    custom_really=True,
                )
                self.assertIsNotNone(self.template.search(template_name))
                make_loc(
                    session,
                    name=loc_name,
                    organizations=[self.org_.name],
                )
                # Open the location's Provisioning Templates tab and look
                # up the template entry.
                self.location.search_and_click(loc_name)
                session.nav.click(tab_locators['context.tab_template'])
                element = session.nav.wait_until_element(
                    (strategy, value % template_name))
                # Item is listed in 'Selected Items' list and not
                # 'All Items' list.
                self.assertIsNotNone(element)
                # Delete the template, then re-open the same tab: the
                # entry must be gone from the selected side.
                self.template.delete(template_name, dropdown_present=True)
                self.location.search_and_click(loc_name)
                session.nav.click(tab_locators['context.tab_template'])
                element = session.nav.wait_until_element(
                    (strategy, value % template_name))
                # Item is listed in 'All Items' list and not
                # 'Selected Items' list.
                self.assertIsNone(element)
def test_positive_remove_template(self):
    """Remove config template

    :id: f510eb04-6bbb-4153-bda0-a183d070b9f2

    :expectedresults: config template is added and then removed

    :CaseLevel: Integration
    """
    # Locator pair used to find an entry in the location's template
    # multi-select widget; `value` is a template-name format string.
    strategy, value = common_locators['all_values_selection']
    # NOTE(review): this variant passes the test case itself to Session
    # (Session(self)), unlike the sibling test that passes self.browser —
    # presumably a newer Session API; confirm against the framework.
    with Session(self) as session:
        for template_name in generate_strings_list(length=8):
            with self.subTest(template_name):
                loc_name = gen_string('alpha')
                # Create the provisioning template and a location that
                # will (implicitly) include it.
                make_templates(
                    session,
                    name=template_name,
                    template_path=get_data_file(OS_TEMPLATE_DATA_FILE),
                    template_type='Provisioning template',
                    custom_really=True,
                )
                self.assertIsNotNone(self.template.search(template_name))
                make_loc(
                    session,
                    name=loc_name,
                    organizations=[self.org_.name],
                )
                # Open the location's Provisioning Templates tab and look
                # up the template entry.
                self.location.search_and_click(loc_name)
                session.nav.click(tab_locators['context.tab_template'])
                element = session.nav.wait_until_element(
                    (strategy, value % template_name))
                # Item is listed in 'Selected Items' list and not
                # 'All Items' list.
                self.assertIsNotNone(element)
                # Delete the template, then re-open the same tab: the
                # entry must be gone from the selected side.
                self.template.delete(template_name, dropdown_present=True)
                self.location.search_and_click(loc_name)
                session.nav.click(tab_locators['context.tab_template'])
                element = session.nav.wait_until_element(
                    (strategy, value % template_name))
                # Item is listed in 'All Items' list and not
                # 'Selected Items' list.
                self.assertIsNone(element)
def test_positive_update_ptable(self):
    """Update OS partition table

    @Feature: OS - Positive Update

    @Assert: OS is updated
    """
    ptable = gen_string('alpha', 4)
    # Read the partition layout script shipped with the test data.
    script_file = get_data_file(PARTITION_SCRIPT_DATA_FILE)
    with open(script_file, 'r') as script_handle:
        layout = script_handle.read()
    entities.PartitionTable(name=ptable, layout=layout).create()
    os_name = entities.OperatingSystem().create().name
    with Session(self.browser):
        # Attach the partition table to the OS, then read it back from
        # the UI and confirm the association stuck.
        self.operatingsys.update(os_name, new_ptables=[ptable])
        result_obj = self.operatingsys.get_os_entities(os_name, 'ptable')
        self.assertEqual(ptable, result_obj['ptable'])
def test_positive_upload_content(self):
    """Create repository and upload content

    @Feature: Repository

    @Assert: upload content is successful
    """
    repo = self._make_repository({'name': gen_string('alpha', 15)})
    # Copy the RPM to the Satellite host, then import it via hammer.
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    ssh.upload_file(
        local_file=get_data_file(RPM_TO_UPLOAD),
        remote_file=remote_path,
    )
    result = Repository.upload_content({
        'name': repo['name'],
        'organization': repo['organization'],
        'path': remote_path,
        'product-id': repo['product']['id'],
    })
    expected_message = "Successfully uploaded file '{0}'".format(
        RPM_TO_UPLOAD)
    self.assertIn(expected_message, result[0]['message'])
def test_cv_preupgrade_scenario(self, request):
    """Pre-upgrade scenario that creates content-view with various
    repositories.

    :id: a4ebbfa1-106a-4962-9c7c-082833879ae8

    :steps:
        1. Create custom repositories of yum and file type.
        2. Create content-view.
        3. Add yum and file repositories in the content view.
        4. Publish the content-view.

    :expectedresults: Content-view created with various repositories.
    """
    # Namespace every entity with the test node name so reruns / other
    # tests don't collide. (Fix: use test_name consistently — the
    # original mixed it with direct request.node.name lookups.)
    test_name = request.node.name
    org = entities.Organization(name=f'{test_name}_org').create()
    product = entities.Product(
        organization=org, name=f'{test_name}_prod'
    ).create()
    yum_repository = entities.Repository(
        product=product,
        name=f'{test_name}_yum_repo',
        url=settings.repos.yum_1.url,
    ).create()
    # Fix: idiomatic instance call instead of the unbound-style
    # `entities.Repository.sync(yum_repository)` (same behavior).
    yum_repository.sync()
    file_repository = entities.Repository(
        product=product, name=f'{test_name}_file_repo', content_type="file"
    ).create()
    # Stage a copy of the RPM on the server; the file-repo upload below
    # reads the local copy directly.
    remote_file_path = f"/tmp/{RPM_TO_UPLOAD}"
    ssh.upload_file(
        local_file=get_data_file(RPM_TO_UPLOAD),
        remote_file=remote_file_path,
    )
    # Fix: get_data_file() already returns a str — the redundant
    # f'{...}' wrapper was dropped.
    with open(get_data_file(RPM_TO_UPLOAD), "rb") as content:
        file_repository.upload_content(files={'content': content})
    assert RPM_TO_UPLOAD in file_repository.files()["results"][0]['name']
    # Build a content view holding both repositories and publish one
    # version of it.
    cv = entities.ContentView(name=f"{test_name}_cv", organization=org).create()
    cv.repository = [yum_repository, file_repository]
    cv.update(['repository'])
    cv.publish()
    assert len(cv.read_json()['versions']) == 1