def test_reimport_repo_negative(self, test_data):
    """@test: Import and enable all Repositories from the default data
    set (predefined source), then try to Import Repositories from the
    same CSV again.

    @feature: Repetitive Import Enable Repositories

    @assert: 3 Repositories imported and enabled, second run should
    trigger no action.

    """
    # randomize the org/repo identifiers inside the CSV fixtures
    files = dict(self.default_dataset[1])
    for key, id_column in (
        ('users', u'organization_id'),
        ('repositories', u'org_id'),
    ):
        files[key] = update_csv_values(
            files[key],
            id_column,
            test_data[key],
            self.default_dataset[0],
        )
    # import the prerequisite organizations first
    import_org = Import.organization_with_tr_data(
        {'csv-file': files['users']}
    )
    # then import the repositories themselves
    import_repo = Import.repository_with_tr_data({
        'csv-file': files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    for result in (import_org, import_repo):
        self.assertEqual(result[0].return_code, 0)
    # resolve the sat6 ids of the imported organizations
    imp_orgs = get_sat6_id(csv_to_dataset([files['users']]), import_org[1])

    def repo_snapshot():
        # per-organization snapshot of the repository listings
        return [
            Repository.list({'organization-id': org['sat6']}).stdout
            for org in imp_orgs
        ]

    repos_before = repo_snapshot()
    # Reimport the very same repos: the run must succeed but change nothing
    self.assertEqual(
        Import.repository({
            'csv-file': files['repositories'],
            'synchronize': True,
            'wait': True,
        }).return_code,
        0
    )
    self.assertEqual(repos_before, repo_snapshot())
def _enable_repositories(self):
    """Enable every repository in ``self.repository_list`` and log the
    ids of all repositories visible for ``self.org_id`` afterwards.

    Each item of ``self.repository_list`` is expected to be a
    ``(repo_id, basearch, releasever)`` triple.
    """
    # tuple unpacking instead of positional indexing into each triple
    for i, (repo_id, basearch, releasever) in enumerate(
            self.repository_list):
        # log message: fixed the 'baserach' typo from the original
        self.logger.info(
            'Enabling product {0}: repository id {1} '
            'with basearch {2} and release {3}'
            .format(i, repo_id, basearch, releasever))
        # Enable repos from Repository Set
        RepositorySet.enable({
            'basearch': basearch,
            'id': repo_id,
            'product-id': self.pid,
            'releasever': releasever,
        })
    # verify enabled repository list
    result = Repository.list(
        {'organization-id': self.org_id},
        per_page=False
    )
    # repo_list_ids would contain all repositories in the hammer repo list
    repo_list_ids = [repo['id'] for repo in result]
    self.logger.debug(repo_list_ids)
def test_product_list_with_default_settings(module_org, default_sat):
    """Listing product of an organization apart from default organization using
    hammer does not return output if a defaults settings are applied on org.

    :id: d5c5edac-b19c-4277-92fe-28d9b9fa43ef

    :BZ: 1745575

    :customerscenario: true

    :expectedresults: product/repository list should work as expected.
    """
    org_id = str(module_org.id)
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
    non_default_org = make_org()
    default_product = make_product({
        'name': default_product_name,
        'organization-id': org_id
    })
    non_default_product = make_product({
        'name': non_default_product_name,
        'organization-id': non_default_org['id']
    })
    # one repository per product so both product lists have content
    # NOTE(review): both repos are created with the default org's id even
    # for the non-default product -- presumably product-id wins; confirm.
    for product in default_product, non_default_product:
        make_repository(
            {
                'organization-id': org_id,
                'product-id': product['id'],
                'url': settings.repos.yum_0.url,
            },
        )
    Defaults.add({'param-name': 'organization_id', 'param-value': org_id})
    result = default_sat.cli.Defaults.list(per_page=False)
    # generator expressions instead of throwaway lists inside any() (C419)
    assert any(
        res['value'] == org_id
        for res in result
        if res['parameter'] == 'organization_id'
    )
    try:
        # Verify --organization-id is not required to pass if defaults are set
        result = default_sat.cli.Product.list()
        assert any(res['name'] == default_product_name for res in result)
        result = default_sat.cli.Repository.list()
        assert any(res['product'] == default_product_name for res in result)
        # verify that defaults setting should not affect other entities
        product_list = Product.list({'organization-id': non_default_org['id']})
        assert non_default_product_name == product_list[0]['name']
        repository_list = Repository.list(
            {'organization-id': non_default_org['id']})
        assert non_default_product_name == repository_list[0]['product']
    finally:
        # always remove the default so later tests are unaffected
        Defaults.delete({'param-name': 'organization_id'})
        result = default_sat.cli.Defaults.list(per_page=False)
        assert not [
            res for res in result if res['parameter'] == 'organization_id'
        ]
def get_enabled_repos(org_id):
    """Return a name -> id mapping of all enabled repositories.

    :return map_repo_name_id: dict keyed by repository name with the
        repository id as value, covering every enabled repository in
        the organization denoted by ``org_id``
    :raises ``RuntimeError`` if hammer reports a non-zero return code,
        i.e. no enabled repository was found in the organization
    """
    LOGGER.info('Searching for enabled repositories by hammer CLI:')
    result = Repository.list(
        {'organization-id': org_id},
        per_page=False
    )
    if result.return_code != 0:
        raise RuntimeError(
            'No enabled repository found in organization {}!'
            .format(org_id)
        )
    # build the name -> id mapping in a single comprehension
    return {repo['name']: repo['id'] for repo in result.stdout}
def _enable_repositories(self):
    """Enable each repository listed in ``self.repository_list`` and log
    the ids of every repository visible for ``self.org_id`` afterwards.

    ``self.repository_list`` items are ``(repo_id, basearch,
    releasever)`` triples.
    """
    # unpack each triple directly in the loop header
    for i, (repo_id, basearch, releasever) in enumerate(
            self.repository_list):
        # log message: fixed the 'baserach' typo from the original
        self.logger.info('Enabling product {0}: repository id {1} '
                         'with basearch {2} and release {3}'.format(
                             i, repo_id, basearch, releasever))
        # Enable repos from Repository Set
        RepositorySet.enable({
            'basearch': basearch,
            'id': repo_id,
            'product-id': self.pid,
            'releasever': releasever,
        })
    # verify enabled repository list
    result = Repository.list({'organization-id': self.org_id},
                             per_page=False)
    # repo_list_ids would contain all repositories in the hammer repo list
    repo_list_ids = [repo['id'] for repo in result]
    self.logger.debug(repo_list_ids)
def test_product_list_with_default_settings(self):
    """Listing product of an organization apart from default organization using
    hammer does not return output if a defaults settings are applied on org.

    :id: d5c5edac-b19c-4277-92fe-28d9b9fa43ef

    :BZ: 1745575

    :expectedresults: product/repository list should work as expected.
    """
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
    default_org = self.org
    non_default_org = make_org()
    default_product = make_product({
        'name': default_product_name,
        'organization-id': default_org['id']
    })
    non_default_product = make_product({
        'name': non_default_product_name,
        'organization-id': non_default_org['id']
    })
    # one repository per product so both product lists have content
    for product in (default_product, non_default_product):
        make_repository({
            'product-id': product['id'],
            'url': FAKE_0_YUM_REPO
        })
    Defaults.add({
        'param-name': 'organization_id',
        'param-value': default_org['id']
    })
    result = ssh.command('hammer defaults list')
    # assertIn/assertNotIn give clearer failure messages than assertTrue
    self.assertIn(default_org['id'], "".join(result.stdout))
    try:
        # Verify --organization-id is not required to pass if defaults are set
        result = ssh.command('hammer product list')
        self.assertIn(default_product_name, "".join(result.stdout))
        result = ssh.command('hammer repository list')
        self.assertIn(default_product_name, "".join(result.stdout))
        # verify that defaults setting should not affect other entities
        product_list = Product.list(
            {'organization-id': non_default_org['id']})
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(non_default_product_name, product_list[0]['name'])
        repository_list = Repository.list(
            {'organization-id': non_default_org['id']})
        self.assertEqual(
            non_default_product_name, repository_list[0]['product'])
    finally:
        # always remove the default so later tests are unaffected
        Defaults.delete({'param-name': 'organization_id'})
        result = ssh.command('hammer defaults list')
        self.assertNotIn(default_org['id'], "".join(result.stdout))
def test_product_list_with_default_settings(module_org):
    """Listing product of an organization apart from default organization using
    hammer does not return output if a defaults settings are applied on org.

    :id: d5c5edac-b19c-4277-92fe-28d9b9fa43ef

    :BZ: 1745575

    :customerscenario: true

    :expectedresults: product/repository list should work as expected.
    """
    org_id = str(module_org.id)
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
    non_default_org = make_org()
    default_product = make_product(
        {'name': default_product_name, 'organization-id': org_id}
    )
    non_default_product = make_product(
        {
            'name': non_default_product_name,
            'organization-id': non_default_org['id'],
        }
    )
    # one repository per product so both product lists have content
    for product in (default_product, non_default_product):
        make_repository({
            'organization-id': org_id,
            'product-id': product['id'],
            'url': FAKE_0_YUM_REPO,
        })
    Defaults.add({'param-name': 'organization_id', 'param-value': org_id})
    defaults_output = "".join(ssh.command('hammer defaults list').stdout)
    assert org_id in defaults_output
    try:
        # --organization-id must not be required once defaults are set
        product_output = "".join(ssh.command('hammer product list').stdout)
        assert default_product_name in product_output
        repo_output = "".join(ssh.command('hammer repository list').stdout)
        assert default_product_name in repo_output
        # the default must not leak into queries for other organizations
        product_list = Product.list({'organization-id': non_default_org['id']})
        assert non_default_product_name == product_list[0]['name']
        repository_list = Repository.list(
            {'organization-id': non_default_org['id']})
        assert non_default_product_name == repository_list[0]['product']
    finally:
        Defaults.delete({'param-name': 'organization_id'})
        defaults_output = "".join(ssh.command('hammer defaults list').stdout)
        assert org_id not in defaults_output
def create(
    self,
    organization_id,
    product_id=None,
    download_policy=DOWNLOAD_POLICY_ON_DEMAND,
    synchronize=True,
):
    # type: (int, Optional[int], Optional[str], Optional[bool]) -> Dict
    """Create an RH repository.

    For a CDN repository the matching repository set is enabled (unless
    the repository already exists), the download policy applied, and the
    repository optionally synchronized; otherwise creation is delegated
    to the base implementation.
    """
    if not self.cdn and not self.url:
        raise ValueError(
            'Can not handle Custom repository with url not supplied')
    if not self.cdn:
        # plain custom repository: delegate to the base implementation
        return super().create(
            organization_id, product_id, download_policy=download_policy)
    data = self.data
    # enable the repository set only when the repo is not present yet
    existing = Repository.list({
        'organization-id': organization_id,
        'name': data['repository'],
        'product': data['product'],
    })
    if not existing:
        RepositorySet.enable({
            'organization-id': organization_id,
            'product': data['product'],
            'name': data['repository-set'],
            'basearch': data.get('arch', constants.DEFAULT_ARCHITECTURE),
            'releasever': data.get('releasever'),
        })
    repo_info = Repository.info({
        'organization-id': organization_id,
        'name': data['repository'],
        'product': data['product'],
    })
    if download_policy:
        # Set download policy
        Repository.update({
            'download-policy': download_policy,
            'id': repo_info['id'],
        })
    self._repo_info = repo_info
    if synchronize:
        self.synchronize()
    return repo_info
def test_import_repo_default(self, test_data):
    """@test: Import and enable all Repositories from the default data set
    (predefined source)

    @feature: Import Enable Repositories

    @assert: 3 Repositories imported and enabled

    """
    # randomize the org/repo identifiers inside the CSV fixtures
    files = dict(self.default_dataset[1])
    for key, id_column in (
        ('users', u'organization_id'),
        ('repositories', u'org_id'),
    ):
        files[key] = update_csv_values(
            files[key],
            id_column,
            test_data[key],
            self.default_dataset[0],
        )
    # import the prerequisite organizations first
    import_org = Import.organization_with_tr_data(
        {'csv-file': files['users']}
    )
    # then import the repositories themselves
    import_repo = Import.repository_with_tr_data({
        'csv-file': files['repositories'],
        'synchronize': True,
        'wait': True,
    })
    for result in (import_org, import_repo):
        self.assertEqual(result[0].return_code, 0)
    # resolve the sat6 ids of the imported organizations
    imp_orgs = get_sat6_id(csv_to_dataset([files['users']]), import_org[1])
    # every imported organization must now list at least one repository
    for imp_org in imp_orgs:
        self.assertNotEqual(
            Repository.list({'organization-id': imp_org['sat6']}).stdout,
            [],
        )
def get_enabled_repos(org_id):
    """Return a name -> id mapping of all enabled repositories.

    :return map_repo_name_id: dict mapping repository name to repository
        id for every enabled repository in the organization ``org_id``
    :raises ``RuntimeError`` if hammer fails to list repositories, i.e.
        there is no enabled repository in the organization ``org_id``
    """
    LOGGER.info('Searching for enabled repositories by hammer CLI:')
    try:
        result = Repository.list({'organization-id': org_id}, per_page=False)
    except CLIReturnCodeError:
        raise RuntimeError(
            'No enabled repository found in organization {0}!'.format(
                org_id))
    # build the name -> id mapping in a single comprehension
    return {repo['name']: repo['id'] for repo in result}
def test_positive_create_with_synced_content(self):
    """Check if hostgroup with synced kickstart repository can be created

    :id: 7c51ac72-359c-488a-8658-88b5a94d7e7a

    :customerscenario: true

    :expectedresults: Hostgroup should be created and has proper
        installation content id present

    :BZ: 1415707

    :CaseLevel: Integration
    """
    # Check whether path to kickstart media is set
    if settings.rhel6_os is None:
        raise ValueError(
            'Installation media path is not set in properties file')
    # Common entities: org, lifecycle environment, product and a yum repo
    org = make_org()
    lce = make_lifecycle_environment({'organization-id': org['id']})
    product = make_product({'organization-id': org['id']})
    repo = make_repository({
        u'url': settings.rhel6_os,
        u'product-id': product['id'],
        u'content-type': u'yum',
    })
    Repository.synchronize({'id': repo['id']})
    # publish a content view with the synced repo and promote it to the LCE
    cv = make_content_view({
        'organization-id': org['id'],
        'repository-ids': [repo['id']],
    })
    ContentView.publish({'id': cv['id']})
    cv = ContentView.info({'id': cv['id']})
    cvv = cv['versions'][0]
    ContentView.version_promote({
        'id': cvv['id'],
        'to-lifecycle-environment-id': lce['id'],
    })
    # Get the Partition table ID
    ptable = PartitionTable.info({'name': DEFAULT_PTABLE})
    # Get the arch ID
    arch = Architecture.list(
        {'search': 'name={0}'.format(DEFAULT_ARCHITECTURE)})[0]
    # Get the OS ID
    os = OperatingSys.list({
        'search': 'name="RedHat" AND major="{0}" OR major="{1}"'.format(
            RHEL_6_MAJOR_VERSION, RHEL_7_MAJOR_VERSION)
    })[0]
    # Update the OS with found arch and ptable
    OperatingSys.update({
        'id': os['id'],
        'architectures': arch['name'],
        'partition-tables': ptable['name'],
    })
    # locate the capsule/proxy registered under the server's own URL
    proxy = Proxy.list({
        'search': 'url = https://{0}:9090'.format(settings.server.hostname)
    })[0]
    # Search for proper installation repository id
    synced_repo = Repository.list({
        'content-view-version-id': cvv['id'],
        'organization-id': org['id'],
        'environment-id': lce['id'],
    })[0]
    # create the hostgroup wired to the synced kickstart repository
    hostgroup = make_hostgroup({
        'lifecycle-environment-id': lce['id'],
        'puppet-proxy-id': proxy['id'],
        'puppet-ca-proxy-id': proxy['id'],
        'content-source-id': proxy['id'],
        'content-view-id': cv['id'],
        'organization-ids': org['id'],
        'architecture-id': arch['id'],
        'partition-table-id': ptable['id'],
        'operatingsystem-id': os['id'],
        'kickstart-repository-id': synced_repo['id'],
    })
    # the created hostgroup must reference the synced kickstart repo
    hg = HostGroup.info({'id': hostgroup['id']}, output_format='json')
    self.assertEqual(hg['kickstart-repository']['id'], synced_repo['id'])
def test_positive_create_with_synced_content(self):
    """Check if hostgroup with synced kickstart repository can be created

    :id: 7c51ac72-359c-488a-8658-88b5a94d7e7a

    :customerscenario: true

    :expectedresults: Hostgroup should be created and has proper
        installation content id present

    :BZ: 1415707

    :CaseLevel: Integration
    """
    # Check whether path to kickstart media is set
    if settings.rhel6_os is None:
        raise ValueError(
            'Installation media path is not set in properties file')
    # Common entities: org, lifecycle environment, product and a yum repo
    org = make_org()
    lce = make_lifecycle_environment({'organization-id': org['id']})
    product = make_product({'organization-id': org['id']})
    repo = make_repository({
        u'url': settings.rhel6_os,
        u'product-id': product['id'],
        u'content-type': u'yum',
    })
    Repository.synchronize({'id': repo['id']})
    # publish a content view with the synced repo and promote it to the LCE
    cv = make_content_view({
        'organization-id': org['id'],
        'repository-ids': [repo['id']],
    })
    ContentView.publish({'id': cv['id']})
    cv = ContentView.info({'id': cv['id']})
    cvv = cv['versions'][0]
    ContentView.version_promote({
        'id': cvv['id'],
        'to-lifecycle-environment-id': lce['id'],
    })
    # Get the Partition table ID
    ptable = PartitionTable.info({'name': DEFAULT_PTABLE})
    # Get the arch ID
    arch = Architecture.list({
        'search': 'name={0}'.format(DEFAULT_ARCHITECTURE)})[0]
    # Get the OS ID
    os = OperatingSys.list({
        'search': 'name="RedHat" AND major="{0}" OR major="{1}"'.format(
            RHEL_6_MAJOR_VERSION, RHEL_7_MAJOR_VERSION)
    })[0]
    # Update the OS with found arch and ptable
    OperatingSys.update({
        'id': os['id'],
        'architectures': arch['name'],
        'partition-tables': ptable['name'],
    })
    # locate the capsule/proxy registered under the server's own URL
    proxy = Proxy.list({
        'search': 'url = https://{0}:9090'.format(settings.server.hostname)
    })[0]
    # Search for proper installation repository id
    synced_repo = Repository.list({
        'content-view-version-id': cvv['id'],
        'organization-id': org['id'],
        'environment-id': lce['id'],
    })[0]
    # create the hostgroup wired to the synced kickstart repository
    hostgroup = make_hostgroup({
        'lifecycle-environment-id': lce['id'],
        'puppet-proxy-id': proxy['id'],
        'puppet-ca-proxy-id': proxy['id'],
        'content-source-id': proxy['id'],
        'content-view-id': cv['id'],
        'organization-ids': org['id'],
        'architecture-id': arch['id'],
        'partition-table-id': ptable['id'],
        'operatingsystem-id': os['id'],
        'kickstart-repository-id': synced_repo['id'],
    })
    # the OS attached to the hostgroup must carry the kickstart repo id
    hg = HostGroup.info({'id': hostgroup['id']}, output_format='json')
    self.assertEqual(
        hg['operating-system']['kickstart_repository_id'],
        synced_repo['id']
    )
# NOTE(review): this fragment references repo_id/basearch/releasever/pid
# that are defined earlier in the script (outside this view) -- presumably
# inside a loop over a repository list; confirm before reuse.
print "Disabling product: {0} with basearch {1} and release {2}".format(
    repo_id, basearch, releasever)
# Disable repo from Repository Set (original comment wrongly said "Enable")
result = RepositorySet.disable({
    'product-id': pid,
    'basearch': basearch,
    'releasever': releasever,
    'id': repo_id
})
#repository_list.append([168, 'x86_64', '6Server'])
#repository_list.append([2456,'x86_64','7Server'])
#repository_list.append([])
'''
# Enable repo from Repository Set
result = RepositorySet.enable({
    'product-id':pid,
    'basearch':basearch,
    'releasever':releasever,
    'id':repo_id
})
'''
# verify enabled repository list (org id 1 is hard-coded here)
result = Repository.list({'organization-id': '1'}, per_page=False)
result.stdout
'''
for repo in repo_list
print repo['id'],' | ',repo['name']
'''
def test_import_repo_recovery(self, test_data):
    """@test: Try to Import Repos with the same name to invoke usage of a
    recovery strategy (rename, map, none)

    @feature: Import Repository Recover

    @assert: 2nd Import will rename the new repos, 3rd import will map them
    and the 4th one will result in No Action Taken

    """
    # prepare the data and randomize the values for orgs and repos
    # (the original built ``files`` twice in a row; the duplicate
    # assignment was removed)
    files = dict(self.default_dataset[1])
    for file_ in zip(
        ['users', 'repositories'],
        [u'organization_id', u'org_id'],
    ):
        files[file_[0]] = update_csv_values(
            files[file_[0]],
            file_[1],
            test_data[file_[0]],
            self.default_dataset[0]
        )
    # import the prerequisities
    import_org = Import.organization_with_tr_data(
        {'csv-file': files['users']}
    )
    for result in (
        import_org,
        Import.repository_with_tr_data(
            {'csv-file': files['repositories']}
        ),
    ):
        self.assertEqual(result[0].return_code, 0)
    # clear the .transition_data to clear the transition mapping
    ssh.command('rm -rf "${HOME}"/.transition_data/repositories*')
    ssh.command('rm -rf "${HOME}"/.transition_data/products*')
    # use the default (rename) strategy
    import_repo_rename = Import.repository_with_tr_data(
        {'csv-file': files['repositories'], 'verbose': True}
    )
    self.assertEqual(import_repo_rename[0].return_code, 0)
    for record in import_repo_rename[1][1]:
        self.assertEqual(
            Repository.info({'id': record['sat6']}).return_code, 0
        )
    Import.repository(
        {'csv-file': files['repositories'], 'delete': True}
    )
    # use the 'none' strategy: listings must stay untouched
    repos_before = [
        Repository.list({'organization-id': tr['sat6']}).stdout
        for tr in import_org[1]
    ]
    Import.repository(
        {'csv-file': files['repositories'], 'recover': 'none'}
    )
    self.assertEqual(
        repos_before,
        [Repository.list({'organization-id': tr['sat6']}).stdout
         for tr in import_org[1]],
    )
    # use the 'map' strategy: every mapped repo must be resolvable
    import_repo_map = Import.repository_with_tr_data({
        'csv-file': files['repositories'],
        'recover': 'map',
        'verbose': True,
    })
    self.assertEqual(import_repo_map[0].return_code, 0)
    for record in import_repo_map[1][1]:
        self.assertEqual(
            Repository.info({'id': record['sat6']}).return_code, 0
        )
# Enable repo from Repository Set result = RepositorySet.disable({ 'product-id':pid, 'basearch':basearch, 'releasever':releasever, 'id':repo_id }) #repository_list.append([168, 'x86_64', '6Server']) #repository_list.append([2456,'x86_64','7Server']) #repository_list.append([]) ''' # Enable repo from Repository Set result = RepositorySet.enable({ 'product-id':pid, 'basearch':basearch, 'releasever':releasever, 'id':repo_id }) ''' # verify enabled repository list result = Repository.list({'organization-id':'1'}, per_page=False) result.stdout ''' for repo in repo_list print repo['id'],' | ',repo['name'] '''