def test_positive_add_repo_from_product_with_repos(self):
    """@test: Create gpg key via file import and associate with custom repo

    The key must end up linked to exactly one repository of a custom
    product that owns more than one repository, and must not be linked
    to the product itself or to the remaining repositories.

    @feature: GPG Keys

    @assert: gpg key is associated with the repository
    """
    custom_product = make_product({'organization-id': self.org['id']})
    custom_repos = []
    for _ in range(gen_integer(2, 5)):
        custom_repos.append(
            make_repository({'product-id': custom_product['id']}))
    key = make_gpg_key({'organization-id': self.org['id']})
    # Attach the key to the first repository only
    Repository.update({
        'id': custom_repos[0]['id'],
        'gpg-key': key['name'],
        'organization-id': self.org['id'],
    })
    custom_product = Product.info({
        'id': custom_product['id'],
        'organization-id': self.org['id'],
    })
    # The product itself must not pick up the key
    self.assertNotEqual(
        custom_product['gpg'].get('gpg-key-id'), key['id'])
    # First repo should have a valid gpg key assigned
    first = Repository.info({'id': custom_repos.pop(0)['id']})
    self.assertEqual(first['gpg-key']['id'], key['id'])
    # None of the remaining repositories should carry the key
    for entry in custom_repos:
        entry = Repository.info({'id': entry['id']})
        self.assertNotEqual(entry['gpg-key'].get('id'), key['id'])
def test_delete_random_docker_repo(self):
    """@Test: Create Docker-type repositories on multiple products and
    delete a random repository from a random product.

    @Assert: Random repository can be deleted from random product without
    altering the other products.

    @Feature: Docker
    """
    prods = [
        make_product({'organization-id': self.org_id})
        for _ in range(randint(2, 5))
    ]
    all_repos = []
    for prod in prods:
        all_repos.extend(
            _make_docker_repo(prod['id']) for _ in range(randint(2, 3)))
    # Pick a victim repository at random and remove it
    victim = choice(all_repos)
    all_repos.remove(victim)
    result = Repository.delete({'id': victim['id']})
    self.assertEqual(result.return_code, 0)
    # The deleted repository must no longer be readable
    result = Repository.info({'id': victim['id']})
    self.assertNotEqual(result.return_code, 0)
    # Verify other repositories were not touched
    prod_ids = [prod['id'] for prod in prods]
    for survivor in all_repos:
        result = Repository.info({'id': survivor['id']})
        self.assertEqual(result.return_code, 0)
        self.assertIn(result.stdout['product']['id'], prod_ids)
def test_positive_update_key_for_product_with_repos(self): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has more than one repository then update the key @id: a95eb51b-4b6b-4c04-bb4d-cbe600431850 @assert: gpg key is associated with product before/after update as well as with the repositories @CaseLevel: Integration """ # Create a product and a gpg key product = make_product({'organization-id': self.org['id']}) gpg_key = make_gpg_key({'organization-id': self.org['id']}) # Create repositories and assign them to the product repos = [ make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5)) ] # Associate gpg key with a product Product.update({ 'gpg-key': gpg_key['name'], 'id': product['id'], 'organization-id': self.org['id'], }) # Verify gpg key was associated product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertEqual(product['gpg']['gpg-key'], gpg_key['name']) for repo in repos: repo = Repository.info({'id': repo['id']}) self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name']) # Update the gpg key new_name = gen_choice(valid_data_list()) GPGKey.update({ 'name': gpg_key['name'], 'new-name': new_name, 'organization-id': self.org['id'], }) # Verify changes are reflected in the gpg key gpg_key = GPGKey.info({ 'id': gpg_key['id'], 'organization-id': self.org['id'], }) self.assertEqual(gpg_key['name'], new_name) # Verify changes are reflected in the product product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertEqual(product['gpg']['gpg-key'], new_name) # Verify changes are reflected in the repositories for repo in repos: repo = Repository.info({'id': repo['id']}) self.assertEqual(repo['gpg-key'].get('name'), new_name)
def test_positive_delete_key_for_product_with_repos(self): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has more than one repository then delete it @id: f92d4643-1892-4f95-ae6b-fcea8e726946 @assert: gpg key is associated with product and its repositories during creation but removed from the product and the repositories after deletion @CaseLevel: Integration """ # Create product, repositories and gpg key product = make_product({'organization-id': self.org['id']}) repos = [ make_repository({'product-id': product['id']}) for _ in range(gen_integer(2, 5)) ] gpg_key = make_gpg_key({'organization-id': self.org['id']}) # Associate gpg key with a product Product.update({ 'gpg-key': gpg_key['name'], 'id': product['id'], 'organization-id': self.org['id'], }) # Verify gpg key was associated with product and its repositories product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertEqual(product['gpg']['gpg-key'], gpg_key['name']) for repo in repos: repo = Repository.info({'id': repo['id']}) self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name']) # Delete the gpg key GPGKey.delete({ 'name': gpg_key['name'], 'organization-id': self.org['id'], }) # Verify gpg key was actually deleted with self.assertRaises(CLIReturnCodeError): GPGKey.info({ 'id': gpg_key['id'], 'organization-id': self.org['id'], }) # Verify gpg key was disassociated from the product and its # repositories product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertNotEqual(product['gpg']['gpg-key'], gpg_key['name']) for repo in repos: repo = Repository.info({'id': repo['id']}) self.assertNotEqual(repo['gpg-key'].get('name'), gpg_key['name'])
def test_positive_update_key_for_repo_from_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has more than
    one repository then update the key

    :id: 773a9141-9f04-40ba-b3df-4b6d80db25a6

    :expectedresults: gpg key is associated with a single repository
        before/after update and not associated with product or other
        repositories

    :CaseLevel: Integration
    """
    org_id = self.org['id']
    # Create a product with several repositories and a gpg key
    product = make_product({'organization-id': org_id})
    key = make_gpg_key({'organization-id': org_id})
    repo_list = []
    for _ in range(gen_integer(2, 5)):
        repo_list.append(make_repository({'product-id': product['id']}))
    # Link the key to the first repository only
    Repository.update({
        'id': repo_list[0]['id'],
        'gpg-key': key['name'],
        'organization-id': org_id,
    })
    repo_list[0] = Repository.info({'id': repo_list[0]['id']})
    self.assertEqual(repo_list[0]['gpg-key']['name'], key['name'])
    # Rename the key
    renamed = gen_choice(valid_data_list())
    GPGKey.update({
        'name': key['name'],
        'new-name': renamed,
        'organization-id': org_id,
    })
    key = GPGKey.info({'id': key['id'], 'organization-id': org_id})
    self.assertEqual(key['name'], renamed)
    # The linked repository must reflect the rename
    repo_list[0] = Repository.info({'id': repo_list[0]['id']})
    self.assertEqual(repo_list[0]['gpg-key'].get('name'), renamed)
    # Neither the product nor the other repositories may be affected
    product = Product.info({'id': product['id'], 'organization-id': org_id})
    self.assertNotEqual(product['gpg']['gpg-key'], renamed)
    for extra in repo_list[1:]:
        extra = Repository.info({'id': extra['id']})
        self.assertNotEqual(extra['gpg-key'].get('name'), renamed)
def test_key_associate_13(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    more than one repository then update the key

    @feature: GPG Keys

    @assert: gpg key is associated with a single repository before/after
    update and not associated with product or other repositories
    """
    # Create a product and a gpg key
    prod = make_product({'organization-id': self.org['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Create several repositories under the product
    repositories = []
    for _ in range(gen_integer(2, 5)):
        repositories.append(make_repository({'product-id': prod['id']}))
    # Link the key to the first repository only
    Repository.update({
        'gpg-key': key['name'],
        'id': repositories[0]['id'],
        'organization-id': self.org['id'],
    })
    repositories[0] = Repository.info({'id': repositories[0]['id']})
    self.assertEqual(repositories[0]['gpg-key']['name'], key['name'])
    # Rename the key
    updated_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': key['name'],
        'new-name': updated_name,
        'organization-id': self.org['id'],
    })
    key = GPGKey.info({
        'id': key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(key['name'], updated_name)
    # The rename must show up on the associated repository ...
    repositories[0] = Repository.info({'id': repositories[0]['id']})
    self.assertEqual(repositories[0]['gpg-key'].get('name'), updated_name)
    # ... but not on the product ...
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(prod['gpg']['gpg-key'], updated_name)
    # ... nor on the remaining repositories
    for other in repositories[1:]:
        other = Repository.info({'id': other['id']})
        self.assertNotEqual(other['gpg-key'].get('name'), updated_name)
def test_positive_update_key_for_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository then
    update the key

    @id: 3fb550a7-507e-4988-beb6-35bdfc2e99a8

    @assert: gpg key is associated with product before/after update as well
    as with the repository

    @CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create a repository and assign it to the product
    repo = make_repository({'product-id': product['id']})
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are reflected in the repository. Asserting on the key
    # id (as before) could never detect a failed rename because the id is
    # stable across renames; assert on the new name instead, consistent
    # with the multi-repo variant of this test.
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(repo['gpg-key'].get('name'), new_name)
def test_positive_delete_by_id(self):
    """Check if repository can be created and deleted

    @Feature: Repository

    @Assert: Repository is created and then deleted
    """
    for repo_name in valid_data_list():
        with self.subTest(repo_name):
            created = self._make_repository({u'name': repo_name})
            Repository.delete({u'id': created['id']})
            # Reading the deleted repository must fail
            with self.assertRaises(CLIReturnCodeError):
                Repository.info({u'id': created['id']})
def test_positive_delete_ostree_by_id(self):
    """Delete Ostree repository by id

    @id: 171917f5-1a1b-440f-90c7-b8418f1da132

    @Assert: Repository is deleted by id
    """
    ostree_repo = self._make_repository({
        u"content-type": u"ostree",
        u"publish-via-http": u"false",
        u"url": FEDORA23_OSTREE_REPO,
    })
    Repository.delete({u"id": ostree_repo["id"]})
    # Reading the deleted repository must fail
    with self.assertRaises(CLIReturnCodeError):
        Repository.info({u"id": ostree_repo["id"]})
def test_positive_delete_by_id(self):
    """Check if repository can be created and deleted

    @id: bcf096db-0033-4138-90a3-cb7355d5dfaf

    @Assert: Repository is created and then deleted
    """
    for candidate in valid_data_list():
        with self.subTest(candidate):
            repo = self._make_repository({u"name": candidate})
            Repository.delete({u"id": repo["id"]})
            # The repository must be gone afterwards
            with self.assertRaises(CLIReturnCodeError):
                Repository.info({u"id": repo["id"]})
def test_delete_docker_repo(self):
    """@Test: Create and delete a Docker-type repository

    @Assert: A repository is created with a Docker image and then deleted.

    @Feature: Docker
    """
    product = make_product({'organization-id': self.org_id})
    docker_repo = _make_docker_repo(product['id'])
    Repository.delete({'id': docker_repo['id']})
    # The deleted repository must no longer be readable
    with self.assertRaises(CLIReturnCodeError):
        Repository.info({'id': docker_repo['id']})
def test_positive_delete_key_for_product_with_repo(self): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has one repository then delete it :id: a5d4ea02-f015-4026-b4dc-7365eaf00049 :expectedresults: gpg key is associated with product but and its repository during creation but removed from product and repository after deletion :CaseLevel: Integration """ # Create product, repository and gpg key product = make_product({'organization-id': self.org['id']}) repo = make_repository({'product-id': product['id']}) gpg_key = make_gpg_key({'organization-id': self.org['id']}) # Associate gpg key with a product Product.update({ 'gpg-key': gpg_key['name'], 'id': product['id'], 'organization-id': self.org['id'], }) # Verify gpg key was associated both with product and its repository product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) repo = Repository.info({'id': repo['id']}) self.assertEqual(product['gpg']['gpg-key'], gpg_key['name']) self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name']) # Delete the gpg key GPGKey.delete({ 'name': gpg_key['name'], 'organization-id': self.org['id'], }) # Verify gpg key was actually deleted with self.assertRaises(CLIReturnCodeError): GPGKey.info({ 'id': gpg_key['id'], 'organization-id': self.org['id'], }) # Verify gpg key was disassociated from the product and its repository product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) repo = Repository.info({'id': repo['id']}) self.assertNotEqual(product['gpg']['gpg-key'], gpg_key['name']) self.assertNotEqual(repo['gpg-key'].get('name'), gpg_key['name'])
def test_positive_delete_key_for_repo_from_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has more than
    one repository then delete the key

    :id: e7ed4ed9-ecfe-4954-b806-cdd0668e8822

    :expectedresults: gpg key is associated with a single repository but
        not the product during creation and removed from repository after
        deletion

    :CaseLevel: Integration
    """
    org_id = self.org['id']
    # Create product, repositories and gpg key
    product = make_product({'organization-id': org_id})
    repo_list = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    key = make_gpg_key({'organization-id': org_id})
    # Attach the key to the first repository only
    Repository.update({
        'id': repo_list[0]['id'],
        'gpg-key': key['name'],
        'organization-id': org_id,
    })
    repo_list[0] = Repository.info({'id': repo_list[0]['id']})
    self.assertEqual(repo_list[0]['gpg-key']['name'], key['name'])
    # Remove the key entirely
    GPGKey.delete({
        'name': key['name'],
        'organization-id': org_id,
    })
    # Looking the key up afterwards must fail
    with self.assertRaises(CLIReturnCodeError):
        GPGKey.info({
            'id': key['id'],
            'organization-id': org_id,
        })
    # Neither the product nor any repository may still reference the
    # deleted key
    product = Product.info({'id': product['id'], 'organization-id': org_id})
    self.assertNotEqual(product['gpg']['gpg-key'], key['name'])
    for entry in repo_list:
        entry = Repository.info({'id': entry['id']})
        self.assertNotEqual(entry['gpg-key'].get('name'), key['name'])
def test_positive_synchronize_auth_puppet_repo(self):
    """Check if secured puppet repository can be created and synced

    @id: 1d2604fc-8a18-4cbe-bf4c-5c7d9fbdb82c

    @Assert: Repository is created and synced

    @CaseLevel: Integration
    """
    url = FAKE_7_PUPPET_REPO
    for creds in [
        cred for cred in valid_http_credentials(url_encoded=True)
        if cred['http_valid']
    ]:
        url_encoded = url.format(creds['login'], creds['pass'])
        # subTest on the formatted URL: the bare template is identical on
        # every iteration and would make failures indistinguishable
        with self.subTest(url_encoded):
            # The repository URL is a puppet repo, so the content type
            # must be 'puppet'; 'yum' here exercised the wrong repository
            # type (copy-paste from the yum counterpart of this test).
            new_repo = self._make_repository({
                u'content-type': u'puppet',
                u'url': url_encoded,
            })
            # Assertion that repo is not yet synced
            self.assertEqual(new_repo['sync']['status'], 'Not Synced')
            # Synchronize it
            Repository.synchronize({'id': new_repo['id']})
            # Verify it has finished
            new_repo = Repository.info({'id': new_repo['id']})
            self.assertEqual(new_repo['sync']['status'], 'Success')
def test_positive_export_rh_product(self): """Export a repository from the Red Hat product @Feature: Repository - Export @Assert: Repository was successfully exported, rpm files are present on satellite machine """ # Enable RH repository with manifests.clone() as manifest: ssh.upload_file(manifest.content, manifest.filename) Subscription.upload({ 'file': manifest.filename, 'organization-id': self.org['id'], }) RepositorySet.enable({ 'basearch': 'x86_64', 'name': REPOSET['rhva6'], 'organization-id': self.org['id'], 'product': PRDS['rhel'], 'releasever': '6Server', }) repo = Repository.info({ 'name': REPOS['rhva6']['name'], 'organization-id': self.org['id'], 'product': PRDS['rhel'], }) repo_export_dir = ( '/mnt/{0}/{1}-{2}-{3}/{1}/{4}/content/dist/rhel/server/6/6Server/' 'x86_64/rhev-agent/3/os'.format( self.export_dir, self.org['label'], PRDS['rhel'].replace(' ', '_'), repo['label'], ENVIRONMENT, )) # Update the download policy to 'immediate' Repository.update({ 'download-policy': 'immediate', 'id': repo['id'], }) # Export the repository Repository.export({'id': repo['id']}) # Verify export directory is empty result = ssh.command('ls -l {0} | grep .rpm'.format(repo_export_dir)) self.assertEqual(len(result.stdout), 0) # Synchronize the repository Repository.synchronize({'id': repo['id']}) # Export the repository once again Repository.export({'id': repo['id']}) # Verify RPMs were successfully exported result = ssh.command('ls -l {0} | grep .rpm'.format(repo_export_dir)) self.assertEqual(result.return_code, 0) self.assertGreaterEqual(len(result.stdout), 1)
def test_positive_update_url(self):
    """Update the original url for a repository

    @id: 1a2cf29b-5c30-4d4c-b6d1-2f227b0a0a57

    @Assert: Repository url is updated
    """
    repo = self._make_repository()
    # Build authenticated URL variants for every valid credential pair
    auth_urls = []
    for creds in valid_http_credentials(url_encoded=True):
        for template in (FAKE_5_YUM_REPO, FAKE_7_PUPPET_REPO):
            auth_urls.append(template.format(creds['login'], creds['pass']))
    plain_urls = [
        FAKE_4_YUM_REPO, FAKE_1_PUPPET_REPO, FAKE_2_PUPPET_REPO,
        FAKE_3_PUPPET_REPO, FAKE_2_YUM_REPO,
    ]
    for url in plain_urls + auth_urls:
        with self.subTest(url):
            # Update the url
            Repository.update({
                u'id': repo['id'],
                u'url': url,
            })
            # Re-read and confirm the url actually changed
            result = Repository.info({'id': repo['id']})
            self.assertEqual(result['url'], url)
def validate_repo_content(self, repo, content_types, after_sync=True,
                          max_attempts=10):
    """Check whether corresponding content is present in repository before
    or after synchronization is performed

    :param repo: Repository instance to be validated
    :param content_types: List of repository content entities that
        should be validated (e.g. package, erratum, puppet_module)
    :param bool after_sync: Specify whether you perform validation before
        synchronization procedure is happened or after
    :param int max_attempts: Specify how many times to check for content
        presence. Delay between each attempt is 30 seconds. Default is 10
        attempts.
    """
    for _ in range(max_attempts):
        try:
            repo = Repository.info({'id': repo['id']})
            for content in content_types:
                if after_sync:
                    # After a sync each requested counter must be > 0
                    self.assertGreater(
                        int(repo['content-counts'][content]), 0)
                else:
                    # Before a sync each counter must be zero
                    self.assertFalse(int(repo['content-counts'][content]))
            break
        except AssertionError:
            sleep(30)
    else:
        # for-else: every attempt failed its assertions
        raise AssertionError(
            'Repository contains invalid number of content entities')
def test_positive_delete_1(self, name):
    """@Test: Check if repository can be created and deleted

    @Feature: Repository

    @Assert: Repository is created and then deleted
    """
    created = self._make_repository({u'name': name})
    # Assert that name matches data passed
    self.assertEqual(created['name'], name, "Names don't match")
    # Delete it
    result = Repository.delete({u'id': created['id']})
    self.assertEqual(result.return_code, 0, "Repository was not deleted")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    # Fetching it again has to fail now
    result = Repository.info({
        u'id': created['id'],
    })
    self.assertNotEqual(
        result.return_code, 0, "Repository should not be found")
    self.assertGreater(len(result.stderr), 0, "Expected an error here")
def test_positive_update_1(self, url):
    """@Test: Update the original url for a repository

    @Feature: Repository

    @Assert: Repository url is updated
    """
    created = self._make_repository()
    # Push the new url
    result = Repository.update({
        u'id': created['id'],
        u'url': url,
    })
    self.assertEqual(result.return_code, 0, "Repository was not updated")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    # Read the repository back and compare urls
    result = Repository.info({'id': created['id']})
    self.assertEqual(result.return_code, 0, "Repository was not found")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    self.assertNotEqual(
        result.stdout['url'], created['url'], "Urls should not match")
    self.assertEqual(result.stdout['url'], url, "Urls don't match")
def test_positive_synchronize_2(self):
    """@Test: Check if Docker repository can be created and synced

    @Feature: Repository

    @Assert: Docker repository is created and synced
    """
    docker_repo = self._make_repository({
        u'content-type': u'docker',
        u'name': u'busybox',
        u'url': DOCKER_REGISTRY_HUB,
    })
    # A freshly created repository must not be synced yet
    self.assertEqual(docker_repo['sync']['status'], 'Not Synced')
    # Trigger synchronization
    result = Repository.synchronize({'id': docker_repo['id']})
    self.assertEqual(result.return_code, 0)
    self.assertEqual(len(result.stderr), 0)
    # Re-read and check the final status
    result = Repository.info({'id': docker_repo['id']})
    self.assertEqual(result.stdout['sync']['status'], 'Finished')
def test_positive_synchronize_1(self, test_data):
    """@Test: Check if repository can be created and synced

    @Feature: Repository

    @Assert: Repository is created and synced
    """
    created = self._make_repository({
        u'content-type': test_data['content-type'],
        u'url': test_data['url'],
    })
    # A freshly created repository must not be synced yet
    self.assertEqual(
        created['sync']['status'], 'Not Synced',
        "The status of repository should be 'Not Synced'")
    # Trigger synchronization
    result = Repository.synchronize({'id': created['id']})
    self.assertEqual(
        result.return_code, 0, "Repository was not synchronized")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    # Re-read and check the final status
    result = Repository.info({'id': created['id']})
    self.assertEqual(
        result.stdout['sync']['status'], 'Finished',
        "The new status of repository should be 'Finished'")
def test_positive_create_6(self, name):
    """@Test: Check if repository can be created with gpg key name

    @Feature: Repository

    @Assert: Repository is created and has gpg key

    @BZ: 1103944
    """
    # Make a new gpg key
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    created = self._make_repository({
        u'gpg-key': gpg_key['name'],
        u'name': name,
    })
    # Read the repository back
    result = Repository.info({'id': created['id']})
    self.assertEqual(result.return_code, 0, "Repository was not found")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    # The key must be attached by both id and name
    self.assertEqual(
        result.stdout['gpg-key']['id'], gpg_key['id'],
        "GPG Keys ID don't match")
    self.assertEqual(
        result.stdout['gpg-key']['name'], gpg_key['name'],
        "GPG Keys name don't match")
def test_negative_update_auth_url_with_special_characters(self):
    """Verify that repository URL credentials cannot be updated to contain
    the forbidden characters

    @id: 566553b2-d077-4fd8-8ed5-00ba75355386

    @Assert: Repository url not updated
    """
    repo = self._make_repository()
    # Build URLs whose credentials contain unquoted special characters
    bad_urls = []
    for cred in valid_http_credentials():
        if not cred['quote']:
            continue
        for template in (FAKE_5_YUM_REPO, FAKE_7_PUPPET_REPO):
            bad_urls.append(template.format(cred['login'], cred['pass']))
    for url in bad_urls:
        with self.subTest(url):
            # The update itself must be rejected
            with self.assertRaises(CLIReturnCodeError):
                Repository.update({
                    u'id': repo['id'],
                    u'url': url,
                })
            # The stored url must remain unchanged
            current = Repository.info({'id': repo['id']})
            self.assertEqual(current['url'], repo['url'])
def test_positive_package_count(self):
    """Check that packages count is correctly filtered by product id

    :id: 151f60a3-0b94-4658-8b0d-0d022f4f1d8f

    :expectedresults: Packages only from synced product returned

    :BZ: 1422552

    :CaseLevel: Integration
    """
    org = make_org()
    for _ in range(3):
        prod = make_product({'organization-id': org['id']})
        repo = make_repository({
            'product-id': prod['id'],
            'url': FAKE_0_YUM_REPO,
        })
        Product.synchronize({
            'id': prod['id'],
            'organization-id': org['id'],
        })
        pkgs = Package.list({'product-id': prod['id']})
        repo = Repository.info({'id': repo['id']})
        # The repo counter and the filtered package listing must agree
        self.assertEqual(
            int(repo['content-counts']['packages']), len(pkgs))
        # And match the known size of the fake repository
        self.assertEqual(len(pkgs), FAKE_0_YUM_REPO_PACKAGES_COUNT)
def test_positive_synchronize_auth_yum_repo(self):
    """Check if secured repository can be created and synced

    @id: b0db676b-e0f0-428c-adf3-1d7c0c3599f0

    @Assert: Repository is created and synced

    @CaseLevel: Integration
    """
    url = FAKE_5_YUM_REPO
    for creds in [
        cred for cred in valid_http_credentials(url_encoded=True)
        if cred['http_valid']
    ]:
        url_encoded = url.format(creds['login'], creds['pass'])
        # subTest on the formatted URL: the bare template is identical on
        # every iteration, so failures would be indistinguishable
        with self.subTest(url_encoded):
            new_repo = self._make_repository({
                u'content-type': u'yum',
                u'url': url_encoded,
            })
            # Assertion that repo is not yet synced
            self.assertEqual(new_repo['sync']['status'], 'Not Synced')
            # Synchronize it
            Repository.synchronize({'id': new_repo['id']})
            # Verify it has finished
            new_repo = Repository.info({'id': new_repo['id']})
            self.assertEqual(new_repo['sync']['status'], 'Success')
def test_negative_update_auth_url_too_long(self):
    """Update the original url for a repository to value which is too long

    @id: a703de60-8631-4e31-a9d9-e51804f27f03

    @Assert: Repository url not updated
    """
    repo = self._make_repository()
    # Build repo URLs from every invalid credential pair
    bad_urls = []
    for cred in invalid_http_credentials():
        for template in (FAKE_5_YUM_REPO, FAKE_7_PUPPET_REPO):
            bad_urls.append(template.format(cred['login'], cred['pass']))
    for url in bad_urls:
        with self.subTest(url):
            # The update itself must be rejected
            with self.assertRaises(CLIReturnCodeError):
                Repository.update({
                    u'id': repo['id'],
                    u'url': url,
                })
            # The stored url must remain unchanged
            current = Repository.info({'id': repo['id']})
            self.assertEqual(current['url'], repo['url'])
def test_positive_add_custom_product(self):
    """Test that custom product can be associated to Activation Keys

    :id: 96ace967-e165-4069-8ff7-f54c4c822de0

    :expectedresults: Custom products are successfully associated to
        Activation key

    :CaseLevel: System

    :BZ: 1426386
    """
    setup_result = setup_org_for_a_custom_repo({
        u'organization-id': self.org['id'],
        u'url': FAKE_0_YUM_REPO,
    })
    repo = Repository.info({u'id': setup_result['repository-id']})
    # The activation key must expose the repository as product content
    content = ActivationKey.product_content({
        u'id': setup_result['activationkey-id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(content[0]['name'], repo['name'])
def create(self, organization_id, product_id=None,
           download_policy=DOWNLOAD_POLICY_ON_DEMAND, synchronize=True):
    # type: (int, Optional[int], Optional[str], Optional[bool]) -> Dict
    """Create an RH repository.

    For a CDN-backed repository the repository set is enabled and the
    resulting repository is looked up; otherwise creation is delegated
    to the parent class.

    :param organization_id: id of the organization to create the
        repository in
    :param product_id: optional product id; only used by the non-CDN
        code path delegated to the parent class
    :param download_policy: when truthy, the created repository's
        download policy is updated to this value
    :param synchronize: whether to synchronize a CDN repository right
        after enabling it
    :return: dict with the created repository details
    :raises ValueError: for a non-CDN repository without a url
    """
    if not self.cdn and not self.url:
        raise ValueError('Can not handle Custom repository with url not supplied')
    if self.cdn:
        data = self.data
        # Enable the repository set on the CDN
        RepositorySet.enable({
            'organization-id': organization_id,
            'product': data['product'],
            'name': data['repository-set'],
            'basearch': data.get('arch', DEFAULT_ARCHITECTURE),
            'releasever': data.get('releasever'),
        })
        # Look up the repository created by enabling the set
        repo_info = Repository.info({
            'organization-id': organization_id,
            'name': data['repository'],
            'product': data['product'],
        })
        if download_policy:
            # Set download policy
            Repository.update({
                'download-policy': download_policy,
                'id': repo_info['id'],
            })
        self._repo_info = repo_info
        if synchronize:
            self.synchronize()
    else:
        repo_info = super(GenericRHRepository, self).create(
            organization_id, product_id, download_policy=download_policy)
    return repo_info
def test_positive_add_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository

    :id: 5529a852-9ef6-48f8-b2bc-2bbf463657dd

    :expectedresults: gpg key is associated with product as well as with
        the repository

    :CaseLevel: Integration
    """
    org_id = self.org['id']
    product = make_product({'organization-id': org_id})
    repo = make_repository({'product-id': product['id']})
    key = make_gpg_key({'organization-id': org_id})
    # Attach the key at the product level
    Product.update({
        'id': product['id'],
        'gpg-key': key['name'],
        'organization-id': org_id
    })
    # The key must show up on both the product and its repository
    product = Product.info({
        'id': product['id'],
        'organization-id': org_id
    })
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(product['gpg']['gpg-key-id'], key['id'])
    self.assertEqual(repo['gpg-key']['id'], key['id'])
def test_positive_add_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has one
    repository

    @id: 1427f145-9faf-41ef-ae42-dc91d61ce1f6

    @assert: gpg key is associated with the repository but not with the
    product

    @CaseLevel: Integration
    """
    org_id = self.org['id']
    product = make_product({'organization-id': org_id})
    repo = make_repository({'product-id': product['id']})
    key = make_gpg_key({'organization-id': org_id})
    # Attach the key at the repository level only
    Repository.update({
        'id': repo['id'],
        'gpg-key-id': key['id'],
        'organization-id': org_id,
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': org_id,
    })
    repo = Repository.info({'id': repo['id']})
    # The repository carries the key ...
    self.assertEqual(repo['gpg-key']['id'], key['id'])
    # ... while the product itself does not
    self.assertNotEqual(product['gpg'].get('gpg-key-id'), key['id'])
def test_positive_add_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has one
    repository

    :id: 1427f145-9faf-41ef-ae42-dc91d61ce1f6

    :expectedresults: gpg key is associated with the repository but not
        with the product

    :CaseLevel: Integration
    """
    custom_product = make_product({'organization-id': self.org['id']})
    custom_repo = make_repository({'product-id': custom_product['id']})
    gpg = make_gpg_key({'organization-id': self.org['id']})
    # Associate the key with the repository only, by id
    Repository.update({
        'gpg-key-id': gpg['id'],
        'id': custom_repo['id'],
        'organization-id': self.org['id']
    })
    custom_product = Product.info({
        'id': custom_product['id'],
        'organization-id': self.org['id']
    })
    custom_repo = Repository.info({'id': custom_repo['id']})
    # Repository gets the key; the product must not inherit it
    self.assertEqual(custom_repo['gpg-key']['id'], gpg['id'])
    self.assertNotEqual(
        custom_product['gpg'].get('gpg-key-id'), gpg['id'])
def test_positive_update_url(self):
    """Update the original url for a repository

    @id: 1a2cf29b-5c30-4d4c-b6d1-2f227b0a0a57

    @Assert: Repository url is updated
    """
    created = self._make_repository()
    candidate_urls = [
        FAKE_4_YUM_REPO,
        FAKE_1_PUPPET_REPO,
        FAKE_2_PUPPET_REPO,
        FAKE_3_PUPPET_REPO,
        FAKE_2_YUM_REPO,
    ]
    # Append authenticated variants for every valid credential pair
    for creds in valid_http_credentials(url_encoded=True):
        for template in (FAKE_5_YUM_REPO, FAKE_7_PUPPET_REPO):
            candidate_urls.append(
                template.format(creds['login'], creds['pass']))
    for url in candidate_urls:
        with self.subTest(url):
            # Update the url
            Repository.update({
                u'id': created['id'],
                u'url': url,
            })
            # Confirm the change landed
            refreshed = Repository.info({'id': created['id']})
            self.assertEqual(refreshed['url'], url)
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    @id: 6ce2f777-f230-4bb8-9822-2cf3580c21aa

    @Assert: Product is synchronized successfully.

    @CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    org = make_org()
    # Import a manifest so RH content can be enabled for this org
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    # Sync plan scheduled `delay` seconds in the future
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': org['id'],
        'sync-date': (datetime.utcnow() + timedelta(seconds=delay)).strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Enable the RH repository set (CDN content)
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(delay / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(delay / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_add_repo_from_product_with_repo(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has one
    repository

    :id: da568a0e-69b1-498e-a747-6881aac7409e

    :expectedresults: gpg key is associated with the repository but not
        with the product

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    key = make_content_credential({'organization-id': module_org.id})
    # Attach the key to the repository only, never to the product
    Repository.update({
        'gpg-key-id': key['id'],
        'id': repo['id'],
        'organization-id': module_org.id,
    })
    repo_info = Repository.info({'id': repo['id']})
    product_info = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert repo_info['gpg-key']['id'] == key['id']
    assert product_info['gpg'].get('gpg-key-id') != key['id']
def test_positive_update_1(self, url):
    """@Test: Update the original url for a repository

    @Feature: Repository

    @Assert: Repository url is updated
    """
    repo = self._make_repository()
    # Push the new url
    update_result = Repository.update({
        u'id': repo['id'],
        u'url': url,
    })
    self.assertEqual(update_result.return_code, 0)
    self.assertEqual(len(update_result.stderr), 0)
    # Re-read and confirm the url actually changed to the new value
    info_result = Repository.info({'id': repo['id']})
    self.assertEqual(info_result.return_code, 0)
    self.assertEqual(len(info_result.stderr), 0)
    self.assertNotEqual(info_result.stdout['url'], repo['url'])
    self.assertEqual(info_result.stdout['url'], url)
def test_positive_list_multiple_repos(self):
    """Verify that puppet-modules list for specific repo is correct
    and does not affected by other repositories.

    :id: f36d25b3-2495-4e89-a1cf-e39d52762d95

    :expectedresults: Number of modules has no changed after a second repo
        was synced.

    :CaseImportance: Critical
    """
    # Baseline: repo content count must match the module list length
    first_repo = Repository.info({'id': self.repo['id']})
    baseline = len(PuppetModule.list({'repository-id': first_repo['id']}))
    self.assertEqual(
        first_repo['content-counts']['puppet-modules'], str(baseline))
    # Create and sync a second puppet repository
    second_repo = make_repository({
        u'organization-id': self.org['id'],
        u'product-id': self.product['id'],
        u'content-type': u'puppet',
        u'url': FAKE_1_PUPPET_REPO,
    })
    Repository.synchronize({'id': second_repo['id']})
    # The first repo's module count must be unaffected
    self.assertEqual(
        baseline,
        len(PuppetModule.list({'repository-id': first_repo['id']})))
def test_positive_add_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository

    @id: 5529a852-9ef6-48f8-b2bc-2bbf463657dd

    @assert: gpg key is associated with product as well as with
    the repository

    @CaseLevel: Integration
    """
    org_id = self.org['id']
    product = make_product({'organization-id': org_id})
    repo = make_repository({'product-id': product['id']})
    key = make_gpg_key({'organization-id': org_id})
    # Associate at product level; the repository inherits the key
    Product.update({
        'gpg-key': key['name'],
        'id': product['id'],
        'organization-id': org_id,
    })
    repo = Repository.info({'id': repo['id']})
    product = Product.info({
        'id': product['id'],
        'organization-id': org_id,
    })
    self.assertEqual(product['gpg']['gpg-key-id'], key['id'])
    self.assertEqual(repo['gpg-key']['id'], key['id'])
def test_positive_add_product_with_repo(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository

    :id: f315eadd-e65b-4952-912f-f640867ad656

    :expectedresults: gpg key is associated with product as well as with
        the repository

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    key = make_content_credential({'organization-id': module_org.id})
    # Product-level association propagates the key to the repository
    Product.update({
        'gpg-key': key['name'],
        'id': product['id'],
        'organization-id': module_org.id,
    })
    repo_info = Repository.info({'id': repo['id']})
    product_info = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product_info['gpg']['gpg-key-id'] == key['id']
    assert repo_info['gpg-key']['id'] == key['id']
def test_positive_add_product_with_repos(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository

    :id: 76683f3e-7705-4719-996e-c026839053bb

    :expectedresults: gpg key is associated with product as well as with
        the repositories

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': module_org.id})
    # Several repositories so inheritance is checked across all of them
    repo_count = gen_integer(2, 5)
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(repo_count)
    ]
    key = make_content_credential({'organization-id': module_org.id})
    Product.update({
        'gpg-key': key['name'],
        'id': product['id'],
        'organization-id': module_org.id,
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product['gpg']['gpg-key-id'] == key['id']
    # Every repository must have inherited the product's key
    for repo in repos:
        details = Repository.info({'id': repo['id']})
        assert details['gpg-key']['id'] == key['id']
def test_positive_add_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository

    :id: b05c5223-44d5-4a48-9d99-18ca351c84a5

    :expectedresults: gpg key is associated with product as well as with
        the repositories

    :CaseLevel: Integration
    """
    org_id = self.org['id']
    product = make_product({'organization-id': org_id})
    repositories = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    key = make_gpg_key({'organization-id': org_id})
    # Attach the key at product level so every repo inherits it
    Product.update({
        'gpg-key': key['name'],
        'id': product['id'],
        'organization-id': org_id,
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': org_id,
    })
    self.assertEqual(product['gpg']['gpg-key-id'], key['id'])
    # Each repository must report the inherited key
    for repository in repositories:
        details = Repository.info({'id': repository['id']})
        self.assertEqual(details['gpg-key']['id'], key['id'])
def test_positive_list_multiple_repos(self):
    """Verify that puppet-modules list for specific repo is correct
    and does not affected by other repositories.

    @id: f36d25b3-2495-4e89-a1cf-e39d52762d95

    @Assert: Number of modules has no changed after a second repo
    was synced.
    """
    # Record module count for the first repo and cross-check the
    # repository's own content counter against it
    repo_one = Repository.info({'id': self.repo['id']})
    module_count = len(PuppetModule.list({'repository-id': repo_one['id']}))
    self.assertEqual(
        repo_one['content-counts']['puppet-modules'], str(module_count))
    # Bring a second puppet repo into the same product and sync it
    repo_two = make_repository({
        u'organization-id': self.org['id'],
        u'product-id': self.product['id'],
        u'content-type': u'puppet',
        u'url': FAKE_1_PUPPET_REPO,
    })
    Repository.synchronize({'id': repo_two['id']})
    # First repo's module list must stay the same size
    self.assertEqual(
        module_count,
        len(PuppetModule.list({'repository-id': repo_one['id']})),
    )
def test_positive_update_checksum_type(self):
    """Create a YUM repository and update the checksum type

    @Feature: Repository

    @Assert: A YUM repository is updated and contains the correct checksum
    type

    @BZ: 1208305
    """
    repository = self._make_repository({
        u'content-type': u'yum',
    })
    # A new yum repo starts with no explicit checksum type
    self.assertEqual(repository['content-type'], u'yum')
    self.assertEqual(repository['checksum-type'], '')
    for checksum_type in (u'sha1', u'sha256'):
        with self.subTest(checksum_type):
            # Update, then re-read to confirm persistence
            Repository.update({
                u'checksum-type': checksum_type,
                u'id': repository['id'],
            })
            refreshed = Repository.info({'id': repository['id']})
            self.assertEqual(refreshed['checksum-type'], checksum_type)
def test_add_synced_docker_repo_to_content_view(self):
    """@Test: Create and sync a Docker-type repository

    @Assert: A repository is created with a Docker repository
    and it is synchronized.

    @Feature: Docker
    """
    product = make_product({'organization-id': self.org_id})
    repo = _make_docker_repo(product['id'])
    # Sync and confirm at least one docker image landed
    sync_result = Repository.synchronize({'id': repo['id']})
    self.assertEqual(sync_result.return_code, 0)
    repo = Repository.info({'id': repo['id']}).stdout
    self.assertGreaterEqual(
        int(repo['content-counts']['docker-images']), 1)
    # Attach the synced repo to a fresh non-composite content view
    content_view = make_content_view({
        'composite': False,
        'organization-id': self.org_id,
    })
    add_result = ContentView.add_repository({
        'id': content_view['id'],
        'repository-id': repo['id'],
    })
    self.assertEqual(add_result.return_code, 0)
    content_view = ContentView.info({'id': content_view['id']}).stdout
    attached_ids = [
        item['id'] for item in content_view['docker-repositories']
    ]
    self.assertIn(repo['id'], attached_ids)
def make_file_repository_upload_contents(self, options=None):
    """Makes a new File repository, Upload File/Multiple Files
    and asserts its success.

    :param dict options: Optional creation options forwarded to
        ``make_repository``. When supplied it must carry a truthy
        ``content-type`` entry; a truthy ``multi_upload`` entry switches
        from uploading the single canned RPM to uploading a whole local
        data directory.
    :return: The repository info dict re-read after the upload, with a
        positive ``files`` content count.
    :raises CLIFactoryError: If ``options`` lacks a valid content type.
    """
    if options is None:
        options = {
            'name': self.file_repo_name,
            'product-id': self.product['id'],
            'content-type': 'file',
        }
    if not options.get('content-type'):
        raise CLIFactoryError('Please provide a valid Content Type.')
    file_repo = make_repository(options)
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    # `not options.get('multi_upload')` covers both "key absent" and
    # "key present but falsy" (was an explicit two-clause membership test)
    if not options.get('multi_upload'):
        # Single-file upload of the canned RPM
        ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD),
                        remote_file=remote_path)
    else:
        # Upload the whole local data directory to a unique remote dir
        remote_path = "/tmp/{}/".format(gen_string('alpha'))
        ssh.upload_files(local_dir=os.getcwd() + "/../data/",
                         remote_dir=remote_path)
    result = Repository.upload_content(
        {
            'name': file_repo['name'],
            'organization': file_repo['organization'],
            'path': remote_path,
            'product-id': file_repo['product']['id'],
        }
    )
    self.assertIn(
        "Successfully uploaded file '{0}'".format(RPM_TO_UPLOAD),
        result[0]['message']
    )
    # Re-read the repo: the upload must be reflected in the file count
    file_repo = Repository.info({'id': file_repo['id']})
    self.assertGreater(int(file_repo['content-counts']['files']), 0)
    return file_repo
def validate_repo_content(
        self, repo, content_types, after_sync=True, max_attempts=10):
    """Check whether corresponding content is present in repository before
    or after synchronization is performed

    :param repo: Repository instance to be validated
    :param content_types: List of repository content entities that
        should be validated (e.g. package, erratum, puppet_module)
    :param bool after_sync: Specify whether you perform validation before
        synchronization procedure is happened or after
    :param int max_attempts: Specify how many times to check for content
        presence. Delay between each attempt is 30 seconds. Default is 10
        attempts.
    """
    for _ in range(max_attempts):
        try:
            # Re-read the repo each attempt to pick up fresh counts
            repo = Repository.info({'id': repo['id']})
            for content in content_types:
                if after_sync:
                    # After sync every requested type must be non-empty
                    self.assertGreater(
                        int(repo['content-counts'][content]), 0)
                else:
                    # Before sync every requested type must be zero
                    self.assertFalse(int(repo['content-counts'][content]))
            break
        except AssertionError:
            sleep(30)
    else:
        # Loop exhausted without a successful break
        raise AssertionError(
            'Repository contains invalid number of content entities')
def test_positive_add_repo_from_product_with_repo(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    @feature: GPG Keys

    @assert: gpg key is associated with the repository but not with
    the product
    """
    product = make_product({'organization-id': self.org['id']})
    repository = make_repository({'product-id': product['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Key goes on the repository, not the product
    Repository.update({
        'gpg-key-id': key['id'],
        'id': repository['id'],
        'organization-id': self.org['id'],
    })
    repository = Repository.info({'id': repository['id']})
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(repository['gpg-key']['id'], key['id'])
    self.assertNotEqual(product['gpg'].get('gpg-key-id'), key['id'])
def test_positive_synchronize_1(self, test_data):
    """@Test: Check if repository can be created and synced

    @Feature: Repository

    @Assert: Repository is created and synced
    """
    repo = self._make_repository({
        u'url': test_data['url'],
        u'content-type': test_data['content-type'],
    })
    # A freshly created repo must not be synced yet
    self.assertEqual(
        repo['sync']['status'], 'Not Synced',
        "The status of repository should be 'Not Synced'")
    # Trigger the sync and confirm the command succeeded
    sync_result = Repository.synchronize({'id': repo['id']})
    self.assertEqual(
        sync_result.return_code, 0, "Repository was not synchronized")
    self.assertEqual(
        len(sync_result.stderr), 0, "No error was expected")
    # Sync status must now read 'Finished'
    info_result = Repository.info({'id': repo['id']})
    self.assertEqual(
        info_result.stdout['sync']['status'], 'Finished',
        "The new status of repository should be 'Finished'")
def test_positive_add_product_with_repos(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it with custom product that has more than one
    repository

    @feature: GPG Keys

    @assert: gpg key is associated with product as well as with
    the repositories
    """
    org_id = self.org['id']
    product = make_product({'organization-id': org_id})
    repositories = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    key = make_gpg_key({'organization-id': org_id})
    # Product-level association; repos inherit the key
    Product.update({
        'gpg-key': key['name'],
        'id': product['id'],
        'organization-id': org_id,
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': org_id,
    })
    self.assertEqual(product['gpg']['gpg-key-id'], key['id'])
    # All repositories must report the inherited key
    for repository in repositories:
        details = Repository.info({'id': repository['id']})
        self.assertEqual(details['gpg-key']['id'], key['id'])
def test_positive_create_8(self, use_http):
    """@Test: Create repository not published via http

    @Feature: Repository

    @Assert: Repository is created and is not published via http
    """
    repo = self._make_repository({'publish-via-http': use_http})
    # Re-read the repo and confirm it exists
    result = Repository.info({'id': repo['id']})
    self.assertEqual(result.return_code, 0, "Repository was not found")
    self.assertEqual(len(result.stderr), 0, "No error was expected")
    # Publishing over http must be disabled
    self.assertEqual(
        result.stdout['publish-via-http'],
        u'no',
        "Publishing methods don't match"
    )
def test_positive_create_5(self, name):
    """@Test: Check if repository can be created with gpg key ID

    @Feature: Repository

    @Assert: Repository is created and has gpg key
    """
    # Create the gpg key up front, then a repo referencing it by id
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    repo = self._make_repository({
        u'name': name,
        u'gpg-key-id': gpg_key['id'],
    })
    result = Repository.info({'id': repo['id']})
    self.assertEqual(
        result.return_code, 0, "Repository was not found")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    # The repo must carry exactly the key we attached at creation time
    self.assertEqual(
        result.stdout['gpg-key']['id'],
        gpg_key['id'],
        "GPG Keys ID don't match"
    )
    self.assertEqual(
        result.stdout['gpg-key']['name'],
        gpg_key['name'],
        "GPG Keys name don't match"
    )
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    org = make_org()
    # Import a manifest so RH content can be enabled for this org
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    Subscription.upload({
        'file': manifest.filename,
        'organization-id': org['id'],
    })
    # Hourly plan anchored at "now": next occurrence is one interval away
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Enable the RH repository set (CDN content)
    RepositorySet.enable({
        'name': REPOSET['rhva6'],
        'organization-id': org['id'],
        'product': PRDS['rhel'],
        'releasever': '6Server',
        'basearch': 'x86_64',
    })
    product = Product.info({
        'name': PRDS['rhel'],
        'organization-id': org['id'],
    })
    repo = Repository.info({
        'name': REPOS['rhva6']['name'],
        'product': product['name'],
        'organization-id': org['id'],
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['errata', 'packages'])
def test_positive_delete_key_for_repo_from_product_with_repo(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has one
    repository then delete the key

    :id: 2555b08f-8cee-4e84-8f4d-9b46743f5758

    :expectedresults: gpg key is associated with the single repository but
        not the product during creation and was removed from repository
        after deletion

    :CaseLevel: Integration
    """
    # Build product, repository and key
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    key = make_content_credential({'organization-id': module_org.id})
    # Attach the key to the repository only
    Repository.update({
        'gpg-key': key['name'],
        'id': repo['id'],
        'organization-id': module_org.id,
    })
    # Confirm repo-level association; product must stay untouched
    repo = Repository.info({'id': repo['id']})
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product['gpg']['gpg-key'] != key['name']
    assert repo['gpg-key'].get('name') == key['name']
    # Remove the key and prove it no longer exists
    ContentCredential.delete({
        'name': key['name'],
        'organization-id': module_org.id,
    })
    with pytest.raises(CLIReturnCodeError):
        ContentCredential.info({
            'id': key['id'],
            'organization-id': module_org.id,
        })
    # The repository must have dropped the deleted key
    repo = Repository.info({'id': repo['id']})
    assert repo['gpg-key'].get('name') != key['name']
def test_positive_delete_key_for_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has one
    repository then delete the key

    :id: 3658e04d-fc63-499f-a22d-b512941cc96b

    :expectedresults: gpg key is associated with the single repository but
        not the product during creation and was removed from repository
        after deletion

    :CaseLevel: Integration
    """
    org_id = self.org['id']
    # Build product, repository and key
    product = make_product({'organization-id': org_id})
    repository = make_repository({'product-id': product['id']})
    key = make_gpg_key({'organization-id': org_id})
    # Attach the key to the repository only
    Repository.update({
        'gpg-key': key['name'],
        'id': repository['id'],
        'organization-id': org_id,
    })
    # Confirm repo-level association; product must stay untouched
    repository = Repository.info({'id': repository['id']})
    product = Product.info({
        'id': product['id'],
        'organization-id': org_id,
    })
    self.assertNotEqual(product['gpg']['gpg-key'], key['name'])
    self.assertEqual(repository['gpg-key'].get('name'), key['name'])
    # Remove the key and prove it no longer exists
    GPGKey.delete({
        'name': key['name'],
        'organization-id': org_id,
    })
    with self.assertRaises(CLIReturnCodeError):
        GPGKey.info({
            'id': key['id'],
            'organization-id': org_id,
        })
    # The repository must have dropped the deleted key
    repository = Repository.info({'id': repository['id']})
    self.assertNotEqual(repository['gpg-key'].get('name'), key['name'])
def test_positive_delete_ostree_by_name(self):
    """Delete Ostree repository by name

    @id: 0b545c22-acff-47b6-92ff-669b348f9fa6

    @Assert: Repository is deleted by name
    """
    repo = self._make_repository({
        u'content-type': u'ostree',
        u'publish-via-http': u'false',
        u'url': FEDORA23_OSTREE_REPO,
    })
    # Delete by the name/product/organization triple instead of by id
    Repository.delete({
        u'name': repo['name'],
        u'product': repo['product']['name'],
        u'organization': repo['organization'],
    })
    # A subsequent lookup by name must fail
    with self.assertRaises(CLIReturnCodeError):
        Repository.info({u'name': repo['name']})
def test_positive_add_redhat_and_custom_products(self):
    """Test if RH/Custom product can be associated to Activation key

    :id: 74c77426-18f5-4abb-bca9-a2135f7fcc1f

    :Steps:

        1. Create Activation key
        2. Associate RH product(s) to Activation Key
        3. Associate custom product(s) to Activation Key

    :expectedresults: RH/Custom product is successfully associated to
        Activation key

    :CaseLevel: System

    :BZ: 1426386
    """
    org = make_org()
    # Using CDN as we need this repo to be RH one no matter are we in
    # downstream or cdn
    result = setup_org_for_a_rh_repo({
        u'product': PRDS['rhel'],
        u'repository-set': REPOSET['rhst7'],
        u'repository': REPOS['rhst7']['name'],
        u'organization-id': org['id'],
    }, force_use_cdn=True)
    # Reuse the activation key / content view / lifecycle env created by
    # the RH setup so the custom repo lands on the same activation key
    result = setup_org_for_a_custom_repo({
        u'url': FAKE_0_YUM_REPO,
        u'organization-id': org['id'],
        u'activationkey-id': result['activationkey-id'],
        u'content-view-id': result['content-view-id'],
        u'lifecycle-environment-id': result['lifecycle-environment-id'],
    })
    repo = Repository.info({u'id': result['repository-id']})
    content = ActivationKey.product_content({
        u'id': result['activationkey-id'],
        u'organization-id': org['id'],
    })
    # Exactly two content entries: the RH repo set and the custom repo
    self.assertEqual(len(content), 2)
    self.assertEqual(
        {REPOSET['rhst7'], repo['name']}, {pc['name'] for pc in content})
def create(
    self,
    organization_id,
    product_id=None,
    download_policy=DOWNLOAD_POLICY_ON_DEMAND,
    synchronize=True,
):
    # type: (int, Optional[int], Optional[str], Optional[bool]) -> Dict
    """Create an RH repository

    :param organization_id: Organization in which to enable/create the repo
    :param product_id: Product id forwarded to the parent ``create`` for
        the non-CDN (custom URL) path
    :param download_policy: Download policy to set on the repo; skipped
        when falsy
    :param synchronize: When True, synchronize the repo after creation
        (CDN path only; the parent handles the custom path)
    :return: Repository info dict
    :raises ValueError: If neither a CDN source nor a custom url is set
    """
    if not self.cdn and not self.url:
        raise ValueError(
            'Can not handle Custom repository with url not supplied')
    if self.cdn:
        data = self.data
        # Enable the repository set only if the repo is not already
        # present in this organization (idempotent re-use)
        if not Repository.list({
                'organization-id': organization_id,
                'name': data['repository'],
                'product': data['product'],
        }):
            RepositorySet.enable({
                'organization-id': organization_id,
                'product': data['product'],
                'name': data['repository-set'],
                'basearch': data.get('arch', constants.DEFAULT_ARCHITECTURE),
                'releasever': data.get('releasever'),
            })
        repo_info = Repository.info({
            'organization-id': organization_id,
            'name': data['repository'],
            'product': data['product'],
        })
        if download_policy:
            # Set download policy
            Repository.update({
                'download-policy': download_policy,
                'id': repo_info['id']
            })
        # Cache the info so later calls (e.g. synchronize) can use it
        self._repo_info = repo_info
        if synchronize:
            self.synchronize()
    else:
        # Custom-url path: delegate creation to the parent class
        repo_info = super().create(organization_id, product_id,
                                   download_policy=download_policy)
    return repo_info
def validate_repo_content(repo, content_types, after_sync=True):
    """Check whether corresponding content is present in repository before
    or after synchronization is performed

    :param repo: Repository instance to be validated
    :param content_types: List of repository content entities that
        should be validated (e.g. package, erratum, puppet_module)
    :param bool after_sync: Specify whether you perform validation before
        synchronization procedure is happened or after
    """
    # Pull fresh counts before validating
    info = Repository.info({'id': repo['id']})
    for content_type in content_types:
        count = int(info['content-counts'][content_type])
        if after_sync:
            # Synced repos must expose a positive count per type
            assert count > 0
        else:
            # Un-synced repos must report zero for every type
            assert count == 0