def test_negative_create_1(self):
    """Attempt to create products using a list of invalid names.

    @Test: Check that only valid names can be used

    @Feature: Product

    @Assert: Product is not created
    """
    for invalid_name in invalid_values_list():
        with self.subTest(invalid_name):
            # Each invalid name must be rejected by the product factory
            options = {
                u"name": invalid_name,
                u"organization-id": self.org["id"],
            }
            with self.assertRaises(CLIFactoryError):
                make_product(options)
def test_negative_create_2(self):
    """Attempt to create products using invalid labels.

    @Test: Check that only valid labels can be used

    @Feature: Product

    @Assert: Product is not created
    """
    product_name = gen_alphanumeric()
    # Labels may only contain a restricted character set; these string
    # types all produce characters outside of it.
    invalid_labels = [
        gen_string(string_type, 15)
        for string_type in ("latin1", "utf8", "html")
    ]
    for invalid_label in invalid_labels:
        with self.subTest(invalid_label):
            with self.assertRaises(CLIFactoryError):
                make_product({
                    u"label": invalid_label,
                    u"name": product_name,
                    u"organization-id": self.org["id"],
                })
def test_negative_create_1(self, test_name):
    """Attempt to create a product with an invalid (data-driven) name.

    @Test: Check that only valid names can be used

    @Feature: Product

    @Assert: Product is not created
    """
    options = {
        u'name': test_name['name'],
        u'organization-id': self.org['id'],
    }
    # The factory must refuse to create the product
    with self.assertRaises(CLIFactoryError):
        make_product(options)
def test_negative_create_with_name(self):
    """Check that only valid names can be used

    @id: 2da26ab2-8d79-47ea-b4d2-defcd98a0649

    @Assert: Product is not created
    """
    for invalid_name in invalid_values_list():
        with self.subTest(invalid_name):
            # Creation with an invalid name must raise a factory error
            payload = {
                u'name': invalid_name,
                u'organization-id': self.org['id'],
            }
            with self.assertRaises(CLIFactoryError):
                make_product(payload)
def test_negative_create_2(self, test_name):
    """Attempt to create a product with an invalid (data-driven) label.

    @Test: Check that only valid labels can be used

    @Feature: Product

    @Assert: Product is not created
    """
    options = {
        u'name': test_name['name'],
        u'label': test_name['label'],
        u'organization-id': self.org['id'],
    }
    with self.assertRaises(Exception):
        make_product(options)
def test_positive_add_repo_from_product_with_repo(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    @feature: GPG Keys

    @assert: gpg key is associated with the repository but not with
    the product
    """
    custom_product = make_product({'organization-id': self.org['id']})
    custom_repo = make_repository({'product-id': custom_product['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Attach the key at the repository level only
    Repository.update({
        'gpg-key-id': key['id'],
        'id': custom_repo['id'],
        'organization-id': self.org['id'],
    })
    # Re-read both entities so the assertions see server-side state
    custom_product = Product.info({
        'id': custom_product['id'],
        'organization-id': self.org['id'],
    })
    custom_repo = Repository.info({'id': custom_repo['id']})
    self.assertEqual(custom_repo['gpg-key']['id'], key['id'])
    self.assertNotEqual(custom_product['gpg'].get('gpg-key-id'), key['id'])
def test_add_syncplan_1(self):
    """@Test: Check if product can be assigned a syncplan

    @Feature: Product

    @Assert: Product has syncplan
    """
    try:
        product = make_product({
            u'organization-id': self.org['id']
        })
        plan = make_sync_plan({'organization-id': self.org['id']})
    except CLIFactoryError as err:
        self.fail(err)
    # Associate the sync plan and make sure the CLI call succeeded
    result = Product.set_sync_plan({
        'sync-plan-id': plan['id'],
        'id': product['id'],
    })
    self.assertEqual(result.return_code, 0)
    self.assertEqual(len(result.stderr), 0)
    # Re-read the product and verify the association stuck
    result = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(result.return_code, 0)
    self.assertEqual(len(result.stderr), 0)
    self.assertEqual(result.stdout['sync-plan-id'], plan['id'])
def test_product_synchronize_by_label(self):
    """@Test: Check if product can be synchronized.
    Searches for organization by its label

    @Feature: Product

    @Assert: Product was synchronized
    """
    try:
        organization = make_org()
        product = make_product({'organization-id': organization['id']})
        make_repository({'product-id': product['id']})
    except CLIFactoryError as err:
        self.fail(err)
    # Trigger the sync addressing the org by label instead of id
    result = Product.synchronize({
        'id': product['id'],
        'organization-label': organization['label'],
    })
    self.assertEqual(result.return_code, 0)
    result = Product.info({
        'id': product['id'],
        'organization-id': organization['id'],
    })
    self.assertEqual(result.return_code, 0)
    self.assertEqual(u'Syncing Complete.', result.stdout['sync-state'])
def test_positive_add_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    @id: 1427f145-9faf-41ef-ae42-dc91d61ce1f6

    @assert: gpg key is associated with the repository but not with
    the product

    @CaseLevel: Integration
    """
    prod = make_product({'organization-id': self.org['id']})
    custom_repo = make_repository({'product-id': prod['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Associate the key with the repository only, not the product
    Repository.update({
        'gpg-key-id': key['id'],
        'id': custom_repo['id'],
        'organization-id': self.org['id'],
    })
    # Refresh both entities from the server before asserting
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    custom_repo = Repository.info({'id': custom_repo['id']})
    self.assertEqual(custom_repo['gpg-key']['id'], key['id'])
    self.assertNotEqual(prod['gpg'].get('gpg-key-id'), key['id'])
def test_positive_add_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file
    import then associate it with custom product that has one repository

    @id: 5529a852-9ef6-48f8-b2bc-2bbf463657dd

    @assert: gpg key is associated with product as well as with
    the repository

    @CaseLevel: Integration
    """
    prod = make_product({'organization-id': self.org['id']})
    prod_repo = make_repository({'product-id': prod['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Associating the key at product level should cascade to the repo
    Product.update({
        'gpg-key': key['name'],
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod_repo = Repository.info({'id': prod_repo['id']})
    self.assertEqual(prod['gpg']['gpg-key-id'], key['id'])
    self.assertEqual(prod_repo['gpg-key']['id'], key['id'])
def test_positive_remove_sync_plan_by_id(self):
    """Check if a sync plan can be removed from a product

    @id: 0df2005c-158a-48cb-8a16-9a63923699fc

    @Assert: Product has sync plan

    @CaseLevel: Integration
    """
    prod = make_product({u'organization-id': self.org['id']})
    plan = make_sync_plan({'organization-id': self.org['id']})
    Product.set_sync_plan({
        'id': prod['id'],
        'sync-plan-id': plan['id'],
    })
    # Sanity check: the plan was actually attached
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(prod['sync-plan-id'], plan['id'])
    # Detach the plan and verify the association is gone
    Product.remove_sync_plan({'id': prod['id']})
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(len(prod['sync-plan-id']), 0)
def set_importing_org(self, product, repo, cv):
    """Sets same CV, product and repository in importing organization as
    exporting organization

    :param str product: The product name same as exporting product
    :param str repo: The repo name same as exporting repo
    :param str cv: The cv name same as exporting cv
    """
    self.importing_org = make_org()
    # Mirror the exporting product under the importing org
    prod = make_product({
        'organization-id': self.importing_org['id'],
        'name': product,
    })
    # Immediate download policy so imported content is fully present
    repository = make_repository({
        'name': repo,
        'download-policy': 'immediate',
        'product-id': prod['id'],
    })
    self.importing_cv = make_content_view({
        'name': cv,
        'organization-id': self.importing_org['id'],
    })
    ContentView.add_repository({
        'id': self.importing_cv['id'],
        'organization-id': self.importing_org['id'],
        'repository-id': repository['id'],
    })
def test_positive_add_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository

    :id: b05c5223-44d5-4a48-9d99-18ca351c84a5

    :expectedresults: gpg key is associated with product as well as with
        the repositories

    :CaseLevel: Integration
    """
    prod = make_product({'organization-id': self.org['id']})
    repo_count = gen_integer(2, 5)
    prod_repos = [
        make_repository({'product-id': prod['id']})
        for _ in range(repo_count)
    ]
    key = make_gpg_key({'organization-id': self.org['id']})
    # A product-level association should propagate to every repo
    Product.update({
        'gpg-key': key['name'],
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(prod['gpg']['gpg-key-id'], key['id'])
    for entry in prod_repos:
        refreshed = Repository.info({'id': entry['id']})
        self.assertEqual(refreshed['gpg-key']['id'], key['id'])
def test_positive_update_gpg_key(self):
    """Update product's gpg keys

    @id: e7febd14-ac8b-424e-9ddf-bf0f63ebe430

    @Assert: Product gpg key is updated
    """
    old_key = make_gpg_key({u'organization-id': self.org['id']})
    new_key = make_gpg_key({u'organization-id': self.org['id']})
    # Create the product already associated with the first key
    product = make_product({
        u'gpg-key-id': old_key['id'],
        u'organization-id': self.org['id'],
    })
    # Swap to the second key
    Product.update({
        u'gpg-key-id': new_key['id'],
        u'id': product['id'],
    })
    # Re-fetch and confirm the swap took effect
    product = Product.info({
        u'id': product['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key-id'], new_key['id'])
    self.assertNotEqual(product['gpg']['gpg-key-id'], old_key['id'])
def test_positive_synchronize_custom_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    custom product and verify the product gets synchronized on the next
    sync occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    # Sync date is "now", so the next occurrence is one interval away
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': self.org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(
        repo, ['errata', 'package-groups', 'packages'])
def test_positive_add_repo_from_product_with_repos(self):
    """@test: Create gpg key via file import and associate with custom repo

    GPGKey should contain valid name and valid key and should be
    associated to one repository from custom product. Make sure custom
    product should have more than one repository.

    @feature: GPG Keys

    @assert: gpg key is associated with the repository
    """
    prod = make_product({'organization-id': self.org['id']})
    prod_repos = [
        make_repository({'product-id': prod['id']})
        for _ in range(gen_integer(2, 5))
    ]
    key = make_gpg_key({'organization-id': self.org['id']})
    # Attach the key to a single repository only
    Repository.update({
        'gpg-key': key['name'],
        'id': prod_repos[0]['id'],
        'organization-id': self.org['id'],
    })
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    # The product itself must not pick up the key
    self.assertNotEqual(prod['gpg'].get('gpg-key-id'), key['id'])
    first, remaining = prod_repos[0], prod_repos[1:]
    # First repo should have a valid gpg key assigned
    repo = Repository.info({'id': first['id']})
    self.assertEqual(repo['gpg-key']['id'], key['id'])
    # The rest of repos should not
    for entry in remaining:
        repo = Repository.info({'id': entry['id']})
        self.assertNotEqual(repo['gpg-key'].get('id'), key['id'])
def test_positive_delete_key_for_empty_product(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it with empty (no repos) custom product then
    delete it

    @feature: GPG Keys

    @assert: gpg key is associated with product during creation but
    removed from product after deletion
    """
    # Create a product and a gpg key
    key = make_gpg_key({'organization-id': self.org['id']})
    prod = make_product({
        'gpg-key-id': key['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    self.assertEqual(prod['gpg']['gpg-key'], key['name'])
    # Delete the gpg key
    GPGKey.delete({
        'name': key['name'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was actually deleted
    with self.assertRaises(CLIReturnCodeError):
        GPGKey.info({
            'id': key['id'],
            'organization-id': self.org['id'],
        })
    # Verify gpg key was disassociated from the product
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(prod['gpg']['gpg-key'], key['name'])
def test_negative_synchronize_custom_product_current_sync_date(self):
    """Verify product won't get synced immediately after adding association
    with a sync plan which has already been started

    :id: c80f5c0c-3863-47da-8d7b-7d65c73664b0

    :expectedresults: Repository was not synchronized

    :BZ: 1279539

    :CaseLevel: System
    """
    # Plan whose start date is already in the past ("now")
    plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    prod = make_product({'organization-id': self.org['id']})
    prod_repo = make_repository({'product-id': prod['id']})
    Product.set_sync_plan({
        'id': prod['id'],
        'sync-plan-id': plan['id'],
    })
    # The repo must NOT contain synced content, so the content check
    # is expected to fail
    with self.assertRaises(AssertionError):
        self.validate_repo_content(
            prod_repo,
            ['errata', 'package-groups', 'packages'],
            max_attempts=5,
        )
def test_positive_create_4(self, test_name):
    """@Test: Check if product can be created with gpg key

    @Feature: Product

    @Assert: Product is created and has gpg key

    @BZ: 1096320
    """
    key = make_gpg_key({u'organization-id': self.org['id']})
    # Create the product already bound to the gpg key
    product = make_product({
        u'name': test_name['name'],
        u'organization-id': self.org['id'],
        u'gpg-key-id': key['id'],
    })
    # Fetch it
    result = Product.info({
        u'id': product['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(
        result.return_code, 0, "Product was not found")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    self.assertEqual(
        result.stdout['name'], product['name'], "Names don't match")
    self.assertEqual(
        result.stdout['gpg']['gpg-key-id'],
        key['id'],
        "GPG Keys don't match")
def test_positive_synchronize_custom_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one custom
    product with it automatically.

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan
    """
    delay = 10 * 60  # delay for sync date in seconds
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': (datetime.utcnow() + timedelta(seconds=delay))
        .strftime("%Y-%m-%d %H:%M:%S"),
    })
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait half of expected time
    sleep(delay/2)
    # Verify product has not been synced yet
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time
    sleep(delay/2)
    # Verify product was synced successfully
    self.validate_repo_content(
        repo, ['errata', 'package-groups', 'packages'])
def test_positive_add_product_with_repos(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it with custom product that has more than one
    repository

    @feature: GPG Keys

    @assert: gpg key is associated with product as well as with
    the repositories
    """
    prod = make_product({'organization-id': self.org['id']})
    prod_repos = [
        make_repository({'product-id': prod['id']})
        for _ in range(gen_integer(2, 5))
    ]
    key = make_gpg_key({'organization-id': self.org['id']})
    # Product-level association should cascade to every repository
    Product.update({
        'gpg-key': key['name'],
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(prod['gpg']['gpg-key-id'], key['id'])
    for entry in prod_repos:
        refreshed = Repository.info({'id': entry['id']})
        self.assertEqual(refreshed['gpg-key']['id'], key['id'])
def test_add_synced_docker_repo_to_content_view(self):
    """@Test: Create and sync a Docker-type repository

    @Assert: A repository is created with a Docker repository
    and it is synchronized.

    @Feature: Docker
    """
    docker_repo = _make_docker_repo(
        make_product({'organization-id': self.org_id})['id'])
    sync_result = Repository.synchronize({'id': docker_repo['id']})
    self.assertEqual(sync_result.return_code, 0)
    docker_repo = Repository.info({'id': docker_repo['id']}).stdout
    # After a successful sync at least one image must be present
    self.assertGreaterEqual(
        int(docker_repo['content-counts']['docker-images']), 1)
    cv = make_content_view({
        'composite': False,
        'organization-id': self.org_id,
    })
    add_result = ContentView.add_repository({
        'id': cv['id'],
        'repository-id': docker_repo['id'],
    })
    self.assertEqual(add_result.return_code, 0)
    cv = ContentView.info({'id': cv['id']}).stdout
    self.assertIn(
        docker_repo['id'],
        [item['id'] for item in cv['docker-repositories']],
    )
def test_add_multiple_docker_repos_to_content_view(self):
    """@Test: Add multiple Docker-type repositories to a
    non-composite content view.

    @Assert: Repositories are created with Docker images and the
    product is added to a non-composite content view.

    @Feature: Docker
    """
    product = make_product({'organization-id': self.org_id})
    repos = [
        _make_docker_repo(product['id'])
        for _ in range(randint(2, 5))
    ]
    content_view = make_content_view({
        'composite': False,
        'organization-id': self.org_id,
    })
    # Attach every repository and check each CLI call succeeded
    for repo in repos:
        result = ContentView.add_repository({
            'id': content_view['id'],
            'repository-id': repo['id'],
        })
        self.assertEqual(result.return_code, 0)
    content_view = ContentView.info({'id': content_view['id']}).stdout
    # Set comprehensions instead of set([...]) around list
    # comprehensions: same values, no throwaway intermediate list.
    self.assertEqual(
        {repo['id'] for repo in repos},
        {repo['id'] for repo in content_view['docker-repositories']},
    )
def test_positive_update_sync_plan(self):
    """Update product's sync plan

    @id: 78cbde49-b6c8-41ab-8991-fcb4b648e79b

    @Assert: Product sync plan is updated
    """
    old_plan = make_sync_plan({u'organization-id': self.org['id']})
    new_plan = make_sync_plan({u'organization-id': self.org['id']})
    # Product starts out attached to the first plan
    product = make_product({
        u'organization-id': self.org['id'],
        u'sync-plan-id': old_plan['id'],
    })
    # Switch to the second plan
    Product.update({
        u'id': product['id'],
        u'sync-plan-id': new_plan['id'],
    })
    # Re-fetch and verify the switch
    product = Product.info({
        u'id': product['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(product['sync-plan-id'], new_plan['id'])
    self.assertNotEqual(product['sync-plan-id'], old_plan['id'])
def test_add_docker_repo_to_content_view(self):
    """@Test: Add one Docker-type repository to a non-composite content view

    @Assert: A repository is created with a Docker repository and the
    product is added to a non-composite content view

    @Feature: Docker
    """
    docker_repo = _make_docker_repo(
        make_product({'organization-id': self.org_id})['id'])
    cv = make_content_view({
        'composite': False,
        'organization-id': self.org_id,
    })
    result = ContentView.add_repository({
        'id': cv['id'],
        'repository-id': docker_repo['id'],
    })
    self.assertEqual(result.return_code, 0)
    cv = ContentView.info({'id': cv['id']}).stdout
    # The repo must appear among the view's docker repositories
    self.assertIn(
        docker_repo['id'],
        [item['id'] for item in cv['docker-repositories']],
    )
def test_delete_random_docker_repo(self):
    """@Test: Create Docker-type repositories on multiple products and
    delete a random repository from a random product.

    @Assert: Random repository can be deleted from random product without
    altering the other products.

    @Feature: Docker
    """
    products = [
        make_product({'organization-id': self.org_id})
        for _ in range(randint(2, 5))
    ]
    repos = []
    for prod in products:
        repos.extend(
            _make_docker_repo(prod['id'])
            for _ in range(randint(2, 3))
        )
    # Select random repository and delete it
    victim = choice(repos)
    repos.remove(victim)
    result = Repository.delete({'id': victim['id']})
    self.assertEqual(result.return_code, 0)
    # The deleted repo must no longer be retrievable
    result = Repository.info({'id': victim['id']})
    self.assertNotEqual(result.return_code, 0)
    # Verify other repositories were not touched
    for survivor in repos:
        result = Repository.info({'id': survivor['id']})
        self.assertEqual(result.return_code, 0)
        self.assertIn(
            result.stdout['product']['id'],
            [prod['id'] for prod in products],
        )
def test_positive_update_1(self, test_data):
    """@Test: Update the description of a product

    @Feature: Product

    @Assert: Product description is updated
    """
    product = make_product({
        u'organization-id': self.org['id'],
    })
    # Update the description. The original bound the return value to
    # `result` only to overwrite it immediately below — the update's
    # outcome is verified by re-fetching the product, so the dead
    # assignment is dropped.
    Product.update({
        u'id': product['id'],
        u'description': test_data['description'],
    })
    # Fetch it
    result = Product.info({
        u'id': product['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(result.return_code, 0)
    self.assertEqual(len(result.stderr), 0)
    self.assertEqual(
        result.stdout['description'], test_data['description'])
    self.assertNotEqual(
        product['description'], result.stdout['description'])
def setUpClass(cls):
    """Create Directory for all CV Sync Tests in /tmp"""
    super(ContentViewSync, cls).setUpClass()
    cls.exporting_org = make_org()
    # Record the generated names on the class so individual tests can
    # look the entities up later by name
    cls.exporting_prod = gen_string('alpha')
    exporting_product = make_product({
        'organization-id': cls.exporting_org['id'],
        'name': cls.exporting_prod,
    })
    cls.exporting_repo = gen_string('alpha')
    # Immediate download policy so exported content is fully on disk
    exporting_repository = make_repository({
        'name': cls.exporting_repo,
        'download-policy': 'immediate',
        'product-id': exporting_product['id'],
    })
    Repository.synchronize({'id': exporting_repository['id']})
    cls.exporting_cv = gen_string('alpha')
    exporting_view = make_content_view({
        'name': cls.exporting_cv,
        'organization-id': cls.exporting_org['id'],
    })
    ContentView.add_repository({
        'id': exporting_view['id'],
        'organization-id': cls.exporting_org['id'],
        'repository-id': exporting_repository['id'],
    })
    ContentView.publish({u'id': exporting_view['id']})
    # Re-read the view to learn the id of the freshly published version
    exporting_view = ContentView.info({u'id': exporting_view['id']})
    cls.exporting_cvv_id = exporting_view['versions'][0]['id']
def test_positive_delete_1(self):
    """@Test: Check if product can be deleted

    @Feature: Product

    @Assert: Product is deleted
    """
    product = make_product({
        u'organization-id': self.org['id']
    })
    # Delete it
    result = Product.delete({u'id': product['id']})
    self.assertEqual(result.return_code, 0)
    self.assertEqual(len(result.stderr), 0)
    # Fetch it
    result = Product.info({
        u'id': product['id'],
        u'organization-id': self.org['id'],
    })
    if bz_bug_is_open(1219490):
        # Deletion may be asynchronous while the BZ is open: poll up to
        # five times while the product is still retrievable
        attempts = 0
        while attempts < 5 and result.return_code == 0:
            time.sleep(5)
            result = Product.info({
                u'id': product['id'],
                u'organization-id': self.org['id'],
            })
            attempts += 1
    self.assertNotEqual(result.return_code, 0)
    self.assertGreater(len(result.stderr), 0)
def test_positive_create_2(self, test_name):
    """@Test: Check if product can be created with random labels

    @Feature: Product

    @Assert: Product is created and has random label

    @BZ: 1096320
    """
    product = make_product({
        u'name': test_name['name'],
        u'label': test_name['label'],
        u'organization-id': self.org['id'],
    })
    # Fetch it
    result = Product.info({
        u'id': product['id'],
        u'organization-id': self.org['id'],
    })
    self.assertEqual(
        result.return_code, 0, "Product was not found")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    self.assertEqual(
        result.stdout['name'], product['name'], "Names don't match")
    self.assertEqual(
        result.stdout['label'],
        product['label'],
        "Labels don't match"
    )
def test_positive_synchronize_custom_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one custom
    product with it automatically.

    :id: 635bffe2-df98-4971-8950-40edc89e479e

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System

    :BZ: 1655595
    """
    delay = 2 * 60  # delay for sync date in seconds
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # second=0 truncation plus the cron expression presumably keeps the
    # plan aligned to whole minutes — TODO confirm against SyncPlan docs
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': (datetime.utcnow().replace(second=0)
                      + timedelta(seconds=delay)).strftime("%Y-%m-%d %H:%M:%S"),
        'cron-expression': ["*/4 * * * *"],
    })
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(repo, ['errata', 'packages'],
                               after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait quarter of expected time
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay / 4, product['name']))
    sleep(delay / 4)
    # Verify product has not been synced yet
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo['id'], max_tries=1)
    self.validate_repo_content(repo, ['errata', 'packages'],
                               after_sync=False)
    # Wait the rest of expected time
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format((delay * 3 / 4), product['name']))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    self.validate_task_status(repo['id'], repo_name=repo['name'])
    self.validate_repo_content(repo,
                               ['errata', 'package-groups', 'packages'])
def test_positive_synchronize_custom_product_past_sync_date(self):
    """Create a sync plan with a past datetime as a sync date, add a
    custom product and verify the product gets synchronized on the next
    sync occurrence

    :id: 21efdd08-698c-443c-a681-edce19a4c83a

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 2 * 60
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # Start date is (interval - delay) in the past, so the next hourly
    # occurrence lands roughly `delay` seconds from now
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': self.org['id'],
        'sync-date': (datetime.utcnow()
                      - timedelta(seconds=interval - delay)
                      ).strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Wait quarter of expected time
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay / 4, product['name']))
    sleep(delay / 4)
    # Verify product has not been synced yet
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo['id'], max_tries=1)
    self.validate_repo_content(repo, ['errata', 'packages'],
                               after_sync=False)
    # Wait until the first recurrence
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format((delay * 3 / 4), product['name']))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    self.validate_task_status(repo['id'], repo_name=repo['name'])
    self.validate_repo_content(repo,
                               ['errata', 'package-groups', 'packages'])
def test_positive_synchronize_custom_products_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync multiple
    custom products with multiple repos automatically.

    :id: dd262cf3-b836-422c-baca-b3adbc532478

    :expectedresults: Products are synchronized successfully.

    :CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': (datetime.utcnow()
                      + timedelta(seconds=delay)).strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Several products, each with several repositories, all driven by
    # the same sync plan
    products = [
        make_product({'organization-id': self.org['id']})
        for _ in range(randint(3, 5))
    ]
    repos = [
        make_repository({'product-id': product['id']})
        for product in products
        for _ in range(randint(2, 3))
    ]
    # Verify products have not been synced yet
    for repo in repos:
        self.validate_repo_content(repo, ['errata', 'packages'],
                                   after_sync=False)
    # Associate sync plan with products
    for product in products:
        Product.set_sync_plan({
            'id': product['id'],
            'sync-plan-id': sync_plan['id'],
        })
    # Wait half of expected time
    sleep(delay / 2)
    # Verify products has not been synced yet
    for repo in repos:
        self.validate_repo_content(repo, ['errata', 'packages'],
                                   after_sync=False)
    # Wait the rest of expected time
    sleep(delay / 2)
    # Verify product was synced successfully
    for repo in repos:
        self.validate_repo_content(
            repo, ['errata', 'package-groups', 'packages'])
def test_positive_synchronize_custom_product_past_sync_date(module_org):
    """Create a sync plan with a past datetime as a sync date, add a
    custom product and verify the product gets synchronized on the next
    sync occurrence

    :id: 21efdd08-698c-443c-a681-edce19a4c83a

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 2 * 60
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    # Start date is (interval - delay) in the past, so the next hourly
    # occurrence is about `delay` seconds away
    sync_plan = make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': module_org.id,
        'sync-date': (datetime.utcnow()
                      - timedelta(seconds=interval - delay)).strftime(SYNC_DATE_FMT),
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id']
    })
    # Wait quarter of expected time
    logger.info(
        f"Waiting {(delay / 4)} seconds to check product {product['name']}"
        f" was not synced by {sync_plan['name']}")
    sleep(delay / 4)
    # Verify product has not been synced yet
    with pytest.raises(AssertionError):
        validate_task_status(repo['id'], module_org.id, max_tries=1)
    validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Wait until the first recurrence
    logger.info(
        f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}"
        f" was synced by {sync_plan['name']}")
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    validate_task_status(repo['id'], module_org.id)
    validate_repo_content(repo, ['errata', 'package-groups', 'packages'])
def test_positive_synchronize_custom_product_daily_recurrence(self):
    """Create a daily sync plan with a past datetime as a sync date,
    add a custom product and verify the product gets synchronized
    on the next sync occurrence

    :id: 8d882e8b-b5c1-4449-81c6-0efd31ad75a7

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 5 * 60
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # One day in the past plus `delay`: the next daily occurrence is
    # roughly `delay` seconds from now
    start_date = datetime.utcnow() - timedelta(days=1)\
        + timedelta(seconds=delay)
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'daily',
        'organization-id': self.org['id'],
        'sync-date': start_date.strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Verify product has not been synced yet
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay/4, product['name']))
    sleep(delay/4)
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo['id'], max_tries=2)
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Wait until the first recurrence.
    # Bug fix: the original message logged `delay` although the code
    # sleeps only delay * 3/4 — the log now matches the actual wait.
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format(delay * 3/4, product['name']))
    sleep(delay * 3/4)
    # Re-calculate and Update with the current UTC time
    start_date = datetime.utcnow() - timedelta(days=1)\
        + timedelta(seconds=delay)
    SyncPlan.update({
        u'id': sync_plan['id'],
        u'sync-date': start_date.strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Verify product was synced successfully
    self.validate_task_status(repo['id'], repo_name=repo['name'])
    self.validate_repo_content(
        repo, ['errata', 'package-groups', 'packages'])
def setUpClass(cls):
    """Init single organization, product and repository for all tests"""
    super(ContentViewFilterTestCase, cls).setUpClass()
    cls.org = make_org()
    cls.product = make_product({u'organization-id': cls.org['id']})
    cls.repo = make_repository({u'product-id': cls.product['id']})
    # Content must exist before the filter tests run
    Repository.synchronize({u'id': cls.repo['id']})
    cls.content_view = make_content_view({
        u'organization-id': cls.org['id'],
    })
    ContentView.add_repository({
        u'id': cls.content_view['id'],
        u'repository-id': cls.repo['id'],
    })
def test_positive_delete_key_for_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository then
    delete it

    :id: a5d4ea02-f015-4026-b4dc-7365eaf00049

    :expectedresults: gpg key is associated with product but and its
        repository during creation but removed from product and
        repository after deletion

    :CaseLevel: Integration
    """
    # Create product, repository and gpg key
    prod = make_product({'organization-id': self.org['id']})
    prod_repo = make_repository({'product-id': prod['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Associate gpg key with a product
    Product.update({
        'gpg-key': key['name'],
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated both with product and its repository
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod_repo = Repository.info({'id': prod_repo['id']})
    self.assertEqual(prod['gpg']['gpg-key'], key['name'])
    self.assertEqual(prod_repo['gpg-key'].get('name'), key['name'])
    # Delete the gpg key
    GPGKey.delete({
        'name': key['name'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was actually deleted
    with self.assertRaises(CLIReturnCodeError):
        GPGKey.info({
            'id': key['id'],
            'organization-id': self.org['id'],
        })
    # Verify gpg key was disassociated from the product and its repository
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod_repo = Repository.info({'id': prod_repo['id']})
    self.assertNotEqual(prod['gpg']['gpg-key'], key['name'])
    self.assertNotEqual(prod_repo['gpg-key'].get('name'), key['name'])
def test_positive_update_1(self, test_data):
    """@Test: Update the description of a product

    @Feature: Product

    @Assert: Product description is updated
    """
    new_product = make_product(
        {
            u'organization-id': self.org['id']
        }
    )
    # Fetch it
    result = Product.info(
        {u'id': new_product['id'], u'organization-id': self.org['id']})
    self.assertEqual(
        result.return_code, 0, "Product was not found")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    # Update the Descriptions
    result = Product.update(
        {u'id': new_product['id'],
         u'description': test_data['description']}
    )
    # Bug fix: previously the update result was discarded unchecked, so a
    # failed update only surfaced indirectly in the later assertions.
    self.assertEqual(
        result.return_code, 0, "Product was not updated")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    # Fetch it
    result = Product.info(
        {u'id': new_product['id'], u'organization-id': self.org['id']})
    self.assertEqual(
        result.return_code, 0, "Product was not found")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    self.assertEqual(
        result.stdout['description'],
        test_data['description'],
        "Description was not updated"
    )
    self.assertNotEqual(
        result.stdout['description'],
        new_product['description'],
        "Descriptions should not match"
    )
def test_positive_logging_from_pulp3(module_org, default_sat):
    """
    Verify Pulp3 logs are getting captured using pulp3 correlation ID

    :id: 8d5718e6-3442-47d6-b541-0aa78d007e8b

    :CaseLevel: Component

    :CaseImportance: High
    """
    source_log = '/var/log/foreman/production.log'
    test_logfile = '/var/log/messages'
    # Create custom product and repository; one random string serves as
    # name, label and description (the three aliases were redundant)
    product_name = gen_string('alpha')
    product = make_product(
        {
            'description': product_name,
            'label': product_name,
            'name': product_name,
            'organization-id': module_org.id,
        },
    )
    repo = make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
            'url': settings.repos.yum_0.url,
        },
    )
    # Synchronize the repository
    Product.synchronize({
        'id': product['id'],
        'organization-id': module_org.id
    })
    Repository.synchronize({'id': repo['id']})
    # Get the id of repository sync from task
    task_out = default_sat.execute(
        "hammer task list | grep -F \'Synchronize repository {\"text\"=>\"repository\'"
    ).stdout.splitlines()[0][:8]
    prod_log_out = default_sat.execute(
        f'grep {task_out} {source_log}').stdout.splitlines()[0]
    # Get correlation id of pulp from production logs; fail with a clear
    # message instead of an AttributeError if the marker is absent
    match = re.search(r'\[I\|bac\|\w{8}\]', prod_log_out)
    assert match is not None, 'pulp correlation ID marker not found in production log'
    pulp_correlation_id = match.group()[7:15]
    # verify pulp correlation id in message (grep the file directly
    # rather than the former useless `cat | grep` pipeline)
    message_log = default_sat.execute(
        f'grep {pulp_correlation_id} {test_logfile}')
    assert message_log.status == 0
def test_positive_delete_key_for_product_with_repo(module_org): """Create gpg key with valid name and valid gpg key via file import then associate it with custom product that has one repository then delete it :id: 1e98e588-8b5d-475c-ad84-5d566df5619c :expectedresults: gpg key is associated with product but and its repository during creation but removed from product and repository after deletion :CaseLevel: Integration """ # Create product, repository and gpg key product = make_product({'organization-id': module_org.id}) repo = make_repository({'product-id': product['id']}) gpg_key = make_content_credential({'organization-id': module_org.id}) # Associate gpg key with a product Product.update({ 'gpg-key': gpg_key['name'], 'id': product['id'], 'organization-id': module_org.id }) # Verify gpg key was associated both with product and its repository product = Product.info({ 'id': product['id'], 'organization-id': module_org.id }) repo = Repository.info({'id': repo['id']}) assert product['gpg']['gpg-key'] == gpg_key['name'] assert repo['gpg-key'].get('name') == gpg_key['name'] # Delete the gpg key ContentCredential.delete({ 'name': gpg_key['name'], 'organization-id': module_org.id }) # Verify gpg key was actually deleted with pytest.raises(CLIReturnCodeError): ContentCredential.info({ 'id': gpg_key['id'], 'organization-id': module_org.id }) # Verify gpg key was disassociated from the product and its repository product = Product.info({ 'id': product['id'], 'organization-id': module_org.id }) repo = Repository.info({'id': repo['id']}) assert product['gpg']['gpg-key'] != gpg_key['name'] assert repo['gpg-key'].get('name') != gpg_key['name']
def test_positive_delete_key_for_repo_from_product_with_repos(module_org): """Create gpg key with valid name and valid gpg key via file import then associate it to repository from custom product that has more than one repository then delete the key :id: 7d6a278b-1063-4e72-bc32-ca60bd17bb84 :expectedresults: gpg key is associated with a single repository but not the product during creation and removed from repository after deletion :CaseLevel: Integration """ # Create product, repositories and gpg key product = make_product({'organization-id': module_org.id}) repos = [] for _ in range(gen_integer(2, 5)): repos.append(make_repository({'product-id': product['id']})) gpg_key = make_content_credential({'organization-id': module_org.id}) # Associate gpg key with a repository Repository.update({ 'gpg-key': gpg_key['name'], 'id': repos[0]['id'], 'organization-id': module_org.id }) # Verify gpg key was associated with the repository repos[0] = Repository.info({'id': repos[0]['id']}) assert repos[0]['gpg-key']['name'] == gpg_key['name'] # Delete the gpg key ContentCredential.delete({ 'name': gpg_key['name'], 'organization-id': module_org.id }) # Verify gpg key was actually deleted with pytest.raises(CLIReturnCodeError): ContentCredential.info({ 'id': gpg_key['id'], 'organization-id': module_org.id }) # Verify gpg key is not associated with any repository or the product # itself product = Product.info({ 'id': product['id'], 'organization-id': module_org.id }) assert product['gpg']['gpg-key'] != gpg_key['name'] for repo in repos: repo = Repository.info({'id': repo['id']}) assert repo['gpg-key'].get('name') != gpg_key['name']
def test_positive_delete_key_for_repo_from_product_with_repos(self): """Create gpg key with valid name and valid gpg key via file import then associate it to repository from custom product that has more than one repository then delete the key :id: e7ed4ed9-ecfe-4954-b806-cdd0668e8822 :expectedresults: gpg key is associated with a single repository but not the product during creation and removed from repository after deletion :CaseLevel: Integration """ # Create product, repositories and gpg key product = make_product({'organization-id': self.org['id']}) repos = [] for _ in range(gen_integer(2, 5)): repos.append(make_repository({'product-id': product['id']})) gpg_key = make_gpg_key({'organization-id': self.org['id']}) # Associate gpg key with a repository Repository.update({ 'gpg-key': gpg_key['name'], 'id': repos[0]['id'], 'organization-id': self.org['id'], }) # Verify gpg key was associated with the repository repos[0] = Repository.info({'id': repos[0]['id']}) self.assertEqual(repos[0]['gpg-key']['name'], gpg_key['name']) # Delete the gpg key GPGKey.delete({ 'name': gpg_key['name'], 'organization-id': self.org['id'], }) # Verify gpg key was actually deleted with self.assertRaises(CLIReturnCodeError): GPGKey.info({ 'id': gpg_key['id'], 'organization-id': self.org['id'], }) # Verify gpg key is not associated with any repository or the product # itself product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertNotEqual(product['gpg']['gpg-key'], gpg_key['name']) for repo in repos: repo = Repository.info({'id': repo['id']}) self.assertNotEqual(repo['gpg-key'].get('name'), gpg_key['name'])
def test_positive_delete_1(self, test_name):
    """@Test: Check if product can be deleted

    @Feature: Product

    @Assert: Product is deleted
    """
    new_product = make_product(
        {
            u'name': test_name['name'],
            u'organization-id': self.org['id']
        }
    )
    # Fetch it to confirm it exists before attempting deletion
    result = Product.info(
        {u'id': new_product['id'], u'organization-id': self.org['id']})
    self.assertEqual(
        result.return_code, 0, "Product was not found")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    self.assertEqual(
        result.stdout['name'], new_product['name'], "Names don't match")
    # The server derives a label automatically when none is supplied
    self.assertGreater(
        len(result.stdout['label']), 0, "Label not automatically created"
    )
    # Delete it
    result = Product.delete({u'id': new_product['id']})
    self.assertEqual(
        result.return_code, 0, "Product was not deleted")
    self.assertEqual(
        len(result.stderr), 0, "No error was expected")
    # Fetch it again; the lookup must now fail with a non-zero exit code
    result = Product.info(
        {u'id': new_product['id'], u'organization-id': self.org['id']})
    self.assertNotEqual(
        result.return_code, 0, "Product should not be found")
    self.assertGreater(
        len(result.stderr), 0, "Error was expected")
def test_positive_create_with_name(self):
    """Check if product can be created with random names

    @id: 252a2073-5094-4996-b157-bf7ff81f40af

    @Assert: Product is created and has random name
    """
    # Each candidate name runs as its own subtest so one failure does
    # not mask the rest
    for prod_name in valid_data_list():
        with self.subTest(prod_name):
            created = make_product({
                u'name': prod_name,
                u'organization-id': self.org['id'],
            })
            self.assertEqual(created['name'], prod_name)
            # A label is derived automatically when none is supplied
            self.assertGreater(len(created['label']), 0)
def test_negative_create_with_label(self):
    """Check that only valid labels can be used

    :id: 7cf970aa-48dc-425b-ae37-1e15dfab0626

    :expectedresults: Product is not created

    :CaseImportance: High
    """
    name = gen_alphanumeric()
    # Labels may only contain plain alphanumeric characters; each of
    # these generated strings violates that constraint
    bad_labels = [gen_string(kind, 15) for kind in ('latin1', 'utf8', 'html')]
    for label in bad_labels:
        with self.subTest(label):
            with self.assertRaises(CLIFactoryError):
                make_product(
                    {
                        'label': label,
                        'name': name,
                        'organization-id': self.org['id'],
                    }
                )
def golden_ticket_host_setup(request, module_org):
    """Upload a golden-ticket manifest into the organization, create a
    synced custom product/repository, and return a new activation key
    bound to the Library environment and default content view.
    """
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(module_org.id, manifest.content)
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    Repository.synchronize({'id': repo['id']})
    activation_key = make_activation_key(
        {
            'lifecycle-environment': 'Library',
            'content-view': 'Default Organization View',
            'organization-id': module_org.id,
            'auto-attach': False,
        }
    )
    return activation_key
def test_positive_disable_hammer_defaults(self):
    """Verify hammer disable defaults command.

    :id: d0b65f36-b91f-4f2f-aaf8-8afda3e23708

    :steps:
        1. Add hammer defaults as organization-id.
        2. Verify hammer product list successful.
        3. Run hammer --no-use-defaults product list.

    :expectedresults: Hammer --no-use-defaults product list should fail.

    :CaseImportance: Critical

    :BZ: 1640644
    """
    default_org = make_org()
    default_product_name = gen_string('alpha')
    make_product({'name': default_product_name, 'organization-id': default_org['id']})
    try:
        Defaults.add({'param-name': 'organization_id', 'param-value': default_org['id']})
        # Verify --organization-id is not required to pass if defaults are set
        result = ssh.command('hammer product list')
        self.assertEqual(result.return_code, 0)
        # Verify product list fail without using defaults
        result = ssh.command('hammer --no-use-defaults product list')
        self.assertNotEqual(result.return_code, 0)
        # assertIn/assertNotIn replace assertTrue/assertFalse(x in y)
        # for far clearer failure messages
        self.assertNotIn(default_product_name, "".join(result.stdout))
        # Verify --organization-id is not required to pass if defaults are set
        result = ssh.command('hammer --use-defaults product list')
        self.assertEqual(result.return_code, 0)
        self.assertIn(default_product_name, "".join(result.stdout))
    finally:
        # Always remove the default so later tests are unaffected
        Defaults.delete({'param-name': 'organization_id'})
        result = ssh.command('hammer defaults list')
        self.assertNotIn(default_org['id'], "".join(result.stdout))
def test_positive_synchronize_custom_product_future_sync_date(module_org):
    """Create a sync plan with sync date in a future and sync one custom
    product with it automatically.

    :id: 635bffe2-df98-4971-8950-40edc89e479e

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System

    :BZ: 1655595
    """
    delay = 2 * 60  # delay for sync date in seconds
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    sync_plan = make_sync_plan({
        'enabled': 'true',
        'organization-id': module_org.id,
        'sync-date': (datetime.utcnow().replace(second=0)
                      + timedelta(seconds=delay)).strftime(SYNC_DATE_FMT),
        'cron-expression': ["*/4 * * * *"],
    })
    # Verify product is not synced and doesn't have any content
    validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id']
    })
    # Wait quarter of expected time: the plan must NOT have fired yet
    logger.info(
        f"Waiting {(delay / 4)} seconds to check product {product['name']}"
        f" was not synced by {sync_plan['name']}")
    sleep(delay / 4)
    # Verify product has not been synced yet (task lookup is expected to
    # fail, hence the AssertionError)
    with pytest.raises(AssertionError):
        validate_task_status(repo['id'], module_org.id, max_tries=1)
    validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Wait the rest of expected time so the scheduled sync can run
    logger.info(
        f"Waiting {(delay * 3 / 4)} seconds to check product {product['name']}"
        f" was synced by {sync_plan['name']}")
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    validate_task_status(repo['id'], module_org.id)
    validate_repo_content(repo, ['errata', 'package-groups', 'packages'])
def test_positive_product_sync_state(module_org):
    """hammer product info shows correct sync state.

    :id: 58af6239-85d7-4b8b-bd2d-ab4cd4f29840

    :BZ: 1803207,1999541

    :customerscenario: true

    :Steps:
        1. Sync a custom repository that fails.
        2. Run `hammer product info --product-id <id>`.
        3. Successfully sync another repository under the same product.
        4. Run `hammer product info --product-id <id>` again.

    :expectedresults: hammer should show 'Sync Incomplete' in both cases.
    """
    product = make_product({'organization-id': module_org.id})
    # Repository with a random unreachable URL so its sync is guaranteed
    # to fail
    repo_a1 = make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
            'name': gen_string('alpha'),
            'url': f'{gen_url(scheme="https")}:{gen_integer(min_value=10, max_value=9999)}',
        }
    )
    with pytest.raises(CLIReturnCodeError):
        Repository.synchronize({'id': repo_a1['id']})
    # The sync state reported by `product info` must match what
    # `product list` shows for the same product
    product_info = Product.info({'id': product['id'], 'organization-id': module_org.id})
    product_list = Product.list({'organization-id': module_org.id})
    assert product_info['sync-state-(last)'] in [p.get('sync-state') for p in product_list]
    # Second repository with a valid URL; its sync should succeed
    repo_a2 = make_repository(
        {
            'organization-id': module_org.id,
            'product-id': product['id'],
            'name': gen_string('alpha'),
            'url': settings.repos.yum_0.url,
        },
    )
    Repository.synchronize({'id': repo_a2['id']})
    # info and list must still agree after the mixed success/failure
    product_info = Product.info({'id': product['id'], 'organization-id': module_org.id})
    product_list = Product.list({'organization-id': module_org.id})
    assert product_info['sync-state-(last)'] in [p.get('sync-state') for p in product_list]
def test_positive_synchronize_custom_product_weekly_recurrence(module_org):
    """Create a weekly sync plan with a past datetime as a sync date,
    add a custom product and verify the product gets synchronized on
    the next sync occurrence

    :id: 1079a66d-7c23-44f6-a4a0-47f4c74d92a4

    :expectedresults: Product is synchronized successfully.

    :BZ: 1396647

    :CaseLevel: System
    """
    delay = 2 * 60
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    # One week in the past plus `delay` seconds: the next weekly
    # occurrence lands `delay` seconds from now
    start_date = datetime.utcnow() - timedelta(weeks=1) + timedelta(
        seconds=delay)
    sync_plan = make_sync_plan({
        'enabled': 'true',
        'interval': 'weekly',
        'organization-id': module_org.id,
        'sync-date': start_date.strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id']
    })
    # Wait quarter of expected time: the plan must NOT have fired yet
    logger.info('Waiting {} seconds to check product {}'
                ' was not synced'.format(delay / 4, product['name']))
    sleep(delay / 4)
    # Verify product has not been synced yet
    # NOTE(review): sibling tests pass the org id as the second positional
    # argument to validate_task_status; here only max_tries/repo_name are
    # given — confirm against the helper's signature
    with pytest.raises(AssertionError):
        validate_task_status(repo['id'], max_tries=1)
    validate_repo_content(repo, ['errata', 'packages'], after_sync=False)
    # Wait until the first recurrence
    logger.info('Waiting {} seconds to check product {}'
                ' was synced'.format((delay * 3 / 4), product['name']))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    validate_task_status(repo['id'], repo_name=repo['name'])
    validate_repo_content(repo, ['errata', 'package-groups', 'packages'])
def test_positive_synchronize_custom_products_future_sync_date(module_org):
    """Create a sync plan with sync date in a future and sync multiple
    custom products with multiple repos automatically.

    :id: dd262cf3-b836-422c-baca-b3adbc532478

    :expectedresults: Products are synchronized successfully.

    :CaseLevel: System

    :BZ: 1655595
    """
    delay = 2 * 60  # delay for sync date in seconds
    # Three products, two repositories each
    products = [make_product({'organization-id': module_org.id}) for _ in range(3)]
    repos = [
        make_repository({'product-id': product['id']})
        for product in products
        for _ in range(2)
    ]
    sync_plan = make_sync_plan(
        {
            'enabled': 'true',
            'organization-id': module_org.id,
            'sync-date': (datetime.utcnow().replace(second=0)
                          + timedelta(seconds=delay)).strftime(
                "%Y-%m-%d %H:%M:%S"
            ),
            'cron-expression': ["*/4 * * * *"],
        }
    )
    # Verify products have not been synced yet (task lookup is expected
    # to fail, hence the AssertionError)
    for repo in repos:
        with pytest.raises(AssertionError):
            validate_task_status(repo['id'], max_tries=1)
    # Associate sync plan with products
    for product in products:
        Product.set_sync_plan({'id': product['id'], 'sync-plan-id': sync_plan['id']})
    # Wait quarter of expected time: the plan must NOT have fired yet
    logger.info('Waiting {} seconds to check products were not synced'.format(delay / 4))
    sleep(delay / 4)
    # Verify products has not been synced yet
    for repo in repos:
        with pytest.raises(AssertionError):
            validate_task_status(repo['id'], max_tries=1)
    # Wait the rest of expected time so the scheduled sync can run
    logger.info('Waiting {} seconds to check products were synced'.format(delay * 3 / 4))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    for repo in repos:
        validate_task_status(repo['id'], repo_name=repo['name'])
        validate_repo_content(repo, ['errata', 'package-groups', 'packages'])
def test_positive_add_empty_product(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with empty (no repos) custom product

    :id: 61c700db-43ab-4b8c-8527-f4cfc085afaa

    :expectedresults: gpg key is associated with product

    :CaseLevel: Integration
    """
    # Create the key first, then hand its id to product creation so the
    # association happens at create time
    key = make_content_credential({'organization-id': module_org.id})
    new_product = make_product({
        'gpg-key-id': key['id'],
        'organization-id': module_org.id
    })
    assert new_product['gpg']['gpg-key'] == key['name']
def test_positive_add_empty_product(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with empty (no repos) custom product

    :id: b7477c2f-586c-4593-96c0-1fbc532ce8bf

    :expectedresults: gpg key is associated with product

    :CaseLevel: Integration
    """
    # Create the key first, then hand its id to product creation so the
    # association happens at create time
    key = make_gpg_key({'organization-id': self.org['id']})
    new_product = make_product({
        'gpg-key-id': key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(new_product['gpg']['gpg-key'], key['name'])
def setUpClass(cls):
    """Create an organization, product and ostree repo.

    Also publishes a content view containing the synced ostree
    repository for the branch tests to inspect.
    """
    super(OstreeBranchTestCase, cls).setUpClass()
    cls.org = make_org()
    cls.product = make_product({u'organization-id': cls.org['id']})
    # Create new custom ostree repo
    cls.ostree_repo = make_repository({
        u'product-id': cls.product['id'],
        u'content-type': u'ostree',
        u'publish-via-http': u'false',
        u'url': FEDORA23_OSTREE_REPO,
    })
    Repository.synchronize({'id': cls.ostree_repo['id']})
    cls.cv = make_content_view({u'organization-id': cls.org['id']})
    ContentView.publish({u'id': cls.cv['id']})
    # Re-read the content view so it reflects the published version
    cls.cv = ContentView.info({u'id': cls.cv['id']})
def test_positive_create_with_description(self):
    """Check if product can be created with random description

    @id: 4b64dc60-ac08-4276-b31a-d3851ae064ba

    @Assert: Product is created and has random description
    """
    # Each candidate description runs as an independent subtest
    for description in valid_data_list():
        with self.subTest(description):
            name = gen_alphanumeric()
            created = make_product({
                u'description': description,
                u'name': name,
                u'organization-id': self.org['id'],
            })
            self.assertEqual(created['description'], description)
            self.assertEqual(created['name'], name)
def test_positive_create_with_label(self):
    """Check if product can be created with random labels

    @id: 07ff96b2-cc55-4d07-86a2-f20b77cc9b14

    @Assert: Product is created and has random label
    """
    # Each candidate label runs as an independent subtest
    for candidate in valid_labels_list():
        with self.subTest(candidate):
            name = gen_alphanumeric()
            created = make_product({
                u'label': candidate,
                u'name': name,
                u'organization-id': self.org['id'],
            })
            self.assertEqual(created['label'], candidate)
            self.assertEqual(created['name'], name)
def test_positive_create_with_sync_plan(self):
    """Check if product can be created with sync plan

    @id: c54ff608-9f59-4fd6-a45c-bd70ce656023

    @Assert: Product is created and has random sync plan
    """
    # One sync plan is shared across all subtests
    sync_plan = make_sync_plan({u'organization-id': self.org['id']})
    for prod_name in valid_data_list():
        with self.subTest(prod_name):
            created = make_product({
                u'name': prod_name,
                u'organization-id': self.org['id'],
                u'sync-plan-id': sync_plan['id'],
            })
            self.assertEqual(created['sync-plan-id'], sync_plan['id'])
            self.assertEqual(created['name'], prod_name)
def test_positive_create_with_gpg_key(self):
    """Check if product can be created with gpg key

    @id: 64f02b3b-f8c1-42c5-abb2-bf963ac24670

    @Assert: Product is created and has gpg key
    """
    # One gpg key is shared across all subtests
    key = make_gpg_key({u'organization-id': self.org['id']})
    for prod_name in valid_data_list():
        with self.subTest(prod_name):
            created = make_product({
                u'gpg-key-id': key['id'],
                u'name': prod_name,
                u'organization-id': self.org['id'],
            })
            self.assertEqual(created['gpg']['gpg-key-id'], key['id'])
            self.assertEqual(created['name'], prod_name)
def test_positive_delete_key_for_repo_from_product_with_repo(self): """Create gpg key with valid name and valid gpg key via file import then associate it to repository from custom product that has one repository then delete the key :id: 3658e04d-fc63-499f-a22d-b512941cc96b :expectedresults: gpg key is associated with the single repository but not the product during creation and was removed from repository after deletion :CaseLevel: Integration """ # Create product, repository and gpg key product = make_product({'organization-id': self.org['id']}) repo = make_repository({'product-id': product['id']}) gpg_key = make_gpg_key({'organization-id': self.org['id']}) # Associate gpg key with a repository Repository.update({ 'gpg-key': gpg_key['name'], 'id': repo['id'], 'organization-id': self.org['id'], }) # Verify gpg key was associated with the repository but not with the # product product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) repo = Repository.info({'id': repo['id']}) self.assertNotEqual(product['gpg']['gpg-key'], gpg_key['name']) self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name']) # Delete the gpg key GPGKey.delete({ 'name': gpg_key['name'], 'organization-id': self.org['id'], }) # Verify gpg key was actually deleted with self.assertRaises(CLIReturnCodeError): GPGKey.info({ 'id': gpg_key['id'], 'organization-id': self.org['id'], }) # Verify gpg key was disassociated from the repository repo = Repository.info({'id': repo['id']}) self.assertNotEqual(repo['gpg-key'].get('name'), gpg_key['name'])
def test_positive_update_key_for_empty_product(self): """Create gpg key with valid name and valid gpg key via file import then associate it with empty (no repos) custom product then update the key :id: c0c84c45-21fc-4940-9d52-00babb807ec7 :expectedresults: gpg key is associated with product before/after update :CaseLevel: Integration """ # Create a product and a gpg key product = make_product({'organization-id': self.org['id']}) gpg_key = make_gpg_key({'organization-id': self.org['id']}) # Associate gpg key with a product Product.update({ 'gpg-key': gpg_key['name'], 'id': product['id'], 'organization-id': self.org['id'], }) # Verify gpg key was associated product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertEqual(product['gpg']['gpg-key'], gpg_key['name']) # Update the gpg key new_name = gen_choice(valid_data_list()) GPGKey.update({ 'name': gpg_key['name'], 'new-name': new_name, 'organization-id': self.org['id'], }) # Verify changes are reflected in the gpg key gpg_key = GPGKey.info({ 'id': gpg_key['id'], 'organization-id': self.org['id'], }) self.assertEqual(gpg_key['name'], new_name) # Verify changes are reflected in the product product = Product.info({ 'id': product['id'], 'organization-id': self.org['id'], }) self.assertEqual(product['gpg']['gpg-key'], new_name)