def test_product_list_with_default_settings(module_org, default_sat):
    """Listing product of an organization apart from default organization using hammer
    does not return output if a defaults settings are applied on org.

    :id: d5c5edac-b19c-4277-92fe-28d9b9fa43ef

    :BZ: 1745575

    :customerscenario: true

    :expectedresults: product/repository list should work as expected.
    """
    org_id = str(module_org.id)
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
    non_default_org = make_org()
    default_product = make_product({
        'name': default_product_name,
        'organization-id': org_id,
    })
    non_default_product = make_product({
        'name': non_default_product_name,
        'organization-id': non_default_org['id'],
    })
    # Give each product a repository so `repository list` has output to check
    for product in (default_product, non_default_product):
        make_repository(
            {
                'organization-id': org_id,
                'product-id': product['id'],
                'url': settings.repos.yum_0.url,
            },
        )
    Defaults.add({'param-name': 'organization_id', 'param-value': org_id})
    result = default_sat.cli.Defaults.list(per_page=False)
    # The 'organization_id' default must be present and point at module_org
    assert any(
        res['value'] == org_id
        for res in result
        if res['parameter'] == 'organization_id'
    )
    try:
        # Verify --organization-id is not required to pass if defaults are set
        result = default_sat.cli.Product.list()
        assert any(res['name'] == default_product_name for res in result)
        result = default_sat.cli.Repository.list()
        assert any(res['product'] == default_product_name for res in result)
        # Verify that the defaults setting does not affect other entities
        product_list = Product.list({'organization-id': non_default_org['id']})
        assert non_default_product_name == product_list[0]['name']
        repository_list = Repository.list(
            {'organization-id': non_default_org['id']})
        assert non_default_product_name == repository_list[0]['product']
    finally:
        # Always remove the default so other tests are not affected
        Defaults.delete({'param-name': 'organization_id'})
        result = default_sat.cli.Defaults.list(per_page=False)
        assert not [
            res for res in result if res['parameter'] == 'organization_id'
        ]
def setupScenario(self):
    """Create yum and puppet repositories and synchronize them."""
    self.org = make_org({'name': self.org_name})
    self.product = make_product(
        {'name': self.product_name, 'organization-id': self.org['id']}
    )
    # Yum repository, synced immediately
    self.yum_repo1 = make_repository(
        {
            'name': self.yum_repo1_name,
            'product-id': self.product['id'],
            'content-type': 'yum',
            'url': FAKE_1_YUM_REPO,
        }
    )
    Repository.synchronize({'id': self.yum_repo1['id']})
    # Puppet repository plus a known module to look up after sync
    self.module = {'name': self.puppet_module_name, 'version': '3.3.3'}
    self.puppet_repo = make_repository(
        {
            'name': self.puppet_repo_name,
            'content-type': 'puppet',
            'product-id': self.product['id'],
            'url': CUSTOM_PUPPET_REPO,
        }
    )
    Repository.synchronize({'id': self.puppet_repo['id']})
    search_query = 'name={name} and version={version}'.format(**self.module)
    self.puppet_module = PuppetModule.list({'search': search_query})[0]
def test_product_synchronize_by_label(self):
    """@Test: Check if product can be synchronized.
    Searches for organization by its label

    @Feature: Product

    @Assert: Product was synchronized
    """
    # Setup failures are test infrastructure problems, not test failures
    try:
        org = make_org()
        product = make_product({'organization-id': org['id']})
        make_repository({'product-id': product['id']})
    except CLIFactoryError as err:
        self.fail(err)
    # Address the organization by label rather than id when syncing
    sync_result = Product.synchronize({
        'id': product['id'],
        'organization-label': org['label'],
    })
    self.assertEqual(sync_result.return_code, 0)
    info_result = Product.info({
        'id': product['id'],
        'organization-id': org['id'],
    })
    self.assertEqual(info_result.return_code, 0)
    self.assertEqual(u'Syncing Complete.', info_result.stdout['sync-state'])
def test_positive_product_sync_state(module_org):
    """hammer product info shows correct sync state.

    :id: 58af6239-85d7-4b8b-bd2d-ab4cd4f29840

    :BZ: 1803207,1999541

    :customerscenario: true

    :Steps:
        1. Sync a custom repository that fails.
        2. Run `hammer product info --product-id <id>`.
        3. Successfully sync another repository under the same product.
        4. Run `hammer product info --product-id <id>` again.

    :expectedresults: hammer should show 'Sync Incomplete' in both cases.
    """
    product = make_product({'organization-id': module_org.id})
    # A URL pointing at a random host:port so the sync is guaranteed to fail
    unreachable_url = (
        f'{gen_url(scheme="https")}:{gen_integer(min_value=10, max_value=9999)}'
    )
    failing_repo = make_repository({
        'organization-id': module_org.id,
        'product-id': product['id'],
        'name': gen_string('alpha'),
        'url': unreachable_url,
    })
    with pytest.raises(CLIReturnCodeError):
        Repository.synchronize({'id': failing_repo['id']})
    # Sync state from `product info` must match what `product list` reports
    product_info = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    product_list = Product.list({'organization-id': module_org.id})
    assert product_info['sync-state-(last)'] in [
        p.get('sync-state') for p in product_list
    ]
    # Now add a second repository under the same product that syncs cleanly
    working_repo = make_repository({
        'organization-id': module_org.id,
        'product-id': product['id'],
        'name': gen_string('alpha'),
        'url': settings.repos.yum_0.url,
    })
    Repository.synchronize({'id': working_repo['id']})
    product_info = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    product_list = Product.list({'organization-id': module_org.id})
    assert product_info['sync-state-(last)'] in [
        p.get('sync-state') for p in product_list
    ]
def test_product_list_with_default_settings(self):
    """Listing product of an organization apart from default organization using hammer
    does not return output if a defaults settings are applied on org.

    :id: d5c5edac-b19c-4277-92fe-28d9b9fa43ef

    :BZ: 1745575

    :expectedresults: product/repository list should work as expected.
    """
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
    default_org = self.org
    non_default_org = make_org()
    default_product = make_product({
        'name': default_product_name,
        'organization-id': default_org['id'],
    })
    non_default_product = make_product({
        'name': non_default_product_name,
        'organization-id': non_default_org['id'],
    })
    # Give each product a repository so `repository list` has output to check
    for product in (default_product, non_default_product):
        make_repository({
            'product-id': product['id'],
            'url': FAKE_0_YUM_REPO,
        })
    Defaults.add({
        'param-name': 'organization_id',
        'param-value': default_org['id'],
    })
    result = ssh.command('hammer defaults list')
    # assertIn/assertNotIn give a useful failure message, unlike
    # assertTrue(x in y); assertEquals is a deprecated unittest alias
    self.assertIn(default_org['id'], "".join(result.stdout))
    try:
        # Verify --organization-id is not required to pass if defaults are set
        result = ssh.command('hammer product list')
        self.assertIn(default_product_name, "".join(result.stdout))
        result = ssh.command('hammer repository list')
        self.assertIn(default_product_name, "".join(result.stdout))
        # Verify that defaults setting should not affect other entities
        product_list = Product.list(
            {'organization-id': non_default_org['id']})
        self.assertEqual(non_default_product_name, product_list[0]['name'])
        repository_list = Repository.list(
            {'organization-id': non_default_org['id']})
        self.assertEqual(non_default_product_name,
                         repository_list[0]['product'])
    finally:
        # Always remove the default so other tests are not affected
        Defaults.delete({'param-name': 'organization_id'})
        result = ssh.command('hammer defaults list')
        self.assertNotIn(default_org['id'], "".join(result.stdout))
def test_product_list_with_default_settings(module_org):
    """Listing product of an organization apart from default organization using hammer
    does not return output if a defaults settings are applied on org.

    :id: d5c5edac-b19c-4277-92fe-28d9b9fa43ef

    :BZ: 1745575

    :customerscenario: true

    :expectedresults: product/repository list should work as expected.
    """
    org_id = str(module_org.id)
    default_product_name = gen_string('alpha')
    non_default_product_name = gen_string('alpha')
    non_default_org = make_org()
    default_product = make_product({
        'name': default_product_name,
        'organization-id': org_id,
    })
    non_default_product = make_product({
        'name': non_default_product_name,
        'organization-id': non_default_org['id'],
    })
    # Give each product a repository so `repository list` has output to check
    for product in (default_product, non_default_product):
        make_repository(
            {
                'organization-id': org_id,
                'product-id': product['id'],
                'url': FAKE_0_YUM_REPO,
            },
        )
    Defaults.add({'param-name': 'organization_id', 'param-value': org_id})
    result = ssh.command('hammer defaults list')
    assert org_id in "".join(result.stdout)
    try:
        # Verify --organization-id is not required to pass if defaults are set
        result = ssh.command('hammer product list')
        assert default_product_name in "".join(result.stdout)
        result = ssh.command('hammer repository list')
        assert default_product_name in "".join(result.stdout)
        # Verify that defaults setting should not affect other entities
        product_list = Product.list({'organization-id': non_default_org['id']})
        assert non_default_product_name == product_list[0]['name']
        repository_list = Repository.list(
            {'organization-id': non_default_org['id']})
        assert non_default_product_name == repository_list[0]['product']
    finally:
        # Always remove the default so other tests are not affected
        Defaults.delete({'param-name': 'organization_id'})
        result = ssh.command('hammer defaults list')
        assert org_id not in "".join(result.stdout)
def test_product_sync_by_label(self):
    """@Test: Check if product can be synchronized.
    Searches for organization by its label

    @Feature: Product

    @Assert: Product was synchronized
    """
    org = make_org()
    product = make_product({"organization-id": org["id"]})
    make_repository({"product-id": product["id"]})
    # Address the organization by label instead of id when syncing
    Product.synchronize({
        "id": product["id"],
        "organization-label": org["label"],
    })
    refreshed = Product.info({
        "id": product["id"],
        "organization-id": org["id"],
    })
    self.assertEqual(u"Syncing Complete.", refreshed["sync-state"])
def test_positive_package_count(self):
    """Check that packages count is correctly filtered by product id

    :id: 151f60a3-0b94-4658-8b0d-0d022f4f1d8f

    :expectedresults: Packages only from synced product returned

    :BZ: 1422552

    :CaseLevel: Integration
    """
    org = make_org()
    # Several products in the same org: the package listing must be
    # scoped to a single product id, not the whole organization.
    for _ in range(3):
        product = make_product({'organization-id': org['id']})
        repo = make_repository({
            'product-id': product['id'],
            'url': FAKE_0_YUM_REPO,
        })
        Product.synchronize({
            'id': product['id'],
            'organization-id': org['id'],
        })
        packages = Package.list({'product-id': product['id']})
        repo = Repository.info({'id': repo['id']})
        package_count = len(packages)
        self.assertEqual(
            int(repo['content-counts']['packages']), package_count)
        self.assertEqual(package_count, FAKE_0_YUM_REPO_PACKAGES_COUNT)
def test_positive_list_multiple_repos(self):
    """Verify that puppet-modules list for specific repo is correct
    and does not affected by other repositories.

    :id: f36d25b3-2495-4e89-a1cf-e39d52762d95

    :expectedresults: Number of modules has no changed after a second repo
        was synced.

    :CaseImportance: Critical
    """
    # Baseline: module count of the first, already-synced repository
    first_repo = Repository.info({'id': self.repo['id']})
    reported_count = first_repo['content-counts']['puppet-modules']
    listed_count = len(PuppetModule.list({'repository-id': first_repo['id']}))
    self.assertEqual(reported_count, str(listed_count))
    # Create and sync a second puppet repository in the same product
    second_repo = make_repository({
        u'organization-id': self.org['id'],
        u'product-id': self.product['id'],
        u'content-type': u'puppet',
        u'url': FAKE_1_PUPPET_REPO,
    })
    Repository.synchronize({'id': second_repo['id']})
    # The first repository's module count must be unchanged
    self.assertEqual(
        listed_count,
        len(PuppetModule.list({'repository-id': first_repo['id']})),
    )
def test_negative_synchronize_custom_product_past_sync_date(module_org):
    """Verify product won't get synced immediately after adding association
    with a sync plan which has already been started

    :id: c80f5c0c-3863-47da-8d7b-7d65c73664b0

    :expectedresults: Repository was not synchronized

    :BZ: 1279539

    :CaseLevel: System
    """
    # Plan whose start time is "now", i.e. already begun
    plan = make_sync_plan({
        'enabled': 'true',
        'organization-id': module_org.id,
        'sync-date': datetime.utcnow().strftime(SYNC_DATE_FMT),
    })
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    Product.set_sync_plan({'id': product['id'], 'sync-plan-id': plan['id']})
    # Associating the plan must NOT trigger an immediate sync
    with pytest.raises(AssertionError):
        validate_task_status(repo['id'], max_tries=2)
def test_negative_synchronize_custom_product_current_sync_date(self):
    """Verify product won't get synced immediately after adding association
    with a sync plan which has already been started

    :id: c80f5c0c-3863-47da-8d7b-7d65c73664b0

    :expectedresults: Repository was not synchronized

    :BZ: 1279539

    :CaseLevel: System
    """
    # Plan whose start time is "now", i.e. already begun
    started_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    new_product = make_product({'organization-id': self.org['id']})
    new_repo = make_repository({'product-id': new_product['id']})
    Product.set_sync_plan({
        'id': new_product['id'],
        'sync-plan-id': started_plan['id'],
    })
    # No content must appear within the first few validation attempts
    with self.assertRaises(AssertionError):
        self.validate_repo_content(
            new_repo,
            ['errata', 'package-groups', 'packages'],
            max_attempts=5,
        )
def test_positive_add_repo_from_product_with_repo(module_org):
    """Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    :id: da568a0e-69b1-498e-a747-6881aac7409e

    :expectedresults: gpg key is associated with the repository but not
        with the product

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    gpg_key = make_content_credential({'organization-id': module_org.id})
    # Attach the key at the repository level only
    Repository.update({
        'gpg-key-id': gpg_key['id'],
        'id': repo['id'],
        'organization-id': module_org.id,
    })
    repo = Repository.info({'id': repo['id']})
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert repo['gpg-key']['id'] == gpg_key['id']
    # The product itself must not have picked up the key
    assert product['gpg'].get('gpg-key-id') != gpg_key['id']
def setUpClass(cls):
    """Init single organization, product and repository for all tests"""
    super(TestContentViewFilter, cls).setUpClass()
    org = make_org()
    product = make_product({u'organization-id': org['id']})
    repo = make_repository({u'product-id': product['id']})
    # Filters need synchronized content to operate on
    Repository.synchronize({u'id': repo['id']})
    cls.org = org
    cls.product = product
    cls.repo = repo
def test_positive_synchronize_custom_products_future_sync_date(module_org):
    """Create a sync plan with sync date in a future and sync multiple
    custom products with multiple repos automatically.

    :id: dd262cf3-b836-422c-baca-b3adbc532478

    :expectedresults: Products are synchronized successfully.

    :CaseLevel: System

    :BZ: 1655595
    """
    delay = 4 * 60  # delay for sync date in seconds
    # Three products, each with two repositories
    products = [
        make_product({'organization-id': module_org.id})
        for _ in range(3)
    ]
    repos = [
        make_repository({'product-id': product['id']})
        for product in products
        for _ in range(2)
    ]
    # Plan starts `delay` seconds from now; cron "*/4 * * * *" repeats
    # every 4 minutes thereafter
    sync_plan = make_sync_plan({
        'enabled': 'true',
        'organization-id': module_org.id,
        'sync-date': (datetime.utcnow().replace(second=0)
                      + timedelta(seconds=delay)).strftime(SYNC_DATE_FMT),
        'cron-expression': ["*/4 * * * *"],
    })
    # Verify products have not been synced yet
    logger.info(
        f"Check products {products[0]['name']} and {products[1]['name']}"
        f" were not synced before sync plan created in org {module_org.label}")
    for repo in repos:
        with pytest.raises(AssertionError):
            validate_task_status(repo['id'], module_org.id, max_tries=1)
    # Associate sync plan with products
    for product in products:
        Product.set_sync_plan({
            'id': product['id'],
            'sync-plan-id': sync_plan['id']
        })
    # Wait quarter of expected time
    logger.info(
        f"Waiting {(delay / 4)} seconds to check products {products[0]['name']}"
        f" and {products[1]['name']} were not synced by {sync_plan['name']} ")
    sleep(delay / 4)
    # Verify products have not been synced yet
    for repo in repos:
        with pytest.raises(AssertionError):
            validate_task_status(repo['id'], module_org.id, max_tries=1)
    # Wait the rest of expected time
    logger.info(
        f"Waiting {(delay * 3 / 4)} seconds to check product {products[0]['name']}"
        f" and {products[1]['name']} were synced by {sync_plan['name']}")
    sleep(delay * 3 / 4)
    # Verify products were synced successfully
    for repo in repos:
        validate_task_status(repo['id'], module_org.id)
        validate_repo_content(repo, ['errata', 'package-groups', 'packages'])
def test_positive_list_multiple_repos(self):
    """Verify that puppet-modules list for specific repo is correct
    and does not affected by other repositories.

    @id: f36d25b3-2495-4e89-a1cf-e39d52762d95

    @Assert: Number of modules has no changed after a second repo
    was synced.
    """
    # Record how many modules the first repo reports after its sync
    repo_info = Repository.info({'id': self.repo['id']})
    count_from_info = repo_info['content-counts']['puppet-modules']
    count_from_list = len(
        PuppetModule.list({'repository-id': repo_info['id']}))
    self.assertEqual(count_from_info, str(count_from_list))
    # Sync an additional puppet repository under the same product
    extra_repo = make_repository({
        u'organization-id': self.org['id'],
        u'product-id': self.product['id'],
        u'content-type': u'puppet',
        u'url': FAKE_1_PUPPET_REPO,
    })
    Repository.synchronize({'id': extra_repo['id']})
    # Module count of the first repo must remain untouched
    self.assertEqual(
        count_from_list,
        len(PuppetModule.list({'repository-id': repo_info['id']})),
    )
def test_positive_check_errata_dates(module_org):
    """Check for errata dates in `hammer erratum list`

    :id: b19286ae-bdb4-4319-87d0-5d3ff06c5f38

    :expectedresults: Display errata date when using hammer erratum list

    :CaseImportance: High

    :BZ: 1695163
    """
    product = entities.Product(organization=module_org).create()
    repo = make_repository({
        'content-type': 'yum',
        'product-id': product.id,
        'url': REPO_WITH_ERRATA['url'],
    })
    # Synchronize custom repository
    Repository.synchronize({'id': repo['id']})
    # Both date fields must be present and parse as ISO (YYYY-MM-DD) dates
    for field, key in (('Issued', 'issued'), ('Updated', 'updated')):
        listing = Erratum.list(options={'per-page': '5', 'fields': field})
        assert key in listing[0]
        parsed = datetime.strptime(listing[0][key], '%Y-%m-%d').date()
        assert isinstance(parsed, date)
def _make_repository(self, options=None):
    """Makes a new repository and asserts its success"""
    if options is None:
        options = {}
    # Fall back to the class-level product when none was requested
    if not options.get('product-id', None):
        options[u'product-id'] = self.product['id']
    new_repo = make_repository(options)
    # Read the repository back to prove it exists
    lookup = Repository.info({u'id': new_repo['id']})
    self.assertEqual(lookup.return_code, 0, "Repository was not found")
    self.assertEqual(len(lookup.stderr), 0, "No error was expected")
    # Return the repository dictionary
    return new_repo
def test_positive_package_count(self):
    """Check that packages count is correctly filtered by product id

    :id: 151f60a3-0b94-4658-8b0d-0d022f4f1d8f

    :expectedresults: Packages only from synced product returned

    :BZ: 1422552

    :CaseLevel: Integration
    """
    org = make_org()
    for _ in range(3):
        # Each iteration adds one more synced product to the same org
        new_product = make_product({'organization-id': org['id']})
        new_repo = make_repository({
            'product-id': new_product['id'],
            'url': FAKE_0_YUM_REPO,
        })
        Product.synchronize({
            'id': new_product['id'],
            'organization-id': org['id'],
        })
        listed_packages = Package.list({'product-id': new_product['id']})
        repo_details = Repository.info({'id': new_repo['id']})
        # Listing scoped by product id must match that product's repo only
        self.assertEqual(
            int(repo_details['content-counts']['packages']),
            len(listed_packages),
        )
        self.assertEqual(len(listed_packages), FAKE_0_YUM_REPO_PACKAGES_COUNT)
def test_positive_sync_publish_cv(self):
    """Synchronize repository with DRPMs, add repository to content view
    and publish content view

    @id: 014bfc80-4622-422e-a0ec-755b1d9f845e

    @Assert: drpms can be listed in content view
    """
    drpm_repo = make_repository({
        'product-id': self.product['id'],
        'url': FAKE_YUM_DRPM_REPO,
    })
    Repository.synchronize({'id': drpm_repo['id']})
    content_view = make_content_view({'organization-id': self.org['id']})
    ContentView.add_repository({
        'id': content_view['id'],
        'repository-id': drpm_repo['id'],
    })
    ContentView.publish({'id': content_view['id']})
    # Check for .drpm files in the published content view path on disk
    published_path = (
        '/var/lib/pulp/published/yum/https/repos/{}/content_views/{}'
        '/1.0/custom/{}/{}/drpms/'.format(
            self.org['label'],
            content_view['label'],
            self.product['label'],
            drpm_repo['label'],
        )
    )
    result = ssh.command('ls {} | grep .drpm'.format(published_path))
    self.assertEqual(result.return_code, 0)
    self.assertGreaterEqual(len(result.stdout), 1)
def test_positive_sync_publish_cv(self):
    """Synchronize repository with SRPMs, add repository to content view
    and publish content view

    @id: 78cd6345-9c6c-490a-a44d-2ad64b7e959b

    @Assert: srpms can be listed in content view
    """
    srpm_repo = make_repository({
        'product-id': self.product['id'],
        'url': FAKE_YUM_SRPM_REPO,
    })
    Repository.synchronize({'id': srpm_repo['id']})
    content_view = make_content_view({'organization-id': self.org['id']})
    ContentView.add_repository({
        'id': content_view['id'],
        'repository-id': srpm_repo['id'],
    })
    ContentView.publish({'id': content_view['id']})
    # Check for .src.rpm files in the published content view path on disk
    published_path = (
        '/var/lib/pulp/published/yum/https/repos/{}/content_views/{}'
        '/1.0/custom/{}/{}/'.format(
            self.org['label'],
            content_view['label'],
            self.product['label'],
            srpm_repo['label'],
        )
    )
    result = ssh.command('ls {} | grep .src.rpm'.format(published_path))
    self.assertEqual(result.return_code, 0)
    self.assertGreaterEqual(len(result.stdout), 1)
def test_update_cvf_with_new_repo_negative(self):
    """Test: Try to update filter and assign repository which does not
    belong to filter content view

    @Feature: Content View Filter

    @Assert: Content view filter is not updated
    """
    create_result = ContentView.filter_create({
        'content-view-id': self.content_view['id'],
        'type': 'rpm',
        'name': self.cvf_name,
        'repository-ids': self.repo['id'],
    })
    self.assertEqual(create_result.return_code, 0)
    # This repository was never added to the content view
    foreign_repo = make_repository({u'product-id': self.product['id']})
    update_result = ContentView.filter_update({
        'content-view-id': self.content_view['id'],
        'name': self.cvf_name,
        'repository-ids': foreign_repo['id'],
    })
    # The update must be rejected: non-zero exit code plus stderr output
    self.assertNotEqual(update_result.return_code, 0)
    self.assertNotEqual(len(update_result.stderr), 0)
def test_positive_add_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one repository

    :id: b05c5223-44d5-4a48-9d99-18ca351c84a5

    :expectedresults: gpg key is associated with product as well as with
        the repositories

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': self.org['id']})
    # A random number of repositories under the same product
    repo_count = gen_integer(2, 5)
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(repo_count)
    ]
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key-id'], gpg_key['id'])
    # The key must have reached every repository of the product
    for repo in repos:
        details = Repository.info({'id': repo['id']})
        self.assertEqual(details['gpg-key']['id'], gpg_key['id'])
def test_positive_synchronize_custom_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one custom
    product with it automatically.

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan
    """
    delay = 10 * 60  # delay for sync date in seconds
    start_date = datetime.utcnow() + timedelta(seconds=delay)
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': start_date.strftime("%Y-%m-%d %H:%M:%S"),
    })
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # First half of the wait: the plan must not have fired yet
    sleep(delay / 2)
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # Second half of the wait: the plan should have fired by now
    sleep(delay / 2)
    self.validate_repo_content(
        repo, ['errata', 'package-groups', 'packages'])
def test_positive_sync_publish_promote_cv(self):
    """Synchronize repository with DRPMs, add repository to content view,
    publish and promote content view to lifecycle environment

    @id: a01cb12b-d388-4902-8532-714f4e28ec56

    @Assert: drpms can be listed in content view in proper lifecycle
    environment
    """
    lce = make_lifecycle_environment({"organization-id": self.org["id"]})
    repo = make_repository({
        "product-id": self.product["id"],
        "url": FAKE_YUM_DRPM_REPO,
    })
    Repository.synchronize({"id": repo["id"]})
    cv = make_content_view({"organization-id": self.org["id"]})
    ContentView.add_repository({"id": cv["id"], "repository-id": repo["id"]})
    ContentView.publish({"id": cv["id"]})
    # Promote the first (and only) published version to the new environment
    first_version = ContentView.info({"id": cv["id"]})["versions"][0]
    ContentView.version_promote({
        "id": first_version["id"],
        "to-lifecycle-environment-id": lce["id"],
    })
    # Check for .drpm files in the promoted environment's published path
    result = ssh.command(
        "ls /var/lib/pulp/published/yum/https/repos/{}/{}/{}/custom/{}/{}"
        "/drpms/ | grep .drpm".format(
            self.org["label"],
            lce["label"],
            cv["label"],
            self.product["label"],
            repo["label"],
        )
    )
    self.assertEqual(result.return_code, 0)
    self.assertGreaterEqual(len(result.stdout), 1)
def test_negative_update_with_invalid_repo_id(self):
    """Try to update filter and assign repository which does not belong to
    filter content view

    :id: aa550619-c436-4184-bb29-2becadf69e5b

    :expectedresults: Content view filter is not updated

    :CaseImportance: Critical
    """
    filter_name = gen_string('utf8')
    ContentView.filter.create({
        'content-view-id': self.content_view['id'],
        'name': filter_name,
        'repository-ids': self.repo['id'],
        'type': 'rpm',
    })
    # A repo that exists but is not part of the filter's content view
    unrelated_repo = make_repository({u'product-id': self.product['id']})
    with self.assertRaises(CLIReturnCodeError):
        ContentView.filter.update({
            'content-view-id': self.content_view['id'],
            'name': filter_name,
            'repository-ids': unrelated_repo['id'],
        })
def test_positive_synchronize_custom_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    custom product and verify the product gets synchronized on the next
    sync occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': self.org['id'],
        'sync-date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
    })
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Halfway through the interval the product must still be unsynced
    sleep(interval / 2)
    self.validate_repo_content(
        repo, ['errata', 'packages'], after_sync=False)
    # By the end of the interval the scheduled sync should have run
    sleep(interval / 2)
    self.validate_repo_content(
        repo, ['errata', 'package-groups', 'packages'])
def test_positive_add_product_with_repo(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository

    :id: f315eadd-e65b-4952-912f-f640867ad656

    :expectedresults: gpg key is associated with product as well as with
        the repository

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': module_org.id})
    repo = make_repository({'product-id': product['id']})
    gpg_key = make_content_credential({'organization-id': module_org.id})
    # Attach the key at the product level
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': module_org.id,
    })
    refreshed_product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    refreshed_repo = Repository.info({'id': repo['id']})
    # Both the product and its repository must carry the key
    assert refreshed_product['gpg']['gpg-key-id'] == gpg_key['id']
    assert refreshed_repo['gpg-key']['id'] == gpg_key['id']
def test_positive_add_product_with_repos(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository

    :id: 76683f3e-7705-4719-996e-c026839053bb

    :expectedresults: gpg key is associated with product as well as with
        the repositories

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': module_org.id})
    # A random number (2-5) of repositories under the same product
    repos = []
    for _ in range(gen_integer(2, 5)):
        repos.append(make_repository({'product-id': product['id']}))
    gpg_key = make_content_credential({'organization-id': module_org.id})
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': module_org.id,
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product['gpg']['gpg-key-id'] == gpg_key['id']
    # Every repository under the product must now carry the key
    for repo in repos:
        details = Repository.info({'id': repo['id']})
        assert details['gpg-key']['id'] == gpg_key['id']
def make_file_repository_upload_contents(self, options=None):
    """Makes a new File repository, Upload File/Multiple Files
    and asserts its success.
    """
    if options is None:
        options = {
            'name': self.file_repo_name,
            'product-id': self.product['id'],
            'content-type': 'file',
        }
    if not options.get('content-type'):
        raise CLIFactoryError('Please provide a valid Content Type.')
    file_repo = make_repository(options)
    remote_path = "/tmp/{0}".format(RPM_TO_UPLOAD)
    if options.get('multi_upload'):
        # Upload a whole directory of files to a fresh remote directory
        remote_path = "/tmp/{}/".format(gen_string('alpha'))
        ssh.upload_files(local_dir=os.getcwd() + "/../data/",
                         remote_dir=remote_path)
    else:
        # Single-file upload of the known test RPM
        ssh.upload_file(local_file=get_data_file(RPM_TO_UPLOAD),
                        remote_file=remote_path)
    result = Repository.upload_content(
        {
            'name': file_repo['name'],
            'organization': file_repo['organization'],
            'path': remote_path,
            'product-id': file_repo['product']['id'],
        }
    )
    self.assertIn(
        "Successfully uploaded file '{0}'".format(RPM_TO_UPLOAD),
        result[0]['message'],
    )
    # The file content count must reflect the upload
    file_repo = Repository.info({'id': file_repo['id']})
    self.assertGreater(int(file_repo['content-counts']['files']), 0)
    return file_repo
def test_positive_add_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository

    :id: 5529a852-9ef6-48f8-b2bc-2bbf463657dd

    :expectedresults: gpg key is associated with product as well as with
        the repository

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Re-read both entities to observe the association
    product_info = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    repo_info = Repository.info({'id': repo['id']})
    self.assertEqual(product_info['gpg']['gpg-key-id'], gpg_key['id'])
    self.assertEqual(repo_info['gpg-key']['id'], gpg_key['id'])
def test_update_cvf_repo(self):
    """Test: Create new content view filter and apply it to existing
    content view that has repository assigned to it. Try to update that
    filter and change affected repository on another one.

    @Feature: Content View Filter

    @Assert: Content view filter updated successfully and has new
    repository affected
    """
    cvf_name = gen_string("utf8")
    ContentView.filter_create({
        "content-view-id": self.content_view["id"],
        "name": cvf_name,
        "repository-ids": self.repo["id"],
        "type": "rpm",
    })
    cvf = ContentView.filter_info({
        u"content-view-id": self.content_view["id"],
        u"name": cvf_name,
    })
    # Initially the filter affects exactly the original repository
    self.assertEqual(len(cvf["repositories"]), 1)
    self.assertEqual(cvf["repositories"][0]["name"], self.repo["name"])
    # Add a second repository to the CV, then point the filter at it
    new_repo = make_repository({u"product-id": self.product["id"]})
    ContentView.add_repository({
        u"id": self.content_view["id"],
        u"repository-id": new_repo["id"],
    })
    ContentView.filter_update({
        "content-view-id": self.content_view["id"],
        "name": cvf_name,
        "repository-ids": new_repo["id"],
    })
    cvf = ContentView.filter_info({
        u"content-view-id": self.content_view["id"],
        u"name": cvf_name,
    })
    # The filter must now affect only the new repository
    self.assertEqual(len(cvf["repositories"]), 1)
    self.assertNotEqual(cvf["repositories"][0]["name"], self.repo["name"])
    self.assertEqual(cvf["repositories"][0]["name"], new_repo["name"])
def test_negative_update_with_invalid_repo_id(self):
    """Try to update filter and assign repository which does not belong to
    filter content view

    @Feature: Content View Filter

    @Assert: Content view filter is not updated
    """
    filter_name = gen_string('utf8')
    ContentView.filter_create({
        'content-view-id': self.content_view['id'],
        'name': filter_name,
        'repository-ids': self.repo['id'],
        'type': 'rpm',
    })
    # Repository outside the content view; the assignment must fail
    outside_repo = make_repository({u'product-id': self.product['id']})
    with self.assertRaises(CLIReturnCodeError):
        ContentView.filter_update({
            'content-view-id': self.content_view['id'],
            'name': filter_name,
            'repository-ids': outside_repo['id'],
        })
def test_positive_add_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    :id: 1427f145-9faf-41ef-ae42-dc91d61ce1f6

    :expectedresults: gpg key is associated with the repository but not
        with the product

    :CaseLevel: Integration
    """
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Associate the key with the repository only, not the product
    Repository.update({
        'gpg-key-id': gpg_key['id'],
        'id': repo['id'],
        'organization-id': self.org['id'],
    })
    product_info = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    repo_info = Repository.info({'id': repo['id']})
    self.assertEqual(repo_info['gpg-key']['id'], gpg_key['id'])
    # The product must not have inherited the key
    self.assertNotEqual(product_info['gpg'].get('gpg-key-id'), gpg_key['id'])
def setUpClass(cls):
    """Create Directory for all CV Sync Tests in /tmp"""
    super(ContentViewSync, cls).setUpClass()
    # Exporting side: org -> product -> synced repo -> published CV
    cls.exporting_org = make_org()
    cls.exporting_prod = gen_string('alpha')
    export_product = make_product({
        'organization-id': cls.exporting_org['id'],
        'name': cls.exporting_prod,
    })
    cls.exporting_repo = gen_string('alpha')
    # 'immediate' download policy so content is fully present for export
    export_repo = make_repository({
        'name': cls.exporting_repo,
        'download-policy': 'immediate',
        'product-id': export_product['id'],
    })
    Repository.synchronize({'id': export_repo['id']})
    cls.exporting_cv = gen_string('alpha')
    export_cv = make_content_view({
        'name': cls.exporting_cv,
        'organization-id': cls.exporting_org['id'],
    })
    ContentView.add_repository({
        'id': export_cv['id'],
        'organization-id': cls.exporting_org['id'],
        'repository-id': export_repo['id'],
    })
    ContentView.publish({u'id': export_cv['id']})
    export_cv = ContentView.info({u'id': export_cv['id']})
    # Remember the first published version's id for the export tests
    cls.exporting_cvv_id = export_cv['versions'][0]['id']
def set_importing_org(self, product, repo, cv):
    """Sets same CV, product and repository in importing organization as
    exporting organization

    :param str product: The product name same as exporting product
    :param str repo: The repo name same as exporting repo
    :param str cv: The cv name same as exporting cv
    """
    self.importing_org = make_org()
    # Mirror the exporting product/repo names inside the new org
    mirrored_product = make_product({
        'organization-id': self.importing_org['id'],
        'name': product,
    })
    mirrored_repo = make_repository({
        'name': repo,
        'download-policy': 'immediate',
        'product-id': mirrored_product['id'],
    })
    self.importing_cv = make_content_view({
        'name': cv,
        'organization-id': self.importing_org['id'],
    })
    ContentView.add_repository({
        'id': self.importing_cv['id'],
        'organization-id': self.importing_org['id'],
        'repository-id': mirrored_repo['id'],
    })
def test_positive_add_repo_from_product_with_repo(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    @feature: GPG Keys

    @assert: gpg key is associated with the repository but not with
    the product
    """
    prod = make_product({'organization-id': self.org['id']})
    prod_repo = make_repository({'product-id': prod['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Associate the key at repository level only
    Repository.update({
        'gpg-key-id': key['id'],
        'id': prod_repo['id'],
        'organization-id': self.org['id'],
    })
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    prod_repo = Repository.info({'id': prod_repo['id']})
    # The repo gets the key; the product must stay unassociated
    self.assertEqual(prod_repo['gpg-key']['id'], key['id'])
    self.assertNotEqual(prod['gpg'].get('gpg-key-id'), key['id'])
def test_positive_add_product_with_repos(self):
    """@test: Create gpg key with valid name and valid gpg key via file
    import then associate it with custom product that has more than one
    repository

    @feature: GPG Keys

    @assert: gpg key is associated with product as well as with
    the repositories
    """
    product = make_product({'organization-id': self.org['id']})
    # Several repositories under the same product
    repo_count = gen_integer(2, 5)
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(repo_count)
    ]
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Associating the key at product level should cascade to all repos
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key-id'], gpg_key['id'])
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        self.assertEqual(repo_info['gpg-key']['id'], gpg_key['id'])
def test_positive_add_repo_from_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file
    import then associate it to repository from custom product that has
    one repository

    @id: 1427f145-9faf-41ef-ae42-dc91d61ce1f6

    @assert: gpg key is associated with the repository but not with
    the product

    @CaseLevel: Integration
    """
    new_product = make_product({'organization-id': self.org['id']})
    new_repo = make_repository({'product-id': new_product['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Assign the key directly on the repository record
    Repository.update({
        'gpg-key-id': key['id'],
        'id': new_repo['id'],
        'organization-id': self.org['id'],
    })
    new_product = Product.info({
        'id': new_product['id'],
        'organization-id': self.org['id'],
    })
    new_repo = Repository.info({'id': new_repo['id']})
    # Key shows up on the repository but not on the parent product
    self.assertEqual(new_repo['gpg-key']['id'], key['id'])
    self.assertNotEqual(new_product['gpg'].get('gpg-key-id'), key['id'])
def test_positive_add_repo_from_product_with_repos(self):
    """@test: Create gpg key via file import and associate with custom repo

    GPGKey should contain valid name and valid key and should be
    associated to one repository from custom product. Make sure custom
    product should have more than one repository.

    @feature: GPG Keys

    @assert: gpg key is associated with the repository
    """
    product = make_product({'organization-id': self.org['id']})
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # The key goes to the first repository only
    Repository.update({
        'gpg-key': gpg_key['name'],
        'id': repos[0]['id'],
        'organization-id': self.org['id'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Product itself must not pick up the key
    self.assertNotEqual(product['gpg'].get('gpg-key-id'), gpg_key['id'])
    # First repo should have a valid gpg key assigned
    first_repo = Repository.info({'id': repos.pop(0)['id']})
    self.assertEqual(first_repo['gpg-key']['id'], gpg_key['id'])
    # The rest of repos should not
    for other in repos:
        other = Repository.info({'id': other['id']})
        self.assertNotEqual(other['gpg-key'].get('id'), gpg_key['id'])
def test_positive_add_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file
    import then associate it with custom product that has one repository

    @id: 5529a852-9ef6-48f8-b2bc-2bbf463657dd

    @assert: gpg key is associated with product as well as with
    the repository

    @CaseLevel: Integration
    """
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Product-level association is expected to cascade to the repo
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(product['gpg']['gpg-key-id'], gpg_key['id'])
    self.assertEqual(repo['gpg-key']['id'], gpg_key['id'])
def setup_content(request):
    """Pytest fixture for setting up an organization, manifest,
    content-view, lifecycle environment, and activation key with
    subscriptions"""
    org = make_org()
    # Upload a cloned manifest so subscriptions are available in the org
    with manifests.clone() as manifest:
        upload_file(manifest.content, manifest.filename)
    product = make_product({'organization-id': org['id']})
    repo = make_repository({'product-id': product['id']})
    Repository.synchronize({'id': repo['id']})
    # Publish the repo through a content view and promote it to a new LCE
    content_view = make_content_view({'organization-id': org['id']})
    ContentView.add_repository({
        'id': content_view['id'],
        'organization-id': org['id'],
        'repository-id': repo['id'],
    })
    ContentView.publish({'id': content_view['id']})
    lce = make_lifecycle_environment({'organization-id': org['id']})
    cvv = ContentView.info({'id': content_view['id']})['versions'][0]
    ContentView.version_promote({
        'id': cvv['id'],
        'to-lifecycle-environment-id': lce['id'],
    })
    # Activation key with auto-attach disabled plus an explicit subscription
    activation_key = make_activation_key({
        'lifecycle-environment-id': lce['id'],
        'content-view': content_view['name'],
        'organization-id': org['id'],
        'auto-attach': False,
    })
    subs_id = Subscription.list({'organization-id': org['id']}, per_page=False)
    ActivationKey.add_subscription({
        'id': activation_key['id'],
        'subscription-id': subs_id[0]['id'],
    })
    # Expose the created entities on the requesting test class
    request.cls.setup_org = org
    request.cls.setup_new_ak = activation_key
    request.cls.setup_subs_id = subs_id
    request.cls.setup_env = lce
    request.cls.setup_content_view = content_view
def test_negative_update_with_invalid_repo_id(self, module_org, module_product, sync_repo, content_view):
    """Try to update filter and assign repository which does not belong
    to filter content view

    :id: aa550619-c436-4184-bb29-2becadf69e5b

    :expectedresults: Content view filter is not updated

    :CaseImportance: Critical
    """
    cvf_name = gen_string('utf8')
    ContentView.filter.create({
        'content-view-id': content_view['id'],
        'name': cvf_name,
        'repository-ids': sync_repo['id'],
        'type': 'rpm',
    })
    # Repository that is NOT part of the content view
    unrelated_repo = make_repository({
        'organization-id': module_org.id,
        'product-id': module_product.id,
    })
    # Updating the filter to an out-of-CV repository must fail
    with pytest.raises(CLIReturnCodeError):
        ContentView.filter.update({
            'content-view-id': content_view['id'],
            'name': cvf_name,
            'repository-ids': unrelated_repo['id'],
        })
def sync_repo(module_org, module_product):
    """Create a repository in the given org/product and synchronize it."""
    new_repo = make_repository({
        'organization-id': module_org.id,
        'product-id': module_product.id,
    })
    Repository.synchronize({'id': new_repo['id']})
    return new_repo
def test_positive_synchronize_custom_products_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync multiple
    custom products with multiple repos automatically.

    :id: dd262cf3-b836-422c-baca-b3adbc532478

    :expectedresults: Products are synchronized successfully.

    :CaseLevel: System
    """
    delay = 6 * 60  # delay for sync date in seconds
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'organization-id': self.org['id'],
        'sync-date': (datetime.utcnow() + timedelta(seconds=delay))
        .strftime("%Y-%m-%d %H:%M:%S"),
    })
    products = [
        make_product({'organization-id': self.org['id']})
        for _ in range(3)
    ]
    repos = [
        make_repository({'product-id': product['id']})
        for product in products
        for _ in range(2)
    ]
    # Verify products have not been synced yet
    for repo in repos:
        with self.assertRaises(AssertionError):
            self.validate_task_status(repo['id'], max_tries=2)
    # Associate sync plan with products
    for product in products:
        Product.set_sync_plan({
            'id': product['id'],
            'sync-plan-id': sync_plan['id'],
        })
    # Wait a quarter of the expected time, then re-check nothing synced.
    # Bug fix: the log message previously claimed delay/2 while the code
    # actually slept delay/4, which made timing failures hard to debug.
    self.logger.info('Waiting {0} seconds to check products'
                     ' were not synced'.format(delay / 4))
    sleep(delay / 4)
    # Verify products has not been synced yet
    for repo in repos:
        with self.assertRaises(AssertionError):
            self.validate_task_status(repo['id'], max_tries=2)
    # Wait the rest of expected time (log now matches the actual sleep)
    self.logger.info('Waiting {0} seconds to check products'
                     ' were synced'.format(delay * 3 / 4))
    sleep(delay * 3 / 4)
    # Re-calculate and Update with the current UTC time
    SyncPlan.update({
        u'id': sync_plan['id'],
        u'sync-date': (datetime.utcnow() + timedelta(seconds=delay))
        .strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Verify product was synced successfully
    for repo in repos:
        self.validate_task_status(repo['id'], repo_name=repo['name'])
        self.validate_repo_content(
            repo, ['errata', 'package-groups', 'packages'])
def test_positive_update_key_for_product_with_repos(module_org):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository then update the key

    :id: 8aa3dc75-6257-48ae-b3f9-c617e323b47a

    :expectedresults: gpg key is associated with product before/after
        update as well as with the repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': module_org.id})
    gpg_key = make_content_credential({'organization-id': module_org.id})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': module_org.id,
    })
    # Verify the association cascaded to product and every repository
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product['gpg']['gpg-key'] == gpg_key['name']
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        assert repo_info['gpg-key'].get('name') == gpg_key['name']
    # Rename the gpg key
    new_name = gen_choice(list(valid_data_list().values()))
    ContentCredential.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': module_org.id,
    })
    # Verify changes are reflected in the gpg key
    gpg_key = ContentCredential.info({
        'id': gpg_key['id'],
        'organization-id': module_org.id,
    })
    assert gpg_key['name'] == new_name
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': module_org.id,
    })
    assert product['gpg']['gpg-key'] == new_name
    # Verify changes are reflected in the repositories
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        assert repo_info['gpg-key'].get('name') == new_name
def test_positive_update_key_for_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository then update the key

    @id: a95eb51b-4b6b-4c04-bb4d-cbe600431850

    @assert: gpg key is associated with product before/after update as
    well as with the repositories

    @CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # The association must be visible on the product and on each repo
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        self.assertEqual(repo_info['gpg-key'].get('name'), gpg_key['name'])
    # Rename the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are reflected in the repositories
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        self.assertEqual(repo_info['gpg-key'].get('name'), new_name)
def test_positive_update_key_for_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository then update the key

    :id: a95eb51b-4b6b-4c04-bb4d-cbe600431850

    :expectedresults: gpg key is associated with product before/after
        update as well as with the repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    prod = make_product({'organization-id': self.org['id']})
    key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    prod_repos = [
        make_repository({'product-id': prod['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a product
    Product.update({
        'gpg-key': key['name'],
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    # Verify key is attached to the product and to every repository
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(prod['gpg']['gpg-key'], key['name'])
    for repo in prod_repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key'].get('name'), key['name'])
    # Rename the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    key = GPGKey.info({
        'id': key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(key['name'], new_name)
    # Verify changes are reflected in the product
    prod = Product.info({
        'id': prod['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(prod['gpg']['gpg-key'], new_name)
    # Verify changes are reflected in the repositories
    for repo in prod_repos:
        repo = Repository.info({'id': repo['id']})
        self.assertEqual(repo['gpg-key'].get('name'), new_name)
def setUpClass(cls):
    """Init single organization, product and repository for all tests"""
    super(TestContentViewFilter, cls).setUpClass()
    # Shared fixtures: one org with a synced repo inside a content view
    cls.org = make_org()
    cls.product = make_product({u"organization-id": cls.org["id"]})
    cls.repo = make_repository({u"product-id": cls.product["id"]})
    Repository.synchronize({u"id": cls.repo["id"]})
    cls.content_view = make_content_view({u"organization-id": cls.org["id"]})
    ContentView.add_repository({
        u"id": cls.content_view["id"],
        u"repository-id": cls.repo["id"],
    })
def _make_repository(self, options=None):
    """Makes a new repository and asserts its success

    :param options: optional dict of repository CLI options; a
        ``product-id`` entry pointing at this test's product is
        injected when absent (the passed-in dict is mutated).
    """
    options = {} if options is None else options
    # Default to the fixture product unless the caller specified one
    if options.get('product-id') is None:
        options[u'product-id'] = self.product['id']
    return make_repository(options)
def test_positive_synchronize_custom_product_past_sync_date(self):
    """Create a sync plan with a past datetime as a sync date, add a
    custom product and verify the product gets synchronized on the next
    sync occurrence

    :id: 21efdd08-698c-443c-a681-edce19a4c83a

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 2 * 60
    product = make_product({'organization-id': self.org['id']})
    repo = make_repository({'product-id': product['id']})
    # Sync date lies in the past so the next recurrence is ~`delay` away
    sync_plan = self._make_sync_plan({
        'enabled': 'true',
        'interval': 'hourly',
        'organization-id': self.org['id'],
        'sync-date': (datetime.utcnow()
                      - timedelta(seconds=interval - delay)
                      ).strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Associate sync plan with product
    Product.set_sync_plan({
        'id': product['id'],
        'sync-plan-id': sync_plan['id'],
    })
    # Verify product has not been synced yet
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay / 4, product['name']))
    sleep(delay / 4)
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo['id'], max_tries=2)
    self.validate_repo_content(repo, ['errata', 'packages'],
                               after_sync=False)
    # Wait until the first recurrence.
    # Bug fix: the log message previously claimed `delay` seconds while
    # the code actually slept delay * 3/4; the log now matches the sleep.
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format(delay * 3 / 4, product['name']))
    sleep(delay * 3 / 4)
    # Re-calculate and Update with the current UTC time
    SyncPlan.update({
        u'id': sync_plan['id'],
        u'sync-date': (datetime.utcnow()
                       - timedelta(seconds=interval - delay)
                       ).strftime("%Y-%m-%d %H:%M:%S"),
    })
    # Verify product was synced successfully
    self.validate_task_status(repo['id'], repo_name=repo['name'])
    self.validate_repo_content(repo,
                               ['errata', 'package-groups', 'packages'])
def test_positive_update_key_for_repo_from_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it to repository from custom product that has more
    than one repository then update the key

    :id: 773a9141-9f04-40ba-b3df-4b6d80db25a6

    :expectedresults: gpg key is associated with a single repository
        before/after update and not associated with product or other
        repositories

    :CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create repositories and assign them to the product
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    # Associate gpg key with a single repository
    Repository.update({
        'gpg-key': gpg_key['name'],
        'id': repos[0]['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    repos[0] = Repository.info({'id': repos[0]['id']})
    self.assertEqual(repos[0]['gpg-key']['name'], gpg_key['name'])
    # Rename the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the associated repository
    repos[0] = Repository.info({'id': repos[0]['id']})
    self.assertEqual(repos[0]['gpg-key'].get('name'), new_name)
    # Verify changes are not reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are not reflected in the rest of repositories
    for other in repos[1:]:
        other = Repository.info({'id': other['id']})
        self.assertNotEqual(other['gpg-key'].get('name'), new_name)
def test_positive_delete_key_for_product_with_repos(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has more than one
    repository then delete it

    @id: f92d4643-1892-4f95-ae6b-fcea8e726946

    @assert: gpg key is associated with product and its repositories
    during creation but removed from the product and the repositories
    after deletion

    @CaseLevel: Integration
    """
    # Create product, repositories and gpg key
    product = make_product({'organization-id': self.org['id']})
    repos = [
        make_repository({'product-id': product['id']})
        for _ in range(gen_integer(2, 5))
    ]
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated with product and its repositories
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        self.assertEqual(repo_info['gpg-key'].get('name'), gpg_key['name'])
    # Delete the gpg key
    GPGKey.delete({
        'name': gpg_key['name'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was actually deleted
    with self.assertRaises(CLIReturnCodeError):
        GPGKey.info({
            'id': gpg_key['id'],
            'organization-id': self.org['id'],
        })
    # Verify gpg key was disassociated from the product and its
    # repositories
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertNotEqual(product['gpg']['gpg-key'], gpg_key['name'])
    for repo in repos:
        repo_info = Repository.info({'id': repo['id']})
        self.assertNotEqual(repo_info['gpg-key'].get('name'), gpg_key['name'])
def test_positive_sync_by_label(self):
    """@Test: Check if product can be synchronized by its label.

    @Feature: Product

    @Assert: Product is synchronized
    """
    org = make_org()
    product = make_product({'organization-id': org['id']})
    make_repository({'product-id': product['id']})
    # Trigger the sync using the organization *label* rather than its id
    Product.synchronize({
        'id': product['id'],
        'organization-label': org['label'],
    })
    product = Product.info({
        'id': product['id'],
        'organization-id': org['id'],
    })
    self.assertEqual(u'Syncing Complete.', product['sync-state'])
def create(self, organization_id, product_id, download_policy=None, synchronize=True):
    """Create the repository for this content type and optionally sync it.

    :param organization_id: accepted for interface parity with sibling
        implementations, but unused here -- TODO confirm intentional
    :param product_id: id of the product the repository belongs to
    :param download_policy: accepted but unused here -- TODO confirm
    :param synchronize: when True, synchronize the repo after creation
    :return: the created repository info dict
    """
    self._repo_info = make_repository({
        'product-id': product_id,
        'content-type': self.content_type,
        'url': self.url,
        'docker-upstream-name': self.upstream_name,
    })
    if synchronize:
        self.synchronize()
    return self._repo_info
def test_positive_update_key_for_product_with_repo(self):
    """Create gpg key with valid name and valid gpg key via file import
    then associate it with custom product that has one repository
    then update the key

    @id: 3fb550a7-507e-4988-beb6-35bdfc2e99a8

    @assert: gpg key is associated with product before/after update
    as well as with the repository

    @CaseLevel: Integration
    """
    # Create a product and a gpg key
    product = make_product({'organization-id': self.org['id']})
    gpg_key = make_gpg_key({'organization-id': self.org['id']})
    # Create a repository and assign it to the product
    repo = make_repository({'product-id': product['id']})
    # Associate gpg key with a product
    Product.update({
        'gpg-key': gpg_key['name'],
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    # Verify gpg key was associated
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(product['gpg']['gpg-key'], gpg_key['name'])
    self.assertEqual(repo['gpg-key'].get('name'), gpg_key['name'])
    # Update the gpg key
    new_name = gen_choice(valid_data_list())
    GPGKey.update({
        'name': gpg_key['name'],
        'new-name': new_name,
        'organization-id': self.org['id'],
    })
    # Verify changes are reflected in the gpg key
    gpg_key = GPGKey.info({
        'id': gpg_key['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(gpg_key['name'], new_name)
    # Verify changes are reflected in the product
    product = Product.info({
        'id': product['id'],
        'organization-id': self.org['id'],
    })
    self.assertEqual(product['gpg']['gpg-key'], new_name)
    # Verify changes are reflected in the repository.
    # Bug fix: the original asserted on the key *id*, which never changes
    # on rename and therefore could not detect whether the rename
    # actually propagated; assert on the new name instead, matching the
    # product-with-repos sibling tests.
    repo = Repository.info({'id': repo['id']})
    self.assertEqual(repo['gpg-key'].get('name'), new_name)