def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
    """Sync CDN based ostree repository.

    :id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5

    :Steps:
        1. Import a valid manifest
        2. Enable the OStree repo and sync it

    :expectedresults: ostree repo should be synced successfully from CDN

    :CaseLevel: Integration

    :BZ: 1625783
    """
    # Enable the Atomic Host ostree repo from the CDN; ostree content has
    # no basearch or release version.
    enable_rhrepo_and_fetchid(
        basearch=None,
        org_id=module_org_with_manifest.id,
        product=PRDS['rhah'],
        repo=REPOS['rhaht']['name'],
        reposet=REPOSET['rhaht'],
        releasever=None,
    )
    sync_target = (PRDS['rhah'], REPOS['rhaht']['name'])
    with session:
        session.organization.select(org_name=module_org_with_manifest.name)
        sync_results = session.sync_status.synchronize([sync_target])
        # Exactly one repo was requested, and it must finish cleanly.
        assert len(sync_results) == 1
        assert sync_results[0] == 'Syncing Complete.'
def test_positive_sync_rh_ostree_repo(session, module_org_with_manifest):
    """Sync CDN based ostree repository.

    :id: 4d28fff0-5fda-4eee-aa0c-c5af02c31de5

    :Steps:
        1. Import a valid manifest
        2. Enable the OStree repo and sync it

    :expectedresults: ostree repo should be synced successfully from CDN

    :CaseLevel: Integration
    """
    # ostree repos carry neither an architecture nor a release version.
    enable_rhrepo_and_fetchid(
        basearch=None,
        org_id=module_org_with_manifest.id,
        product=PRDS['rhah'],
        repo=REPOS['rhaht']['name'],
        reposet=REPOSET['rhaht'],
        releasever=None,
    )
    with session:
        session.organization.select(org_name=module_org_with_manifest.name)
        outcome = session.sync_status.synchronize(
            [(PRDS['rhah'], REPOS['rhaht']['name'])]
        )
        # A single repo was synced and it must report success.
        assert len(outcome) == 1
        assert outcome[0] == 'Syncing Complete.'
def setUpClass(cls):
    """Creates the pre-requisites for the Incremental updates that used in all test"""
    super(IncrementalUpdateTestCase, cls).setUpClass()
    # Create a new Organization
    cls.org = Organization(name=gen_alpha()).create()
    # Create two lifecycle environments - DEV, QE (QE chained after DEV)
    cls.dev_lce = LifecycleEnvironment(
        name='DEV',
        organization=cls.org
    ).create()
    cls.qe_lce = LifecycleEnvironment(
        name='QE',
        prior=cls.dev_lce,
        organization=cls.org
    ).create()
    # Upload manifest so Red Hat content can be enabled in the org
    with manifests.clone() as manifest:
        upload_manifest(cls.org.id, manifest.content)
    # Enable repositories - RHE Virtualization Agents and rhel6 sat6tools
    rhva_6_repo_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=cls.org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhva6']['name'],
        reposet=REPOSET['rhva6'],
        releasever=DEFAULT_RELEASE_VERSION,
    )
    rhel6_sat6tools_repo_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=cls.org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst6']['name'],
        reposet=REPOSET['rhst6'],
        releasever=None,
    )
    # Read the repositories
    cls.rhva_6_repo = Repository(id=rhva_6_repo_id).read()
    cls.rhel6_sat6tools_repo = Repository(
        id=rhel6_sat6tools_repo_id
    ).read()
    # Sync the enabled repositories
    try:
        cls.old_task_timeout = entity_mixins.TASK_TIMEOUT
        # Update timeout to 15 minutes to finish sync
        entity_mixins.TASK_TIMEOUT = 900
        for repo in [cls.rhva_6_repo, cls.rhel6_sat6tools_repo]:
            assert Repository(id=repo.id).sync()['result'] == u'success'
    finally:
        # Always restore the module-global timeout, even if a sync failed
        entity_mixins.TASK_TIMEOUT = cls.old_task_timeout
def test_positive_add_rh_and_custom_products(session):
    """Test that RH/Custom product can be associated to Activation keys

    :id: 3d8876fa-1412-47ca-a7a4-bce2e8baf3bc

    :Steps:
        1. Create Activation key
        2. Associate RH product(s) to Activation Key
        3. Associate custom product(s) to Activation Key

    :expectedresults: RH/Custom product is successfully associated to
        Activation key

    :CaseLevel: Integration
    """
    name = gen_string('alpha')
    # CDN coordinates of the Red Hat repo to enable
    rh_repo = {
        'name': REPOS['rhva6']['name'],
        'product': PRDS['rhel'],
        'reposet': REPOSET['rhva6'],
        'basearch': DEFAULT_ARCHITECTURE,
        'releasever': DEFAULT_RELEASE_VERSION,
    }
    custom_product_name = gen_string('alpha')
    repo_name = gen_string('alpha')
    org = entities.Organization().create()
    product = entities.Product(name=custom_product_name, organization=org).create()
    repo = entities.Repository(name=repo_name, product=product).create()
    # A manifest is required before RH content can be enabled
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rhel_repo_id = enable_rhrepo_and_fetchid(
        basearch=rh_repo['basearch'],
        org_id=org.id,
        product=rh_repo['product'],
        repo=rh_repo['name'],
        reposet=rh_repo['reposet'],
        releasever=rh_repo['releasever'],
    )
    # Sync both the RH and the custom repository
    for repo_id in [rhel_repo_id, repo.id]:
        entities.Repository(id=repo_id).sync()
    with session:
        session.organization.select(org.name)
        session.activationkey.create({
            'name': name,
            'lce': {ENVIRONMENT: True},
            'content_view': DEFAULT_CV
        })
        assert session.activationkey.search(name)[0]['Name'] == name
        # Attach both the RH default subscription and the custom product
        for subscription in (DEFAULT_SUBSCRIPTION_NAME, custom_product_name):
            session.activationkey.add_subscription(name, subscription)
        ak = session.activationkey.read(name, widget_names='subscriptions')
        subscriptions = [
            subscription['Repository Name']
            for subscription in ak['subscriptions']['resources']['assigned']
        ]
        # Exactly the two expected subscriptions are assigned
        assert {DEFAULT_SUBSCRIPTION_NAME, custom_product_name} == set(subscriptions)
def test_positive_sync_kickstart_check_os(self, module_manifest_org, distro):
    """Sync rhel KS repo and assert that OS was created

    :id: f84bcf1b-717e-40e7-82ee-000eead45249

    :Parametrized: Yes

    :steps:
        1. Enable and sync a kickstart repo.
        2. Check that OS with corresponding version.

    :expectedresults:
        1. OS with corresponding version was created.
    """
    # Hoist the per-distro kickstart spec; it is consulted several times.
    ks_spec = constants.REPOS['kickstart'][distro]
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_manifest_org.id,
        product=ks_spec['product'],
        reposet=constants.REPOSET['kickstart'][distro],
        repo=ks_spec['name'],
        releasever=ks_spec['version'],
    )
    # Syncing the kickstart repo is what triggers OS auto-creation.
    entities.Repository(id=repo_id).read().sync()
    major, minor = ks_spec['version'].split('.')
    matching_os = entities.OperatingSystem().search(
        query={'search': f'name="RedHat" AND major="{major}" AND minor="{minor}"'}
    )
    assert len(matching_os)
def test_positive_expired_SCA_cert_handling(module_org, rhel7_contenthost, default_sat):
    """Verify that a content host with an expired SCA cert can
    re-register successfully

    :id: 27bca6b8-dd9c-4977-81d2-319588ee59b3

    :steps:
        1. Import an SCA-enabled manifest
        2. Register a content host to the Default Organization View using
           an activation key
        3. Unregister the content host
        4. Enable and synchronize a repository
        5. Re-register the host using the same activation key as in step 3
           above

    :expectedresults: the host is re-registered successfully and its SCA
        entitlement certificate is refreshed

    :CustomerScenario: true

    :Assignee: dsynk

    :BZ: 1949353

    :parametrized: yes

    :CaseImportance: High
    """
    # The 'golden_ticket' manifest clone enables Simple Content Access
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(module_org.id, manifest.content)
    # Key against the Default Content View / Library environment
    ak = entities.ActivationKey(
        content_view=module_org.default_content_view,
        max_hosts=100,
        organization=module_org,
        environment=entities.LifecycleEnvironment(id=module_org.library.id),
        auto_attach=True,
    ).create()
    # registering the content host with no content enabled/synced in the org
    # should create a client SCA cert with no content
    rhel7_contenthost.install_katello_ca(default_sat)
    rhel7_contenthost.register_contenthost(org=module_org.label, activation_key=ak.name)
    assert rhel7_contenthost.subscribed
    rhel7_contenthost.unregister()
    # syncing content with the content host unregistered should invalidate
    # the previous client SCA cert
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    # re-registering the host should test whether Candlepin gracefully handles
    # registration of a host with an expired SCA cert
    rhel7_contenthost.register_contenthost(module_org.label, ak.name)
    assert rhel7_contenthost.subscribed
def test_positive_list_updated(self):
    """View all errata in an Org sorted by Updated

    :id: 560d6584-70bd-4d1b-993a-cc7665a9e600

    :Setup: Errata synced on satellite server.

    :Steps: GET /katello/api/errata

    :expectedresults: Errata is filtered by Org and sorted by Updated date.

    :CaseLevel: System
    """
    # Reuse the RHVA6 repo if it already exists in this org ...
    repo = entities.Repository(name=REPOS['rhva6']['name']).search(
        query={'organization_id': self.org.id})
    if repo:
        repo = repo[0]
    else:
        # ... otherwise enable it from the CDN
        repo_with_cves_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=self.org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_with_cves_id)
    self.assertEqual(repo.sync()['result'], 'success')
    # Ask the API for errata ordered by the 'updated' field, ascending
    erratum_list = entities.Errata(repository=repo).search(query={
        'order': 'updated ASC',
        'per_page': 1000
    })
    updated = [errata.updated for errata in erratum_list]
    # The server-side ordering must already be sorted
    self.assertEqual(updated, sorted(updated))
def test_positive_sort_by_issued_date(self):
    """Filter errata by issued date

    :id: 6b4a783a-a7b4-4af4-b9e6-eb2928b7f7c1

    :Setup: Errata synced on satellite server.

    :Steps: GET /katello/api/errata

    :expectedresults: Errata is sorted by issued date.

    :CaseLevel: System
    """
    # Reuse the RHVA6 repo if it already exists in this org ...
    repo = entities.Repository(name=REPOS['rhva6']['name']).search(
        query={'organization_id': self.org.id})
    if repo:
        repo = repo[0]
    else:
        # ... otherwise enable it from the CDN
        repo_with_cves_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=self.org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_with_cves_id)
    self.assertEqual(repo.sync()['result'], 'success')
    # Ask the API for errata ordered by the 'issued' field, ascending
    erratum_list = entities.Errata(repository=repo).search(query={
        'order': 'issued ASC',
        'per_page': 1000
    })
    issued = [errata.issued for errata in erratum_list]
    # The server-side ordering must already be sorted
    self.assertEqual(issued, sorted(issued))
def golden_ticket_host_setup():
    """Create an org with a 'golden_ticket' (SCA-enabled) manifest, synced
    RH and custom repos, and an activation key with the default
    subscription attached.

    :return: tuple of (Organization, ActivationKey)
    """
    org = entities.Organization().create()
    # The 'golden_ticket' manifest clone enables Simple Content Access
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_product = entities.Product(organization=org).create()
    # Upper-case repo name exercises case handling in consumers
    custom_repo = entities.Repository(
        name=gen_string('alphanumeric').upper(),
        product=custom_product
    ).create()
    custom_repo.sync()
    # Key against the Default Content View / Library environment
    ak = entities.ActivationKey(
        content_view=org.default_content_view,
        max_hosts=100,
        organization=org,
        environment=entities.LifecycleEnvironment(id=org.library.id),
        auto_attach=True,
    ).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'}
    )[0]
    ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id})
    return org, ak
def golden_ticket_host_setup():
    """Create an org with a 'golden_ticket' (SCA-enabled) manifest, synced
    RH and custom repos, and an activation key.

    Unlike the variant that attaches the default subscription, no
    subscription is attached here — presumably because SCA makes explicit
    attachment unnecessary (TODO confirm against callers).

    :return: tuple of (Organization, ActivationKey)
    """
    org = entities.Organization().create()
    # The 'golden_ticket' manifest clone enables Simple Content Access
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_product = entities.Product(organization=org).create()
    # Upper-case repo name exercises case handling in consumers
    custom_repo = entities.Repository(name=gen_string('alphanumeric').upper(),
                                      product=custom_product).create()
    custom_repo.sync()
    # Key against the Default Content View / Library environment
    ak = entities.ActivationKey(
        content_view=org.default_content_view,
        max_hosts=100,
        organization=org,
        environment=entities.LifecycleEnvironment(id=org.library.id),
        auto_attach=True,
    ).create()
    return org, ak
def test_positive_list_updated(self):
    """View all errata in an Org sorted by Updated

    :id: 560d6584-70bd-4d1b-993a-cc7665a9e600

    :Setup: Errata synced on satellite server.

    :Steps: GET /katello/api/errata

    :expectedresults: Errata is filtered by Org and sorted by Updated date.

    :CaseLevel: System
    """
    # Reuse the RHVA6 repo if it already exists in this org ...
    repo = entities.Repository(name=REPOS['rhva6']['name']).search(
        query={'organization_id': self.org.id})
    if repo:
        repo = repo[0]
    else:
        # ... otherwise enable it from the CDN
        repo_with_cves_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=self.org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_with_cves_id)
    self.assertEqual(repo.sync()['result'], 'success')
    # Ask the API for errata ordered by the 'updated' field, ascending
    erratum_list = entities.Errata(repository=repo).search(query={
        'order': 'updated ASC',
        'per_page': 1000,
    })
    updated = [errata.updated for errata in erratum_list]
    # The server-side ordering must already be sorted
    self.assertEqual(updated, sorted(updated))
def test_redhat_sync_1(self):
    """@Test: Sync RedHat Repository.

    @Feature: Repositories

    @Assert: Repository synced should fetch the data successfully.
    """
    cloned_manifest_path = manifests.clone()
    # Old-style nailgun API: create() returns a dict of the entity fields
    org_id = entities.Organization().create()['id']
    repo = "Red Hat Enterprise Linux 6 Server - RH Common RPMs x86_64 6.3"
    task = entities.Organization(
        id=org_id).upload_manifest(path=cloned_manifest_path)
    self.assertEqual(
        u'success', task['result'], task['humanized']['errors']
    )
    # Positional args: basearch, org_id, product, repo name, reposet, releasever
    repo_id = utils.enable_rhrepo_and_fetchid(
        "x86_64",
        org_id,
        "Red Hat Enterprise Linux Server",
        repo,
        "Red Hat Enterprise Linux 6 Server - RH Common (RPMs)",
        "6.3",
    )
    task_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(
        task_result,
        u'success',
        u"Sync for repository '{0}' failed.".format(repo))
def enable_rhel_subscriptions(module_target_sat, module_org, manifest, version):
    """Enable and sync RHEL rpms repos

    :param version: RHEL release string, e.g. '7' or '8.6'; for RHEL 8 the
        remainder after the major digit is appended to repo name/releasever.
    :return: list of synced Repository entities
    """
    # NOTE(review): the 'manifest' parameter is unused in this body —
    # presumably kept for fixture ordering; confirm before removing.
    major = version.split('.')[0]
    minor = ""
    if major == '8':
        # RHEL 8 content is split across BaseOS and AppStream repo sets
        repo_names = ['rhel8_bos', 'rhel8_aps']
        minor = version[1:]  # e.g. '8.6' -> '.6'
    else:
        repo_names = ['rhel7']
    rh_repos = []
    tasks = []
    for name in repo_names:
        rh_repo_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=module_org.id,
            product=REPOS[name]['product'],
            repo=REPOS[name]['name'] + minor,
            reposet=REPOS[name]['reposet'],
            releasever=REPOS[name]['releasever'] + minor,
        )
        # Sync step because repo is not synced by default
        rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read()
        task = rh_repo.sync(synchronous=False)
        tasks.append(task)
        rh_repos.append(rh_repo)
    # Kick off all syncs first, then wait for each to finish successfully
    for task in tasks:
        wait_for_tasks(
            search_query=(f'id = {task["id"]}'),
            poll_timeout=2500,
        )
        task_status = module_target_sat.api.ForemanTask(id=task['id']).poll()
        assert task_status['result'] == 'success'
    return rh_repos
def setup_to_create_cv(self, repo_name=None, repo_url=None, repo_type=None,
                       rh_repo=None, org_id=None):
    """Create product/repo and sync it"""
    if rh_repo:
        # Upload the manifest so Red Hat content can be enabled.
        with open(manifests.clone(), 'rb') as manifest:
            upload_manifest(org_id, manifest)
        # Enable the Red Hat repo and fetch its id.
        repo_id = enable_rhrepo_and_fetchid(
            basearch=rh_repo['basearch'],
            org_id=str(org_id),  # OrgId is passed as data in API hence str
            product=rh_repo['product'],
            repo=rh_repo['name'],
            reposet=rh_repo['reposet'],
            releasever=rh_repo['releasever'],
        )
    else:
        # No RH spec given: build a custom product + repository instead.
        chosen_name = repo_name or gen_string('alpha')
        product = entities.Product(
            organization=org_id or self.organization
        ).create()
        repo_id = entities.Repository(
            name=chosen_name,
            url=(repo_url or FAKE_1_YUM_REPO),
            content_type=(repo_type or REPO_TYPE['yum']),
            product=product,
        ).create().id
    # Sync repository
    entities.Repository(id=repo_id).sync()
def setup_content():
    """Create an org with a manifest, a synced RH repo and a synced custom
    repo published via a content view, plus an activation key with the
    default subscription attached.

    :return: tuple of (ActivationKey, Organization)
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_repo = entities.Repository(
        product=entities.Product(organization=org).create(),
    ).create()
    custom_repo.sync()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Publish both repos in a content view and promote it to the new LCE
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id, custom_repo.id],
    ).create()
    cv.publish()
    cvv = cv.read().version[0].read()
    promote(cvv, lce.id)
    ak = entities.ActivationKey(
        content_view=cv,
        max_hosts=100,
        organization=org,
        environment=lce,
        auto_attach=True
    ).create()
    subscription = entities.Subscription(organization=org).search(
        query={'search': f'name="{DEFAULT_SUBSCRIPTION_NAME}"'}
    )[0]
    ak.add_subscriptions(data={'quantity': 1, 'subscription_id': subscription.id})
    return ak, org
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @id: 73a456fb-ad17-4921-b57c-27fc8e432a83

    @Assert: Product is synchronized successfully.

    @BZ: 1279539

    @CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    # Use the browser client's clock so the plan start matches the UI timezone
    startdate = self.get_client_datetime()
    with Session(self.browser) as session:
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(plan_name, add_products=[PRDS['rhel']])
        # Wait half of expected time
        sleep(interval / 2)
        # Verify product has not been synced yet
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
            after_sync=False,
        )
        # Wait the rest of expected time
        sleep(interval / 2)
        # Verify product was synced successfully
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
        )
def test_positive_sort_by_issued_date(self):
    """Filter errata by issued date

    :id: 6b4a783a-a7b4-4af4-b9e6-eb2928b7f7c1

    :Setup: Errata synced on satellite server.

    :Steps: GET /katello/api/errata

    :expectedresults: Errata is sorted by issued date.

    :CaseLevel: System
    """
    # Reuse the RHVA6 repo if it already exists in this org ...
    repo = entities.Repository(name=REPOS['rhva6']['name']).search(
        query={'organization_id': self.org.id})
    if repo:
        repo = repo[0]
    else:
        # ... otherwise enable it from the CDN
        repo_with_cves_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=self.org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_with_cves_id)
    self.assertEqual(repo.sync()['result'], 'success')
    # Ask the API for errata ordered by the 'issued' field, ascending
    erratum_list = entities.Errata(repository=repo).search(query={
        'order': 'issued ASC',
        'per_page': 1000,
    })
    issued = [errata.issued for errata in erratum_list]
    # The server-side ordering must already be sorted
    self.assertEqual(issued, sorted(issued))
def test_redhat_sync_1(self):
    """@Test: Sync RedHat Repository.

    @Feature: Repositories

    @Assert: Repository synced should fetch the data successfully.
    """
    cloned_manifest_path = manifests.clone()
    # Old-style nailgun API: create() returns a dict of the entity fields
    org_id = entities.Organization().create()['id']
    repo = "Red Hat Enterprise Linux 6 Server - RH Common RPMs x86_64 6.3"
    task = entities.Organization(id=org_id).upload_manifest(
        path=cloned_manifest_path)
    self.assertEqual(u'success', task['result'], task['humanized']['errors'])
    # Positional args: basearch, org_id, product, repo name, reposet, releasever
    repo_id = utils.enable_rhrepo_and_fetchid(
        "x86_64",
        org_id,
        "Red Hat Enterprise Linux Server",
        repo,
        "Red Hat Enterprise Linux 6 Server - RH Common (RPMs)",
        "6.3",
    )
    task_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(task_result, u'success',
                     u"Sync for repository '{0}' failed.".format(repo))
def test_positive_synchronize_rh_product_future_sync_date(module_org):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6697a00f-2181-4c2b-88eb-2333268d780b

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 2 * 60  # delay for sync date in seconds
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(name=PRDS['rhel'], organization=org).search()[0]
    repo = entities.Repository(id=repo_id).read()
    if is_open('BZ:1695733'):
        logger.info('Need to set seconds to zero because BZ:1695733')
        sync_date = datetime.utcnow().replace(second=0) + timedelta(seconds=delay)
    else:
        # FIX: the original assigned a one-element tuple here (stray
        # trailing comma) — SyncPlan.sync_date expects a plain datetime.
        sync_date = datetime.utcnow() + timedelta(seconds=delay)
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval='hourly',
        sync_date=sync_date,
    ).create()
    # Create and Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product is not synced and doesn't have any content
    with pytest.raises(AssertionError):
        validate_task_status(repo.id, max_tries=1)
    validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait quarter of expected time
    logger.info(
        f'Waiting {delay / 4} seconds to check product {product.name} was not synced'
    )
    sleep(delay / 4)
    # Verify product has not been synced yet
    with pytest.raises(AssertionError):
        validate_task_status(repo.id, max_tries=1)
    validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait the rest of expected time
    logger.info(
        f'Waiting {delay * 3 / 4} seconds to check product {product.name} was synced'
    )
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    validate_task_status(repo.id, repo_backend_id=repo.backend_identifier)
    validate_repo_content(repo, ['erratum', 'package', 'package_group'])
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    # NOTE(review): uses the local machine clock; sibling tests use the
    # browser client's datetime — timezone skew is possible here.
    startdate = datetime.now()
    with Session(self.browser) as session:
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(
            plan_name, add_products=[PRDS['rhel']])
        # Wait half of expected time
        sleep(interval / 2)
        # Verify product has not been synced yet
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
            after_sync=False,
        )
        # Wait the rest of expected time
        sleep(interval / 2)
        # Verify product was synced successfully
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
        )
def test_positive_synchronize_rh_product_past_sync_date(self):
    """Create a sync plan with past datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next sync
    occurrence

    :id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 4 * 60
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Start the plan in the past so its next hourly recurrence lands
    # roughly `delay` seconds from now
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow() - timedelta(seconds=interval - delay),
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product has not been synced yet
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was not synced'.format(delay / 4, product.name))
    sleep(delay / 4)
    with self.assertRaises(AssertionError):
        self.validate_task_status(repo.id, max_tries=2)
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait until the next recurrence
    self.logger.info('Waiting {0} seconds to check product {1}'
                     ' was synced'.format(delay, product.name))
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    self.validate_task_status(repo.id)
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'])
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 193d0159-d4a7-4f50-b037-7289f4576ade

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    with Session(self) as session:
        # Schedule `delay` seconds ahead using the browser client's clock
        # to avoid server/client timezone skew
        startdate = (self.get_client_datetime(session.browser)
                     + timedelta(seconds=delay))
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(plan_name, add_products=[PRDS['rhel']])
        # Wait half of expected time
        sleep(delay / 2)
        # Verify product has not been synced yet
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
            after_sync=False,
        )
        # Wait the rest of expected time
        sleep(delay / 2)
        # Verify product was synced successfully
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
        )
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    @id: 193d0159-d4a7-4f50-b037-7289f4576ade

    @Assert: Product is synchronized successfully.

    @CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    # NOTE(review): uses the local machine clock; the sibling variant uses
    # the browser client's datetime — timezone skew is possible here.
    startdate = datetime.now() + timedelta(seconds=delay)
    with Session(self.browser) as session:
        make_syncplan(
            session,
            org=org.name,
            name=plan_name,
            description='sync plan create with start time',
            interval=u'hourly',
            start_hour=startdate.strftime('%H'),
            start_minute=startdate.strftime('%M'),
        )
        # Associate sync plan with product
        self.syncplan.update(
            plan_name, add_products=[PRDS['rhel']])
        # Wait half of expected time
        sleep(delay / 2)
        # Verify product has not been synced yet
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
            after_sync=False,
        )
        # Wait the rest of expected time
        sleep(delay / 2)
        # Verify product was synced successfully
        self.validate_repo_content(
            PRDS['rhel'],
            repo.name,
            ['errata', 'package_groups', 'packages'],
        )
def enable_rhel_repo(self, organization_id):
    """Enable the RHEL repo set for the given org and return the repo id."""
    # Collect the enable parameters first, then make the single call.
    enable_kwargs = dict(
        basearch=constants.DEFAULT_ARCHITECTURE,
        org_id=organization_id,
        product=constants.PRDS['rhel'],
        repo=self.REPOS['rhel']['name'],
        reposet=self.REPOSET['rhel'],
        releasever=None,
    )
    return utils.enable_rhrepo_and_fetchid(**enable_kwargs)
def setup_content_rhel6():
    """Setup content fot rhel6 content host
    Using `Red Hat Enterprise Virtualization Agents for RHEL 6 Server (RPMs)`
    from manifest, SATTOOLS_REPO for host-tools and yum_9 repo as custom repo.

    :return: Activation Key, Organization, subscription list
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id_rhva = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=constants.PRDS['rhel'],
        repo=constants.REPOS['rhva6']['name'],
        reposet=constants.REPOSET['rhva6'],
        releasever=constants.DEFAULT_RELEASE_VERSION,
    )
    rh_repo = entities.Repository(id=rh_repo_id_rhva).read()
    rh_repo.sync()
    host_tools_product = entities.Product(organization=org).create()
    host_tools_repo = entities.Repository(
        product=host_tools_product,
    ).create()
    # Point the repo at the Satellite tools URL after creation, then sync
    host_tools_repo.url = settings.repos.SATTOOLS_REPO.RHEL6
    host_tools_repo = host_tools_repo.update(['url'])
    host_tools_repo.sync()
    custom_product = entities.Product(organization=org).create()
    custom_repo = entities.Repository(
        product=custom_product,
    ).create()
    custom_repo.url = CUSTOM_REPO_URL
    custom_repo = custom_repo.update(['url'])
    custom_repo.sync()
    lce = entities.LifecycleEnvironment(organization=org).create()
    # Publish all three repos through a content view, promote to the LCE
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id_rhva, host_tools_repo.id, custom_repo.id],
    ).create()
    cv.publish()
    cvv = cv.read().version[0].read()
    promote(cvv, lce.id)
    ak = entities.ActivationKey(content_view=cv, organization=org, environment=lce).create()
    # Attach the default RH subscription plus both custom product subs
    sub_list = [DEFAULT_SUBSCRIPTION_NAME, host_tools_product.name, custom_product.name]
    for sub_name in sub_list:
        subscription = entities.Subscription(organization=org).search(
            query={'search': f'name="{sub_name}"'}
        )[0]
        ak.add_subscriptions(data={'subscription_id': subscription.id})
    return ak, org, sub_list
def test_positive_synchronize_rh_product_future_sync_date(session):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 193d0159-d4a7-4f50-b037-7289f4576ade

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 5 * 60  # delay for sync date in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    manifests.upload_manifest_locked(org.id)
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    with session:
        session.organization.select(org_name=org.name)
        # Schedule `delay` seconds ahead using the browser client's clock
        # to avoid server/client timezone skew
        startdate = (session.browser.get_client_datetime()
                     + timedelta(seconds=delay))
        session.syncplan.create({
            'name': plan_name,
            'interval': SYNC_INTERVAL['week'],
            'date_time.start_date': startdate.strftime("%Y-%m-%d"),
            'date_time.hours': startdate.strftime('%H'),
            'date_time.minutes': startdate.strftime('%M'),
        })
        assert session.syncplan.search(plan_name)[0]['Name'] == plan_name
        session.syncplan.add_product(plan_name, PRDS['rhel'])
        # Waiting part of delay and check that product was not synced
        time.sleep(delay / 4)
        with raises(AssertionError) as context:
            validate_task_status(repo.id, max_tries=2)
        assert 'No task was found using query' in str(context.value)
        validate_repo_content(
            repo, ['erratum', 'package', 'package_group'], after_sync=False)
        # Waiting part of delay that left and check that product was synced
        time.sleep(delay * 3 / 4)
        validate_task_status(repo.id, repo_backend_id=repo.backend_identifier)
        validate_repo_content(repo, ['erratum', 'package', 'package_group'])
        # UI content counts must be non-zero after the sync
        repo_values = session.repository.read(PRDS['rhel'], repo.name)
        for repo_type in ['Packages', 'Errata', 'Package Groups']:
            assert int(repo_values['content_counts'][repo_type]) > 0
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    @id: 6697a00f-2181-4c2b-88eb-2333268d780b

    @Assert: Product is synchronized successfully.

    @CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    # Plan starts `delay` seconds in the future
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow() + timedelta(seconds=delay),
    ).create()
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait half of expected time
    sleep(delay / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait the rest of expected time
    sleep(delay / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo,
                               ['erratum', 'package', 'package_group'])
def test_positive_fetch_product_content(self):
    """Associate RH & custom product with AK and fetch AK's product content

    :id: 424f3dfb-0112-464b-b633-e8c9bce6e0f1

    :expectedresults: Both Red Hat and custom product subscriptions are
        assigned as Activation Key's product content

    :BZ: 1426386

    :CaseLevel: Integration

    :CaseImportance: Critical
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_repo = entities.Repository(
        product=entities.Product(organization=org).create(),
    ).create()
    custom_repo.sync()
    # Publish a CV containing both repos and attach it to a new AK.
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id, custom_repo.id],
    ).create()
    cv.publish()
    ak = entities.ActivationKey(content_view=cv, organization=org).create()
    # Add to the AK every org subscription that provides either product.
    org_subscriptions = entities.Subscription(organization=org).search()
    for subscription in org_subscriptions:
        provided_products_ids = [
            prod.id for prod in subscription.read().provided_product
        ]
        if (custom_repo.product.id in provided_products_ids
                or rh_repo.product.id in provided_products_ids):
            ak.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': subscription.id,
            })
    # The AK's product content must cover exactly both products.
    ak_subscriptions = ak.product_content()['results']
    self.assertEqual(
        {custom_repo.product.id, rh_repo.product.id},
        {subscr['product']['id'] for subscr in ak_subscriptions})
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next
    sync occurrence

    @id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    @Assert: Product is synchronized successfully.

    @BZ: 1279539

    @CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Plan starts "now"; next recurrence is one hourly interval away.
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow(),
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo, ['erratum', 'package', 'package_group'],
                               after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ['erratum', 'package', 'package_group'])
def module_rh_repo(module_org):
    """Enable, sync and return the CDN Satellite Tools repository for
    ``module_org``.

    NOTE(review): ``manifests.clone()`` is passed as a positional second
    argument to ``upload_manifest_locked`` here, while other call sites in
    this file pass only the org id — confirm that parameter is the
    manifest to upload.
    """
    manifests.upload_manifest_locked(module_org.id, manifests.clone())
    rhst = SatelliteToolsRepository(cdn=True)
    repo_id = enable_rhrepo_and_fetchid(
        basearch=rhst.data['arch'],
        org_id=module_org.id,
        product=rhst.data['product'],
        repo=rhst.data['repository'],
        reposet=rhst.data['repository-set'],
        releasever=rhst.data['releasever'],
    )
    # Sync first, then re-read so the returned entity reflects synced state.
    entities.Repository(id=repo_id).sync()
    return entities.Repository(id=repo_id).read()
def sat6tools_repo(default_org):
    """Enable Sat tools repository, sync it and return the repository
    entity. Fails the fixture immediately if the sync does not succeed.
    """
    sat6tools_repo_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=default_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    sat6tools_repo = entities.Repository(id=sat6tools_repo_id).read()
    # Fail fast if the CDN sync task did not complete successfully.
    assert sat6tools_repo.sync()['result'] == 'success'
    return sat6tools_repo
def test_positive_synchronize_rh_product_past_sync_date(self):
    """Create a sync plan with past datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next
    sync occurrence

    :id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    :expectedresults: Product is synchronized successfully.

    :BZ: 1279539

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 80
    org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        # FIX: ``timedelta``'s first positional argument is *days*, but
        # ``interval`` and ``delay`` are seconds; the original
        # ``timedelta(interval - delay/2)`` put the sync date ~3560 days
        # in the past instead of just under one hourly interval, so the
        # next recurrence would not fall ``delay/2`` seconds from now.
        sync_date=datetime.utcnow() - timedelta(seconds=interval - delay / 2),
    ).create()
    repo = entities.Repository(id=repo_id).read()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product has not been synced yet
    sleep(delay / 4)
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait until the next recurrence
    sleep(delay)
    # Verify product was synced successfully
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'])
def test_positive_synchronize_rh_product_future_sync_date(self):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 6697a00f-2181-4c2b-88eb-2333268d780b

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 10 * 60  # delay for sync date in seconds
    org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow() + timedelta(seconds=delay),
    ).create()
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Verify product is not synced and doesn't have any content
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait half of expected time
    sleep(delay/2)
    # Verify product has not been synced yet
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait the rest of expected time
    sleep(delay/2)
    # Verify product was synced successfully
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'])
def test_positive_sync_kickstart_repo(self, module_manifest_org, default_sat):
    """No encoding gzip errors on kickstart repositories sync.

    :id: dbdabc0e-583c-4186-981a-a02844f90412

    :expectedresults: No encoding gzip errors present in /var/log/messages.

    :CaseLevel: Integration

    :customerscenario: true

    :steps:

        1. Sync a kickstart repository.
        2. After the repo is synced, change the download policy to
            immediate.
        3. Sync the repository again.
        4. Assert that no errors related to encoding gzip are present in
            /var/log/messages.
        5. Assert that sync was executed properly.

    :CaseComponent: Pulp

    :Assignee: ltran

    :BZ: 1687801
    """
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_manifest_org.id,
        product=constants.PRDS['rhel8'],
        repo=constants.REPOS['rhel8_bos_ks']['name'],
        reposet=constants.REPOSET['rhel8_bos_ks'],
        releasever='8.4',
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    # Switch download policy and re-sync to trigger full content download.
    rh_repo.download_policy = 'immediate'
    rh_repo = rh_repo.update(['download_policy'])
    call_entity_method_with_timeout(rh_repo.sync, timeout=600)
    result = default_sat.execute(
        'grep pulp /var/log/messages | grep failed | grep encoding | grep gzip'
    )
    # grep exits 1 when nothing matches, i.e. no gzip encoding errors logged.
    assert result.status == 1
    assert not result.stdout
    rh_repo = rh_repo.read()
    assert rh_repo.content_counts['package'] > 0
    assert rh_repo.content_counts['package_group'] > 0
    assert rh_repo.content_counts['rpm'] > 0
def test_positive_synchronize_rh_product_future_sync_date(session):
    """Create a sync plan with sync date in a future and sync one RH
    product with it automatically.

    :id: 193d0159-d4a7-4f50-b037-7289f4576ade

    :expectedresults: Product is synchronized successfully.

    :CaseLevel: System
    """
    delay = 5 * 60  # delay for sync date in seconds
    plan_name = gen_string('alpha')
    org = entities.Organization().create()
    manifests.upload_manifest_locked(org.id)
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    repo = entities.Repository(id=repo_id).read()
    with session:
        session.organization.select(org_name=org.name)
        # Use the browser-side clock so the start date matches the UI form.
        startdate = (
            session.browser.get_client_datetime() + timedelta(seconds=delay))
        session.syncplan.create({
            'name': plan_name,
            'interval': SYNC_INTERVAL['week'],
            'date_time.start_date': startdate.strftime("%Y-%m-%d"),
            'date_time.hours': startdate.strftime('%H'),
            'date_time.minutes': startdate.strftime('%M'),
        })
        assert session.syncplan.search(plan_name)[0]['Name'] == plan_name
        session.syncplan.add_product(plan_name, PRDS['rhel'])
        # Waiting part of delay and check that product was not synced:
        # no sync task yet, and no content in the repository.
        time.sleep(delay / 4)
        with raises(AssertionError) as context:
            validate_task_status(repo.id, max_tries=2)
        assert 'No task was found using query' in str(context.value)
        validate_repo_content(
            repo, ['erratum', 'package', 'package_group'], after_sync=False)
        # Waiting part of delay that left and check that product was synced
        time.sleep(delay * 3 / 4)
        validate_task_status(repo.id, repo_backend_id=repo.backend_identifier)
        validate_repo_content(repo, ['erratum', 'package', 'package_group'])
        # Cross-check synced content counts through the repository UI page.
        repo_values = session.repository.read(PRDS['rhel'], repo.name)
        for repo_type in ['Packages', 'Errata', 'Package Groups']:
            assert int(repo_values['content_counts'][repo_type]) > 0
def setUpClass(cls):
    """Create an organization with an uploaded manifest, then enable and
    sync the RHEL7 Satellite Tools repository for the whole test class.
    """
    super(RHPackagesTestCase, cls).setUpClass()
    cls.organization = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(cls.organization.id, manifest.content)
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=cls.organization.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    # Sync once so class tests can rely on repository content being present.
    entities.Repository(id=repo_id).sync()
def test_positive_fetch_product_content(session):
    """Associate RH & custom product with AK and fetch AK's product content

    :id: 4c37fb12-ea2a-404e-b7cc-a2735e8dedb6

    :expectedresults: Both Red Hat and custom product subscriptions are
        assigned as Activation Key's product content

    :BZ: 1426386, 1432285

    :CaseLevel: Integration
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=constants.PRDS['rhel'],
        repo=constants.REPOS['rhst7']['name'],
        reposet=constants.REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_product = entities.Product(organization=org).create()
    custom_repo = entities.Repository(
        name=gen_string('alphanumeric').upper(),  # first letter is always
        # uppercase on product content page, workarounding it for
        # successful checks
        product=custom_product,
    ).create()
    custom_repo.sync()
    # Publish a CV with both repos and attach it to a fresh activation key.
    cv = entities.ContentView(organization=org,
                              repository=[rh_repo_id, custom_repo.id]).create()
    cv.publish()
    ak = entities.ActivationKey(content_view=cv, organization=org).create()
    with session:
        session.organization.select(org.name)
        for subscription in (constants.DEFAULT_SUBSCRIPTION_NAME,
                             custom_product.name):
            session.activationkey.add_subscription(ak.name, subscription)
        ak = session.activationkey.read(ak.name,
                                        widget_names='repository_sets')
        reposets = [
            reposet['Repository Name']
            for reposet in ak['repository_sets']['table']
        ]
        # Both the custom repo and the RH repo set must show up on the AK.
        assert {custom_repo.name, constants.REPOSET['rhst7']} == set(reposets)
def test_positive_fetch_product_content(self):
    """Associate RH & custom product with AK and fetch AK's product content

    :id: 424f3dfb-0112-464b-b633-e8c9bce6e0f1

    :expectedresults: Both Red Hat and custom product subscriptions are
        assigned as Activation Key's product content

    :BZ: 1426386

    :CaseLevel: Integration
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_repo = entities.Repository(
        product=entities.Product(organization=org).create(),
    ).create()
    custom_repo.sync()
    # Publish a CV containing both repos and attach it to a new AK.
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id, custom_repo.id],
    ).create()
    cv.publish()
    ak = entities.ActivationKey(content_view=cv, organization=org).create()
    # Add to the AK every org subscription that provides either product.
    org_subscriptions = entities.Subscription(organization=org).search()
    for subscription in org_subscriptions:
        provided_products_ids = [
            prod.id for prod in subscription.read().provided_product]
        if (custom_repo.product.id in provided_products_ids
                or rh_repo.product.id in provided_products_ids):
            ak.add_subscriptions(data={
                'quantity': 1,
                'subscription_id': subscription.id,
            })
    # The AK's product content must cover exactly both products.
    ak_subscriptions = ak.product_content()['results']
    self.assertEqual(
        {custom_repo.product.id, rh_repo.product.id},
        {subscr['product']['id'] for subscr in ak_subscriptions}
    )
def enable_sync_redhat_repo(self, rh_repo, org_id=None): """Enable the RedHat repo, sync it and returns repo_id""" # Enable RH repo and fetch repository_id repo_id = enable_rhrepo_and_fetchid( basearch=rh_repo['basearch'], org_id=org_id or self.organization.id, product=rh_repo['product'], repo=rh_repo['name'], reposet=rh_repo['reposet'], releasever=rh_repo['releasever'], ) # Sync repository entities.Repository(id=repo_id).sync() return repo_id
def rh_repo(module_org):
    """Upload a 'golden ticket' manifest into ``module_org``, then enable
    and sync the RHEL7 Satellite Tools repository; return the repository.
    """
    with manifests.clone(name='golden_ticket') as manifest:
        upload_manifest(module_org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=module_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    return rh_repo
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next
    sync occurrence

    @Assert: Product is synchronized successfully.

    @Feature: SyncPlan

    @BZ: 1279539
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Plan starts "now"; next recurrence is one hourly interval away.
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval=u'hourly',
        sync_date=datetime.utcnow(),
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(
        repo, ['erratum', 'package', 'package_group'])
def rh_repo_gt_manifest(module_gt_manifest_org): """Use GT manifest org, creates RH tools repo, syncs and returns RH repo.""" # enable rhel repo and return its ID rh_repo_id = enable_rhrepo_and_fetchid( basearch=DEFAULT_ARCHITECTURE, org_id=module_gt_manifest_org.id, product=PRDS['rhel'], repo=REPOS['rhst7']['name'], reposet=REPOSET['rhst7'], releasever=None, ) # Sync step because repo is not synced by default rh_repo = entities.Repository(id=rh_repo_id).read() rh_repo.sync() return rh_repo
def test_positive_fetch_product_content(session):
    """Associate RH & custom product with AK and fetch AK's product content

    :id: 4c37fb12-ea2a-404e-b7cc-a2735e8dedb6

    :expectedresults: Both Red Hat and custom product subscriptions are
        assigned as Activation Key's product content

    :BZ: 1426386, 1432285

    :CaseLevel: Integration
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    rh_repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    rh_repo = entities.Repository(id=rh_repo_id).read()
    rh_repo.sync()
    custom_product = entities.Product(organization=org).create()
    custom_repo = entities.Repository(
        name=gen_string('alphanumeric').upper(),  # first letter is always
        # uppercase on product content page, workarounding it for
        # successful checks
        product=custom_product).create()
    custom_repo.sync()
    # Publish a CV with both repos and attach it to a fresh activation key.
    cv = entities.ContentView(
        organization=org,
        repository=[rh_repo_id, custom_repo.id],
    ).create()
    cv.publish()
    ak = entities.ActivationKey(content_view=cv, organization=org).create()
    with session:
        session.organization.select(org.name)
        for subscription in (DEFAULT_SUBSCRIPTION_NAME, custom_product.name):
            session.activationkey.add_subscription(ak.name, subscription)
        ak = session.activationkey.read(ak.name)
        reposets = [
            reposet['Repository Name']
            for reposet in ak['repository_sets']['resources']
        ]
        # Both the custom repo and the RH repo set must show up on the AK.
        assert {custom_repo.name, REPOSET['rhst7']} == set(reposets)
def setup_to_create_cv(self, session, cv_name, repo_name=None, repo_url=None,
                       repo_type=None, rh_repo=None):
    """Create product/repo and sync it and create CV

    :param session: UI session used to create the content view.
    :param cv_name: name for the content view; random when falsy.
    :param repo_name: custom repo name; random when not given (custom path).
    :param repo_url: custom repo URL; defaults to ``FAKE_1_YUM_REPO``.
    :param repo_type: custom repo content type; defaults to yum.
    :param rh_repo: dict describing a RH repo; when given, the manifest is
        uploaded and the RH repo is enabled instead of creating a custom one.
    """
    cv_name = cv_name or gen_string("alpha", 8)
    if not rh_repo:
        repo_name = repo_name or gen_string("alpha", 8)
        # Creates new custom product via API's.
        # FIX: use create_json() so the result is a dict — .create()
        # returns an entity object, which the ['id'] subscripts below
        # would reject; the sibling helper in this file already uses
        # create_json() for the same dict-style access.
        product_attrs = entities.Product(
            organization=self.org_id
        ).create_json()
        # Creates new custom repository via API's
        repo_attrs = entities.Repository(
            name=repo_name,
            url=(repo_url or FAKE_1_YUM_REPO),
            content_type=(repo_type or REPO_TYPE['yum']),
            product=product_attrs['id'],
        ).create_json()
        repo_id = repo_attrs['id']
    elif rh_repo:
        # Clone the manifest and fetch it's path.
        manifest_path = manifests.clone()
        # Uploads the manifest and returns the result.
        task_result = entities.Organization(
            id=self.org_id
        ).upload_manifest(path=manifest_path)['result']
        self.assertEqual(u'success', task_result)
        # Enables the RedHat repo and fetches it's Id.
        repo_id = utils.enable_rhrepo_and_fetchid(
            rh_repo['basearch'],
            str(self.org_id),  # Org Id is passed as data in API hence str
            rh_repo['product'],
            rh_repo['name'],
            rh_repo['reposet'],
            rh_repo['releasever'])
        repo_name = rh_repo['name']
    # Sync repository
    task_result = entities.Repository(id=repo_id).sync()['result']
    self.assertEqual(
        task_result,
        u'success',
        u"Sync for repository {0} failed.".format(repo_name))
    make_contentview(session, org=self.org_name, name=cv_name)
    self.assertIsNotNone(self.content_views.search(cv_name))
def setUpClass(cls):  # noqa
    """Set up organization, product and repositories for tests."""
    super(CVRedHatContent, cls).setUpClass()
    cls.org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with open(manifests.clone(), "rb") as manifest:
        entities.Subscription().upload(data={"organization_id": cls.org.id},
                                       files={"content": manifest})
    repo_id = enable_rhrepo_and_fetchid(
        basearch="x86_64",
        org_id=cls.org.id,
        product=PRDS["rhel"],
        repo=REPOS["rhst7"]["name"],
        reposet=REPOSET["rhst7"],
        releasever=None,
    )
    # Sync once so class tests can rely on repository content being present.
    cls.repo = entities.Repository(id=repo_id)
    cls.repo.sync()
def test_positive_view_cve(self):
    """View CVE number(s) in Errata Details page

    @id: e1c2de13-fed8-448e-b618-c2adb6e82a35

    @Setup: Errata synced on satellite server.

    @Steps:

    1. Go to Content -> Errata. Select an Errata.

    @Assert:

    1: Check if the CVE information is shown in Errata Details page.

    2. Check if 'N/A' is displayed if CVE information is not present.

    @CaseLevel: Integration
    """
    real_errata_id = 'RHSA-2014:1873'  # rhva6 errata with CVEs
    real_errata_cves = 'CVE-2014-3633 , CVE-2014-3657 , CVE-2014-7823'
    repo_with_cves_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=self.session_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhva6']['name'],
        reposet=REPOSET['rhva6'],
        releasever=DEFAULT_RELEASE_VERSION,
    )
    # Sync must succeed before the errata can appear in the UI.
    self.assertEqual(
        entities.Repository(id=repo_with_cves_id).sync()['result'],
        'success'
    )
    with Session(self.browser):
        self.errata.check_errata_details(
            real_errata_id,
            [['CVEs', real_errata_cves]],
            only_applicable=False,
        )
        # An errata without CVEs must render 'N/A' in the CVEs field.
        self.errata.check_errata_details(
            CUSTOM_REPO_ERRATA_ID,
            [['CVEs', 'N/A']],
            only_applicable=False,
        )
def test_positive_redhat_synchronize(self):
    """Sync RedHat Repository.

    @Feature: Repositories

    @Assert: Repository synced should fetch the data successfully.
    """
    org = entities.Organization().create()
    # Upload a subscription manifest so the RH repo can be enabled.
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    # sync() raises on task failure, which is the assertion here.
    entities.Repository(id=repo_id).sync()
def test_positive_filter_by_cve(self):
    """Filter errata by CVE

    @id: a921d4c2-8d3d-4462-ba6c-fbd4b898a3f2

    @Setup: Errata synced on satellite server.

    @Steps:

    1. GET /katello/api/errata

    @Assert: Errata is filtered by CVE.

    @CaseLevel: System

    NOTE(review): despite the title, the assertions below verify CVE
    *ordering* ('order': 'cve DESC'), not filtering — confirm intent.
    """
    # Reuse the rhva6 repo if a previous test already enabled it.
    repo = entities.Repository(name=REPOS['rhva6']['name']).search(
        query={'organization_id': self.org.id})
    if repo:
        repo = repo[0]
    else:
        repo_with_cves_id = enable_rhrepo_and_fetchid(
            basearch=DEFAULT_ARCHITECTURE,
            org_id=self.org.id,
            product=PRDS['rhel'],
            repo=REPOS['rhva6']['name'],
            reposet=REPOSET['rhva6'],
            releasever=DEFAULT_RELEASE_VERSION,
        )
        repo = entities.Repository(id=repo_with_cves_id)
        self.assertEqual(repo.sync()['result'], 'success')
    erratum_list = entities.Errata(repository=repo).search(query={
        'order': 'cve DESC',
        'per_page': 1000,
    })
    # Most of Errata don't have any CVEs. Removing empty CVEs from results
    erratum_cves = [
        errata.cves for errata in erratum_list if errata.cves
    ]
    # Verifying each errata have its CVEs sorted in DESC order
    for errata_cves in erratum_cves:
        cve_ids = [cve['cve_id'] for cve in errata_cves]
        self.assertEqual(cve_ids, sorted(cve_ids, reverse=True))
def setUpClass(cls):  # noqa
    """Set up organization, product and repositories for tests."""
    super(ContentViewRedHatContent, cls).setUpClass()
    cls.org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': cls.org.id},
            files={'content': manifest.content},
        )
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=cls.org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    # Sync once so class tests can rely on repository content being present.
    cls.repo = entities.Repository(id=repo_id)
    cls.repo.sync()
def test_positive_sync_rh(self):
    """Sync RedHat Repository.

    @id: d69c44cd-753c-4a75-9fd5-a8ed963b5e04

    @Assert: Synced repo should fetch the data successfully.

    @CaseLevel: Integration
    """
    org = entities.Organization().create()
    # Upload a subscription manifest so the RH repo can be enabled.
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    # sync() raises on task failure, which is the assertion here.
    entities.Repository(id=repo_id).sync()
def test_positive_sync_rh_atomic(self):
    """Sync RH Atomic Ostree Repository.

    @id: 38c8aeaa-5ad2-40cb-b1d2-f0ac604f9fdd

    @Assert: Synced repo should fetch the data successfully.

    @CaseLevel: Integration
    """
    org = entities.Organization().create()
    # Upload a subscription manifest so the RH repo can be enabled.
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    # Atomic host ostree repo is arch/release independent, hence None.
    repo_id = enable_rhrepo_and_fetchid(
        org_id=org.id,
        product=PRDS['rhah'],
        repo=REPOS['rhaht']['name'],
        reposet=REPOSET['rhaht'],
        releasever=None,
        basearch=None,
    )
    # sync() raises on task failure, which is the assertion here.
    entities.Repository(id=repo_id).sync()
def setUpClass(cls):
    """Set up single org with subscription to 1 RH and 1 custom products to
    reuse in tests
    """
    super(ErrataTestCase, cls).setUpClass()
    cls.env = entities.LifecycleEnvironment(
        organization=cls.session_org).create()
    cls.content_view = entities.ContentView(
        organization=cls.session_org).create()
    cls.activation_key = entities.ActivationKey(
        environment=cls.env,
        organization=cls.session_org,
    ).create()
    # RH Satellite Tools repo wired into the CV/LCE/AK in one helper call.
    setup_org_for_a_rh_repo({
        'product': PRDS['rhel'],
        'repository-set': REPOSET['rhst7'],
        'repository': REPOS['rhst7']['name'],
        'organization-id': cls.session_org.id,
        'content-view-id': cls.content_view.id,
        'lifecycle-environment-id': cls.env.id,
        'activationkey-id': cls.activation_key.id,
    }, force_manifest_upload=True)
    # Custom repo wired into the same CV/LCE/AK.
    cls.custom_entitites = setup_org_for_a_custom_repo({
        'url': CUSTOM_REPO_URL,
        'organization-id': cls.session_org.id,
        'content-view-id': cls.content_view.id,
        'lifecycle-environment-id': cls.env.id,
        'activationkey-id': cls.activation_key.id,
    })
    # Additionally enable and sync the RHVA6 repo for errata with CVEs.
    rhva_repo = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=cls.session_org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhva6']['name'],
        reposet=REPOSET['rhva6'],
        releasever=DEFAULT_RELEASE_VERSION,
    )
    assert entities.Repository(id=rhva_repo).sync()['result'] == 'success'
    cls.rhva_errata_id = REAL_4_ERRATA_ID
    cls.rhva_errata_cves = REAL_4_ERRATA_CVES
def test_positive_synchronize_rh_product_current_sync_date(self):
    """Create a sync plan with current datetime as a sync date, add a
    RH product and verify the product gets synchronized on the next
    sync occurrence

    @id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    @Assert: Product is synchronized successfully.

    @BZ: 1279539

    @CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    org = entities.Organization().create()
    # Upload a subscription manifest so RH repos can be enabled.
    with manifests.clone() as manifest:
        entities.Subscription().upload(data={"organization_id": org.id},
                                       files={"content": manifest.content})
    repo_id = enable_rhrepo_and_fetchid(
        basearch="x86_64",
        org_id=org.id,
        product=PRDS["rhel"],
        repo=REPOS["rhst7"]["name"],
        reposet=REPOSET["rhst7"],
        releasever=None,
    )
    product = entities.Product(name=PRDS["rhel"], organization=org).search()[0]
    repo = entities.Repository(id=repo_id).read()
    # Plan starts "now"; next recurrence is one hourly interval away.
    sync_plan = entities.SyncPlan(
        organization=org, enabled=True, interval=u"hourly",
        sync_date=datetime.utcnow()
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={"product_ids": [product.id]})
    # Wait half of expected time
    sleep(interval / 2)
    # Verify product has not been synced yet
    self.validate_repo_content(repo, ["erratum", "package", "package_group"],
                               after_sync=False)
    # Wait the rest of expected time
    sleep(interval / 2)
    # Verify product was synced successfully
    self.validate_repo_content(repo, ["erratum", "package", "package_group"])
def setup_to_create_cv(self, repo_name=None, repo_url=None, repo_type=None,
                       rh_repo=None, org_id=None):
    """Create product/repo and sync it

    :param repo_name: custom repo name; random when not given (custom path).
    :param repo_url: custom repo URL; defaults to ``FAKE_1_YUM_REPO``.
    :param repo_type: custom repo content type; defaults to yum.
    :param rh_repo: dict describing a RH repo; when given, the manifest is
        uploaded and the RH repo is enabled instead of creating a custom one.
    :param org_id: organization to operate in; defaults to ``self.org_id``.
    """
    if not rh_repo:
        repo_name = repo_name or gen_string("alpha", 8)
        # Creates new custom product via API's
        product_attrs = entities.Product(
            organization=org_id or self.org_id
        ).create_json()
        # Creates new custom repository via API's
        repo_attrs = entities.Repository(
            name=repo_name,
            url=(repo_url or FAKE_1_YUM_REPO),
            content_type=(repo_type or REPO_TYPE['yum']),
            product=product_attrs['id'],
        ).create_json()
        repo_id = repo_attrs['id']
    elif rh_repo:
        # Clone the manifest and fetch it's path.
        manifest_path = manifests.clone()
        # Uploads the manifest and returns the result.
        entities.Organization(id=org_id).upload_manifest(
            path=manifest_path
        )
        # Enables the RedHat repo and fetches it's Id.
        repo_id = utils.enable_rhrepo_and_fetchid(
            rh_repo['basearch'],
            str(org_id),  # Org Id is passed as data in API hence str
            rh_repo['product'],
            rh_repo['name'],
            rh_repo['reposet'],
            rh_repo['releasever'])
        repo_name = rh_repo['name']
    # Sync repository
    entities.Repository(id=repo_id).sync()
def test_redhat_sync_1(self):
    """@Test: Sync RedHat Repository.

    @Feature: Repositories

    @Assert: Repository synced should fetch the data successfully.
    """
    cloned_manifest_path = manifests.clone()
    org_id = entities.Organization().create_json()['id']
    repo = "Red Hat Enterprise Linux 6 Server - RH Common RPMs x86_64 6.3"
    # Upload the manifest so the RH repo set can be enabled.
    entities.Organization(id=org_id).upload_manifest(
        path=cloned_manifest_path
    )
    repo_id = utils.enable_rhrepo_and_fetchid(
        "x86_64",
        org_id,
        "Red Hat Enterprise Linux Server",
        repo,
        "Red Hat Enterprise Linux 6 Server - RH Common (RPMs)",
        "6.3",
    )
    # sync() raises on task failure, which is the assertion here.
    entities.Repository(id=repo_id).sync()
def test_positive_get_applicable_for_host(self):
    """Get applicable errata ids for a host

    :id: 51d44d51-eb3f-4ee4-a1df-869629d427ac

    :Setup:

        1. Errata synced on satellite server.
        2. Some Content hosts present.

    :Steps: GET /api/v2/hosts/:id/errata

    :expectedresults: The available errata is retrieved.

    :CaseLevel: System
    """
    org = entities.Organization().create()
    env = entities.LifecycleEnvironment(
        organization=org).create()
    content_view = entities.ContentView(
        organization=org).create()
    activation_key = entities.ActivationKey(
        environment=env,
        organization=org,
    ).create()
    # Wire RH Satellite Tools and a custom repo into the CV/LCE/AK.
    setup_org_for_a_rh_repo({
        'product': PRDS['rhel'],
        'repository-set': REPOSET['rhst6'],
        'repository': REPOS['rhst6']['name'],
        'organization-id': org.id,
        'content-view-id': content_view.id,
        'lifecycle-environment-id': env.id,
        'activationkey-id': activation_key.id,
    }, force_manifest_upload=True)
    setup_org_for_a_custom_repo({
        'url': CUSTOM_REPO_URL,
        'organization-id': org.id,
        'content-view-id': content_view.id,
        'lifecycle-environment-id': env.id,
        'activationkey-id': activation_key.id,
    })
    # Additionally enable/sync RHVA6, then publish it via the same CV.
    repo_id = enable_rhrepo_and_fetchid(
        basearch=DEFAULT_ARCHITECTURE,
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhva6']['name'],
        reposet=REPOSET['rhva6'],
        releasever=DEFAULT_RELEASE_VERSION,
    )
    repo = entities.Repository(id=repo_id)
    self.assertEqual(repo.sync()['result'], 'success')
    content_view = content_view.read()
    content_view.repository.append(repo)
    content_view = content_view.update(['repository'])
    content_view.publish()
    # Promote the latest CV version into the lifecycle environment.
    versions = sorted(content_view.read().version, key=lambda ver: ver.id)
    cvv = versions[-1].read()
    promote(cvv, env.id)
    with VirtualMachine(distro=DISTRO_RHEL6) as client:
        client.install_katello_ca()
        client.register_contenthost(org.label, activation_key.name)
        self.assertTrue(client.subscribed)
        client.enable_repo(REPOS['rhst6']['id'])
        client.enable_repo(REPOS['rhva6']['id'])
        client.install_katello_agent()
        host = entities.Host().search(query={
            'search': 'name={0}'.format(client.hostname)})[0].read()
        # Fresh host: no applicable errata expected.
        erratum = self._fetch_available_errata(host, 0)
        self.assertEqual(len(erratum), 0)
        # Installing an outdated custom package makes its erratum applicable.
        client.run(
            'yum install -y {0}'.format(FAKE_1_CUSTOM_PACKAGE))
        erratum = self._fetch_available_errata(host, 1)
        self.assertEqual(len(erratum), 1)
        self.assertIn(
            CUSTOM_REPO_ERRATA_ID,
            [errata['errata_id'] for errata in erratum],
        )
        # Installing an outdated RH package adds the RH errata as well.
        client.run('yum install -y {0}'.format(REAL_0_RH_PACKAGE))
        erratum = self._fetch_available_errata(host, 3)
        self.assertEqual(len(erratum), 3)
        self.assertTrue(
            {REAL_1_ERRATA_ID, REAL_2_ERRATA_ID}.issubset(
                {errata['errata_id'] for errata in erratum})
        )