def test_positive_reposet_enable(self):
    """Enable repo from reposet

    @id: dedcecf7-613a-4e85-a3af-92fb57e2b0a1

    @Assert: Repository was enabled
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    reposet = entities.RepositorySet(
        name=REPOSET['rhva6'],
        product=product,
    ).search()[0]
    reposet.enable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    repositories = reposet.available_repositories()['results']
    self.assertTrue([
        repo['enabled']
        for repo in repositories
        if (repo['substitutions']['basearch'] == 'x86_64' and
            repo['substitutions']['releasever'] == '6Server')
    ][0])
def enable_rhrepo_and_fetchid(basearch, org_id, product, repo, reposet,
                              releasever=None):
    """Enable a Red Hat repository and fetch its ID.

    :param str org_id: The organization ID.
    :param str product: The product name in which the repository exists.
    :param str reposet: The reposet name in which the repository exists.
    :param str repo: The repository name whose ID is to be fetched.
    :param str basearch: The architecture of the repository.
    :param str optional releasever: The releasever of the repository.
    :return: Returns the repository ID.
    :rtype: str

    """
    product = entities.Product(name=product, organization=org_id).search()[0]
    r_set = entities.RepositorySet(name=reposet, product=product).search()[0]
    payload = {}
    if basearch is not None:
        payload['basearch'] = basearch
    if releasever is not None:
        payload['releasever'] = releasever
    r_set.enable(data=payload)
    result = entities.Repository(name=repo).search(
        query={'organization_id': org_id})
    if bz_bug_is_open(1252101):
        # Retry the search a few times while the repository record appears
        for _ in range(5):
            if len(result) > 0:
                break
            time.sleep(5)
            result = entities.Repository(name=repo).search(
                query={'organization_id': org_id})
    return result[0].id
def test_positive_reposet_disable(self):
    """Disable repo from reposet

    @id: 60a102df-099e-4325-8924-2a31e5f738ba

    @Assert: Repository was disabled
    """
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        upload_manifest(org.id, manifest.content)
    product = entities.Product(
        name=PRDS['rhel'],
        organization=org,
    ).search()[0]
    reposet = entities.RepositorySet(
        name=REPOSET['rhva6'],
        product=product,
    ).search()[0]
    reposet.enable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    reposet.disable(data={'basearch': 'x86_64', 'releasever': '6Server'})
    repositories = reposet.available_repositories()['results']
    self.assertFalse([
        repo['enabled']
        for repo in repositories
        if (repo['substitutions']['basearch'] == 'x86_64' and
            repo['substitutions']['releasever'] == '6Server')
    ][0])
def _sync_rh_repos_to_satellite(org):
    """Task to sync the Red Hat repositories required during upgrade

    :param org: ```nailgun.entities.Organization``` entity of capsule
    :returns tuple: RHSCL and RHEL 7 Server ```nailgun.entities.Repository```
        entities, with the repository label stored on their ``repo_id`` attribute
    """
    rhelver = '7'
    arch = 'x86_64'
    # Enable rhscl repository
    scl_product = entities.Product(
        name=rhelcontents['rhscl_sat64']['prod'], organization=org
    ).search(query={'per_page': 100})[0]
    scl_reposet = entities.RepositorySet(
        name=rhelcontents['rhscl']['repo'].format(os_ver=rhelver), product=scl_product
    ).search()[0]
    try:
        scl_reposet.enable(
            data={'basearch': arch, 'releasever': '7Server', 'organization_id': org.id})
    except requests.exceptions.HTTPError as exp:
        logger.warn(exp)
    time.sleep(20)
    # Sync enabled repo from CDN
    scl_repo = entities.Repository(
        name=rhelcontents['rhscl']['repofull'].format(os_ver=rhelver, arch=arch)
    ).search(query={'organization_id': org.id, 'per_page': 100})[0]
    call_entity_method_with_timeout(entities.Repository(id=scl_repo.id).sync, timeout=2500)
    # Enable RHEL 7 Server repository
    server_product = entities.Product(
        name=rhelcontents['server']['prod'], organization=org
    ).search(query={'per_page': 100})[0]
    server_reposet = entities.RepositorySet(
        name=rhelcontents['server']['repo'].format(os_ver=rhelver), product=server_product
    ).search()[0]
    try:
        server_reposet.enable(
            data={'basearch': arch, 'releasever': '7Server', 'organization_id': org.id})
    except requests.exceptions.HTTPError as exp:
        logger.warn(exp)
    time.sleep(20)
    # Sync enabled repo from CDN
    server_repo = entities.Repository(
        name=rhelcontents['server']['repofull'].format(os_ver=rhelver, arch=arch)
    ).search(query={'organization_id': org.id, 'per_page': 100})[0]
    call_entity_method_with_timeout(entities.Repository(id=server_repo.id).sync, timeout=3600)
    scl_repo.repo_id = rhelcontents['rhscl']['label'].format(os_ver=rhelver)
    server_repo.repo_id = rhelcontents['server']['label'].format(os_ver=rhelver)
    return scl_repo, server_repo
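# A minimal usage sketch (not from the source) of how the tuple returned by
# _sync_rh_repos_to_satellite might be consumed, following the content-view
# pattern used elsewhere in these helpers. The content-view name 'capsule_cv'
# and the wrapper function are hypothetical.
def _example_add_synced_repos_to_cv(org):
    scl_repo, server_repo = _sync_rh_repos_to_satellite(org)
    # The repository labels were stashed on ``repo_id`` for later content overrides
    logger.info('Synced repo labels: %s, %s', scl_repo.repo_id, server_repo.repo_id)
    cv = entities.ContentView(name='capsule_cv', organization=org).create()  # hypothetical CV
    cv.repository = [scl_repo, server_repo]
    cv.update(['repository'])
    return cv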
def _sync_capsule_subscription_to_capsule_ak(ak):
    """Syncs the to_version capsule contents, adds them to the CV, and attaches
    the content to the AK through which the Capsule is registered

    :param ak: ```nailgun.entities.ActivationKey``` used for capsule subscription
    """
    cv = ak.content_view.read()
    org = ak.organization
    capsule_repo = os.environ.get('CAPSULE_URL')
    to_version = os.environ.get('TO_VERSION')
    os_ver = os.environ.get('OS')[-1]
    # If a custom capsule repo URL is given, create a custom repo; otherwise
    # enable the capsule repo from the Red Hat Repositories
    if capsule_repo:
        cap_product = entities.Product(
            name=customcontents['capsule']['prod'], organization=org).create()
        cap_repo = entities.Repository(
            name=customcontents['capsule']['repo'],
            product=cap_product,
            url=capsule_repo,
            organization=org,
            content_type='yum'
        ).create()
    else:
        cap_product = entities.Product(
            name=rhelcontents['capsule']['prod'],
            organization=org
        ).search(query={'per_page': 100})[0]
        cap_reposet = entities.RepositorySet(
            name=rhelcontents['capsule']['repo'].format(cap_ver=to_version, os_ver=os_ver),
            product=cap_product
        ).search()[0]
        try:
            cap_reposet.enable(
                data={'basearch': 'x86_64', 'releasever': '7Server', 'organization_id': org.id})
        except requests.exceptions.HTTPError as exp:
            logger.warn(exp)
        cap_repo = entities.Repository(
            name=rhelcontents['capsule']['repofull'].format(
                cap_ver=to_version, os_ver=os_ver, arch='x86_64')
        ).search(query={'organization_id': org.id, 'per_page': 100})[0]
    call_entity_method_with_timeout(entities.Repository(id=cap_repo.id).sync, timeout=2500)
    # Add repos to CV
    cv.repository += [cap_repo]
    cv.update(['repository'])
    ak = ak.read()
    if capsule_repo:
        cap_sub = entities.Subscription().search(
            query={'search': 'name={0}'.format(customcontents['capsule']['prod'])})[0]
        ak.add_subscriptions(data={
            'quantity': 1,
            'subscription_id': cap_sub.id,
        })
    else:
        ak.content_override(
            data={
                'content_override': {
                    'content_label': rhelcontents['capsule']['label'].format(
                        cap_ver=to_version, os_ver=os_ver),
                    'value': '1'}
            }
        )
def _sync_sattools_repos_to_satellite_for_capsule(capsuletools_url, org):
    """Creates a custom repo or enables the RH Tools repo on Satellite and syncs it
    for the capsule upgrade

    :param str capsuletools_url: The capsule tools repo url
    :param org: ```nailgun.entities.Organization``` entity of capsule

    :returns: ```nailgun.entities.Repository``` entity for capsule
    """
    to_ver = os.environ.get('TO_VERSION')
    rhelver = '7'
    arch = 'x86_64'
    if capsuletools_url:
        captools_product = entities.Product(
            name=customcontents['capsule_tools']['prod'], organization=org).create()
        captools_repo = entities.Repository(
            name=customcontents['capsule_tools']['repo'],
            product=captools_product,
            url=capsuletools_url,
            organization=org,
            content_type='yum'
        ).create()
    else:
        captools_product = entities.Product(
            name=rhelcontents['tools']['prod'],
            organization=org).search(query={'per_page': 100})[0]
        cap_reposet = entities.RepositorySet(
            name=rhelcontents['tools']['repo'].format(sat_ver=to_ver, os_ver=rhelver),
            product=captools_product).search()[0]
        try:
            cap_reposet.enable(data={'basearch': arch, 'organization_id': org.id})
        except requests.exceptions.HTTPError as exp:
            logger.warn(exp)
        time.sleep(5)
        captools_repo = entities.Repository(
            name=rhelcontents['tools']['repofull'].format(
                sat_ver=to_ver, os_ver=rhelver, arch=arch)
        ).search(query={'organization_id': org.id, 'per_page': 100})[0]
    call_entity_method_with_timeout(
        entities.Repository(id=captools_repo.id).sync, timeout=2500)
    captools_repo.repo_id = rhelcontents['tools']['label'].format(
        os_ver=rhelver, sat_ver=to_ver)
    return captools_repo
def enable_rhrepo_and_fetchid(basearch, org_id, product, repo, reposet,
                              releasever=None, strict=False):
    """Enable a Red Hat repository and fetch its ID.

    :param str org_id: The organization ID.
    :param str product: The product name in which the repository exists.
    :param str reposet: The reposet name in which the repository exists.
    :param str repo: The repository name whose ID is to be fetched.
    :param str basearch: The architecture of the repository.
    :param str optional releasever: The releasever of the repository.
    :param bool optional strict: Raise an exception if the reposet was already
        enabled.
    :return: Returns the repository ID.
    :rtype: str

    """
    product = entities.Product(name=product, organization=org_id).search()[0]
    r_set = entities.RepositorySet(name=reposet, product=product).search()[0]
    payload = {}
    if basearch is not None:
        payload['basearch'] = basearch
    if releasever is not None:
        payload['releasever'] = releasever
    payload['product_id'] = product.id
    try:
        r_set.enable(data=payload)
    except HTTPError as e:
        if (not strict and e.response.status_code == 409
                and 'repository is already enabled'
                in e.response.json()['displayMessage']):
            # The reposet is already enabled; fall through and fetch the repo
            pass
        else:
            raise
    result = entities.Repository(name=repo).search(
        query={'organization_id': org_id})
    return result[0].id
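# A minimal usage sketch (not from the source) of enable_rhrepo_and_fetchid,
# enabling and syncing the RHEL 7 Satellite Tools repository. The PRDS/REPOS/
# REPOSET constants are the same dictionaries used by the tests below; the
# wrapper function name is hypothetical and `org` is assumed to already carry
# an uploaded manifest.
def _example_enable_and_sync_rhst7(org):
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
        strict=False,  # tolerate a reposet that was already enabled
    )
    repo = entities.Repository(id=repo_id).read()
    repo.sync()
    return repo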
def reposet(product):
    """Find and return the repository set matching REPOSET_NAME and product."""
    return entities.RepositorySet(name=REPOSET_NAME, product=product).search()[0]
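# A minimal usage sketch (not from the source): a repository set returned by the
# helper above can be toggled with the same substitution payload used elsewhere
# in this module. The 'x86_64'/'7Server' values and the wrapper name are
# illustrative assumptions.
def _example_toggle_reposet(product):
    r_set = reposet(product)
    r_set.enable(data={'basearch': 'x86_64', 'releasever': '7Server'})
    r_set.disable(data={'basearch': 'x86_64', 'releasever': '7Server'})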
def test_positive_synchronize_rh_product_past_sync_date():
    """Create a sync plan with a past datetime as the sync date, add a RH
    product, and verify the product gets synchronized on the next sync
    occurrence

    :id: 080c316d-4a06-4ee9-b5f6-1b210d8d0593

    :expectedresults: Product is synchronized successfully.

    :customerscenario: true

    :BZ: 1279539, 1879537

    :CaseLevel: System
    """
    interval = 60 * 60  # 'hourly' sync interval in seconds
    delay = 2 * 60
    org = entities.Organization().create()
    with manifests.clone() as manifest:
        entities.Subscription().upload(
            data={'organization_id': org.id}, files={'content': manifest.content})
    repo_id = enable_rhrepo_and_fetchid(
        basearch='x86_64',
        org_id=org.id,
        product=PRDS['rhel'],
        repo=REPOS['rhst7']['name'],
        reposet=REPOSET['rhst7'],
        releasever=None,
    )
    product = entities.Product(name=PRDS['rhel'], organization=org).search()[0]
    repo = entities.Repository(id=repo_id).read()
    sync_plan = entities.SyncPlan(
        organization=org,
        enabled=True,
        interval='hourly',
        sync_date=datetime.utcnow() - timedelta(seconds=interval - delay),
    ).create()
    # Associate sync plan with product
    sync_plan.add_products(data={'product_ids': [product.id]})
    # Wait a quarter of the expected time
    logger.info(
        f"Waiting {delay / 4} seconds to check product {product.name}"
        f" was not synced by {sync_plan.name}")
    sleep(delay / 4)
    # Verify product has not been synced yet
    with pytest.raises(AssertionError):
        validate_task_status(repo.id, org.id, max_tries=1)
    validate_repo_content(repo, ['erratum', 'package', 'package_group'], after_sync=False)
    # Wait until the next recurrence
    logger.info(
        f"Waiting {delay * 3 / 4} seconds to check product {product.name}"
        f" was synced by {sync_plan.name}")
    sleep(delay * 3 / 4)
    # Verify product was synced successfully
    validate_task_status(repo.id, org.id)
    validate_repo_content(repo, ['erratum', 'package', 'package_group'])
    # Check that dissociating the RH product from the sync plan works (BZ#1879537)
    assert len(sync_plan.read().product) == 1
    # Disable the reposet
    reposet = entities.RepositorySet(name=REPOSET['rhst7'], product=product).search()[0]
    reposet.disable(data={
        'basearch': 'x86_64',
        'releasever': None,
        'product_id': product.id
    })
    # Assert that the Sync Plan now has no product associated with it
    assert len(sync_plan.read().product) == 0