def setUpClass(cls):
    """Sync an RPM repo, drop one package, publish, and parse updateinfo.

    Do the following:

    1. Create an RPM repository whose feed is ``RPM_FEED_URL``.
    2. Sync the repository and attach a YUM distributor to it.
    3. Unassociate the ``gorilla`` package from the repository.
    4. Publish the repository.
    5. Locate ``updateinfo.xml`` via ``repomd.xml``, parse it, and save the
       ``pkglist`` elements of the gorilla erratum on the class.
    """
    super(ErratumPkgListCountTestCase, cls).setUpClass()
    api_client = api.Client(cls.cfg, api.json_handler)

    # Create the repository and schedule it for deletion.
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_FEED_URL
    repo_href = api_client.post(REPOSITORY_PATH, repo_body)['_href']
    cls.resources.add(repo_href)

    # Sync the repository, then give it a YUM distributor.
    utils.sync_repo(cls.cfg, repo_href)
    distributor = api_client.post(
        urljoin(repo_href, 'distributors/'),
        gen_distributor(),
    )

    # Drop the gorilla package unit from the repository.
    api_client.post(
        urljoin(repo_href, 'actions/unassociate/'),
        {'criteria': get_unit_unassociate_criteria(RPM_ERRATUM_RPM_NAME)},
    )

    # Publish through the distributor created above.
    api_client.post(
        urljoin(repo_href, 'actions/publish/'),
        {'id': distributor['id']},
    )

    # Walk repomd.xml to updateinfo.xml (or updateinfo.xml.gz) and parse it.
    relative_url = distributor['config']['relative_url']
    root_element = get_repomd_xml(
        cls.cfg,
        urljoin('/pulp/repos/', relative_url),
        'updateinfo',
    )

    # Stash the gorilla erratum's pkglist elements for the test methods.
    erratum = _get_updates_by_id(root_element)[RPM_ERRATUM_ID]
    cls.erratum_pkglists = erratum.findall('pkglist')
def setUpClass(cls):
    """Build, prune and publish an RPM repo; parse its updateinfo file.

    More specifically:

    1. Create an RPM repository fed by ``RPM_SIGNED_FEED_URL``.
    2. Sync it, and add a YUM distributor.
    3. Remove the ``gorilla`` package from the repository.
    4. Publish the repository.
    5. Fetch ``updateinfo.xml`` from the distributor (found via
       ``repomd.xml``), parse it, and keep the gorilla erratum's
       ``pkglist`` elements on the class.
    """
    super(ErratumPkgListCountTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)

    # Create the repository; mark it for deletion at tear-down.
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_href = client.post(REPOSITORY_PATH, body)['_href']
    cls.resources.add(repo_href)

    # Sync, then attach a YUM distributor.
    utils.sync_repo(cls.cfg, repo_href)
    distributor = client.post(
        urljoin(repo_href, 'distributors/'),
        gen_distributor(),
    )

    # Unassociate the gorilla package unit.
    client.post(
        urljoin(repo_href, 'actions/unassociate/'),
        {'criteria': get_unit_unassociate_criteria(RPM_ERRATUM_RPM_NAME)},
    )

    # Publish the repository with the new distributor.
    client.post(
        urljoin(repo_href, 'actions/publish/'),
        {'id': distributor['id']},
    )

    # Find updateinfo.xml (possibly gzipped) through repomd.xml and parse it.
    root_element = get_repomd_xml(
        cls.cfg,
        urljoin('/pulp/repos/', distributor['config']['relative_url']),
        'updateinfo',
    )

    # Save the pkglist elements of the gorilla erratum.
    updates = _get_updates_by_id(root_element)
    cls.erratum_pkglists = updates[RPM_ERRATUM_ID].findall('pkglist')
def setUpClass(cls):
    """Create two repositories, where the first feeds the second.

    Provides a server config and an iterable of resources to delete.
    The following steps are executed:

    1. Create repository foo with a feed, then sync and publish it.
    2. Create repository bar with foo as its feed, and sync it.
    3. Get the content of both repositories.
    4. Remove a random unit from repository foo, and publish foo.
    5. Sync repository bar.
    6. Get the content of both repositories again.
    """
    super(RemoveMissingTestCase, cls).setUpClass()
    if check_issue_2277(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')
    cls.responses = {}
    repo_hrefs = []

    # Create the first repository, mark it for deletion, and sync it.
    client = api.Client(cls.cfg)
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_hrefs.append(
        client.post(REPOSITORY_PATH, repo_body).json()['_href']
    )
    cls.resources.add(repo_hrefs[0])
    cls.responses['first sync'] = utils.sync_repo(cls.cfg, repo_hrefs[0])

    # Attach a distributor to the first repository and publish it.
    cls.responses['distribute'] = client.post(
        urljoin(repo_hrefs[0], 'distributors/'),
        gen_distributor(),
    )
    cls.responses['first publish'] = client.post(
        urljoin(repo_hrefs[0], 'actions/publish/'),
        {'id': cls.responses['distribute'].json()['id']},
    )

    # Create and sync the second repository, fed by the first. SSL
    # validation is disabled for a practical reason: each HTTPS feed needs
    # a certificate to work, which is burdensome to set up here.
    repo_body = gen_repo()
    relative_url = (
        cls.responses['distribute'].json()['config']['relative_url']
    )
    repo_body['importer_config']['feed'] = urljoin(
        cls.cfg.base_url,
        _PUBLISH_DIR + relative_url,
    )
    repo_body['importer_config']['remove_missing'] = True  # see docstring
    repo_body['importer_config']['ssl_validation'] = False
    repo_hrefs.append(
        client.post(REPOSITORY_PATH, repo_body).json()['_href']
    )
    cls.resources.add(repo_hrefs[1])
    cls.responses['second sync'] = utils.sync_repo(cls.cfg, repo_hrefs[1])

    # Record the contents of both repositories.
    for index, repo_href in enumerate(repo_hrefs):
        cls.responses['repo {} units, pre'.format(index)] = client.post(
            urljoin(repo_href, 'search/units/'),
            {'criteria': {}},
        )

    # Pick a random RPM from the first repository...
    rpm_names = [
        unit['metadata']['name']
        for unit in cls.responses['repo 0 units, pre'].json()
        if unit['unit_type_id'] == 'rpm'
    ]
    cls.removed_unit = random.choice(rpm_names)

    # ...and unassociate it from the first repository.
    cls.responses['remove unit'] = client.post(
        urljoin(repo_hrefs[0], 'actions/unassociate/'),
        {'criteria': get_unit_unassociate_criteria(cls.removed_unit)},
    )

    # Publish the first repository again, and re-sync the second.
    cls.responses['second publish'] = client.post(
        urljoin(repo_hrefs[0], 'actions/publish/'),
        {'id': cls.responses['distribute'].json()['id']},
    )
    cls.responses['third sync'] = utils.sync_repo(cls.cfg, repo_hrefs[1])

    # Record the contents of both repositories once more.
    for index, repo_href in enumerate(repo_hrefs):
        cls.responses['repo {} units, post'.format(index)] = client.post(
            urljoin(repo_href, 'search/units/'),
            {'criteria': {}},
        )
def setUpClass(cls):
    """Create two repositories, the first acting as feed of the second.

    Provides a server config and an iterable of resources to delete.
    The following steps are executed:

    1. Create repository foo with a feed, then sync and publish it.
    2. Create repository bar with foo as its feed, and sync it.
    3. Get the content of both repositories.
    4. Remove a random unit from repository foo, and publish foo.
    5. Sync repository bar.
    6. Get the content of both repositories again.
    """
    super(RemoveMissingTestCase, cls).setUpClass()
    if selectors.bug_is_untestable(2277, cls.cfg.version):
        raise unittest.SkipTest("https://pulp.plan.io/issues/2277")
    cls.responses = {}
    repo_hrefs = []

    # Create the first repository, mark it for deletion, and sync it.
    client = api.Client(cls.cfg)
    repo_body = gen_repo()
    repo_body["importer_config"]["feed"] = RPM_FEED_URL
    repo_hrefs.append(
        client.post(REPOSITORY_PATH, repo_body).json()["_href"]
    )
    cls.resources.add(repo_hrefs[0])
    cls.responses["first sync"] = utils.sync_repo(cls.cfg, repo_hrefs[0])

    # Attach a distributor to the first repository and publish it.
    cls.responses["distribute"] = client.post(
        urljoin(repo_hrefs[0], "distributors/"),
        gen_distributor(),
    )
    cls.responses["first publish"] = client.post(
        urljoin(repo_hrefs[0], "actions/publish/"),
        {"id": cls.responses["distribute"].json()["id"]},
    )

    # Create and sync a second repository, fed by the first. SSL
    # validation is disabled for a practical reason: each HTTPS feed needs
    # a certificate to work, which is burdensome to set up here.
    repo_body = gen_repo()
    relative_url = (
        cls.responses["distribute"].json()["config"]["relative_url"]
    )
    repo_body["importer_config"]["feed"] = urljoin(
        cls.cfg.base_url,
        _PUBLISH_DIR + relative_url,
    )
    repo_body["importer_config"]["remove_missing"] = True  # see docstring
    repo_body["importer_config"]["ssl_validation"] = False
    repo_hrefs.append(
        client.post(REPOSITORY_PATH, repo_body).json()["_href"]
    )
    cls.resources.add(repo_hrefs[1])
    cls.responses["second sync"] = utils.sync_repo(cls.cfg, repo_hrefs[1])

    # Record the contents of both repositories.
    for index, repo_href in enumerate(repo_hrefs):
        cls.responses["repo {} units, pre".format(index)] = client.post(
            urljoin(repo_href, "search/units/"),
            {"criteria": {}},
        )

    # Pick a random RPM from the first repository...
    rpm_names = [
        unit["metadata"]["name"]
        for unit in cls.responses["repo 0 units, pre"].json()
        if unit["unit_type_id"] == "rpm"
    ]
    cls.removed_unit = random.choice(rpm_names)

    # ...and unassociate it from the first repository.
    cls.responses["remove unit"] = client.post(
        urljoin(repo_hrefs[0], "actions/unassociate/"),
        {"criteria": get_unit_unassociate_criteria(cls.removed_unit)},
    )

    # Publish the first repository again, and re-sync the second.
    cls.responses["second publish"] = client.post(
        urljoin(repo_hrefs[0], "actions/publish/"),
        {"id": cls.responses["distribute"].json()["id"]},
    )
    cls.responses["third sync"] = utils.sync_repo(cls.cfg, repo_hrefs[1])

    # Record the contents of both repositories once more.
    for index, repo_href in enumerate(repo_hrefs):
        cls.responses["repo {} units, post".format(index)] = client.post(
            urljoin(repo_href, "search/units/"),
            {"criteria": {}},
        )
def setUpClass(cls):
    """Set up two repositories, with the first serving as the second's feed.

    Provides a server config and an iterable of resources to delete.
    These steps are executed:

    1. Create repository foo with a feed; sync and publish it.
    2. Create repository bar whose feed is foo, and sync it.
    3. Fetch the content of both repositories.
    4. Remove a random unit from foo, and publish foo again.
    5. Sync repository bar again.
    6. Fetch the content of both repositories again.
    """
    super(RemoveMissingTestCase, cls).setUpClass()
    if check_issue_2277(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')
    cls.responses = {}
    hrefs = []  # repository hrefs

    # First repository: create, mark for deletion, sync.
    client = api.Client(cls.cfg)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    hrefs.append(client.post(REPOSITORY_PATH, body).json()['_href'])
    cls.resources.add(hrefs[0])
    cls.responses['first sync'] = utils.sync_repo(cls.cfg, hrefs[0])

    # First repository: add a distributor, publish.
    cls.responses['distribute'] = client.post(
        urljoin(hrefs[0], 'distributors/'),
        gen_distributor(),
    )
    distributor = cls.responses['distribute'].json()
    cls.responses['first publish'] = client.post(
        urljoin(hrefs[0], 'actions/publish/'),
        {'id': distributor['id']},
    )

    # Second repository: create (fed by the first) and sync. SSL validation
    # is disabled for a practical reason: every HTTPS feed needs a
    # certificate to work, which is burdensome to arrange here.
    body = gen_repo()
    body['importer_config']['feed'] = urljoin(
        cls.cfg.base_url,
        _PUBLISH_DIR + distributor['config']['relative_url'],
    )
    body['importer_config']['remove_missing'] = True  # see docstring
    body['importer_config']['ssl_validation'] = False
    hrefs.append(client.post(REPOSITORY_PATH, body).json()['_href'])
    cls.resources.add(hrefs[1])
    cls.responses['second sync'] = utils.sync_repo(cls.cfg, hrefs[1])

    # Capture the units in each repository before the removal.
    for num, href in enumerate(hrefs):
        cls.responses['repo {} units, pre'.format(num)] = client.post(
            urljoin(href, 'search/units/'),
            {'criteria': {}},
        )

    # Choose a random RPM from the first repository and unassociate it.
    candidates = [
        unit['metadata']['name']
        for unit in cls.responses['repo 0 units, pre'].json()
        if unit['unit_type_id'] == 'rpm'
    ]
    cls.removed_unit = random.choice(candidates)
    cls.responses['remove unit'] = client.post(
        urljoin(hrefs[0], 'actions/unassociate/'),
        {'criteria': get_unit_unassociate_criteria(cls.removed_unit)},
    )

    # Re-publish the first repository; re-sync the second.
    cls.responses['second publish'] = client.post(
        urljoin(hrefs[0], 'actions/publish/'),
        {'id': distributor['id']},
    )
    cls.responses['third sync'] = utils.sync_repo(cls.cfg, hrefs[1])

    # Capture the units in each repository after the removal.
    for num, href in enumerate(hrefs):
        cls.responses['repo {} units, post'.format(num)] = client.post(
            urljoin(href, 'search/units/'),
            {'criteria': {}},
        )