def setUpModule():  # pylint:disable=invalid-name
    """Conditionally skip tests. Create repositories with fixture data.

    Skip every test in this module if Pulp issue #1991 is unresolved.
    Otherwise, download signed and unsigned RPM and SRPM packages (plus
    DRPMs when issue #1806 is fixed), create one repository per signing
    state, and upload the matching packages into each. The module-level
    ``_SIGNED_PACKAGES``, ``_UNSIGNED_PACKAGES`` and ``_REPOS`` dicts
    are populated as a side effect.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(1991, cfg.pulp_version):
        raise unittest.SkipTest('https://pulp.plan.io/issues/1991')
    set_up_module()
    # Fetch RPMs.
    _SIGNED_PACKAGES['rpm'] = utils.http_get(RPM_SIGNED_URL)
    _SIGNED_PACKAGES['srpm'] = utils.http_get(SRPM_SIGNED_URL)
    _UNSIGNED_PACKAGES['rpm'] = utils.http_get(RPM_UNSIGNED_URL)
    _UNSIGNED_PACKAGES['srpm'] = utils.http_get(SRPM_UNSIGNED_URL)
    # DRPM handling is gated on Pulp issue #1806.
    if selectors.bug_is_testable(1806, cfg.pulp_version):
        _SIGNED_PACKAGES['drpm'] = utils.http_get(DRPM_SIGNED_URL)
        _UNSIGNED_PACKAGES['drpm'] = utils.http_get(DRPM_UNSIGNED_URL)
    # Create repos, and upload RPMs to them.
    client = api.Client(cfg, api.json_handler)
    try:
        repo = client.post(REPOSITORY_PATH, gen_repo())
        _REPOS['signed'] = repo
        for type_id, pkg in _SIGNED_PACKAGES.items():
            utils.upload_import_unit(cfg, pkg, {'unit_type_id': type_id}, repo)
        repo = client.post(REPOSITORY_PATH, gen_repo())
        _REPOS['unsigned'] = repo
        for type_id, pkg in _UNSIGNED_PACKAGES.items():
            utils.upload_import_unit(cfg, pkg, {'unit_type_id': type_id}, repo)
    except:  # noqa:E722
        # On any failure, undo the partial module state (fixture dicts
        # and any repositories already created) before re-raising. The
        # bare except is deliberate: the exception is always re-raised.
        _SIGNED_PACKAGES.clear()
        _UNSIGNED_PACKAGES.clear()
        for _ in range(len(_REPOS)):
            client.delete(_REPOS.popitem()[1]['_href'])
        raise
def test_all(self):
    """Re-sync a child repository with the ``remove_missing`` enabled.

    Create, sync and publish a parent repository; create a child
    repository feeding from the parent's published URL with
    ``remove_missing`` set; unassociate a random subset of RPMs from the
    parent and re-publish it; re-sync the child; and assert the child's
    sync task reports exactly that many removed units. Skip the test if
    Pulp issue #2616 is unresolved.
    """
    repos = []
    cfg = config.get_config()
    if selectors.bug_is_untestable(2616, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2616')
    # Create 1st repo, sync and publish it.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[0]['_href'])
    repos[0] = _get_details(cfg, repos[0])
    utils.sync_repo(cfg, repos[0])
    utils.publish_repo(cfg, repos[0])
    # Create 2nd repo, sync.
    body = gen_repo()
    body['importer_config']['feed'] = urljoin(
        cfg.get_base_url(),
        _PUBLISH_DIR +
        repos[0]['distributors'][0]['config']['relative_url'],
    )
    body['importer_config']['remove_missing'] = True
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[1]['_href'])
    repos[1] = _get_details(cfg, repos[1])
    utils.sync_repo(cfg, repos[1])
    # Remove an arbitrary number of units from 1st repo, re-publish it.
    # At least one unit is always removed, so removed_count is nonzero.
    units = _get_rpms(cfg, repos[0])
    marked_units = random.sample(units, random.randint(1, len(units)))
    for marked_unit in marked_units:
        criteria = {
            'filters': {
                'unit': {
                    'name': marked_unit['metadata']['name']
                }
            },
            'type_ids': [marked_unit['unit_type_id']],
        }
        client.post(
            urljoin(repos[0]['_href'], 'actions/unassociate/'),
            {'criteria': criteria},
        )
    utils.publish_repo(cfg, repos[0])
    # Re-sync 2nd repo.
    report = utils.sync_repo(cfg, repos[1])
    tasks = tuple(api.poll_spawned_tasks(cfg, report.json()))
    self.assertEqual(
        tasks[0]['result']['removed_count'],
        len(marked_units),
    )
def test_all(self):
    """Verify that recursively copying errata also copies RPM packages.

    This test targets the following issues:

    * `Pulp Smash #769 <https://github.com/PulpQE/pulp-smash/issues/769>`_
    * `Pulp #3004 <https://pulp.plan.io/issues/3004>`_

    Procedure:

    1. Create and sync a repository holding errata and RPM packages.
    2. Create an empty second repository.
    3. Copy units from the first repository into the second with
       ``recursive`` set to true and ``type_ids`` filtered to
       ``erratum``.
    4. Assert that RPM packages landed in the second repository.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(3004, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/3004')
    client = api.Client(cfg, api.json_handler)

    # Source repository: has errata and RPMs; sync it.
    source_body = gen_repo()
    source_body['importer_config']['feed'] = RPM_UPDATED_INFO_FEED_URL
    source_body['distributors'] = [gen_distributor()]
    source_repo = client.post(REPOSITORY_PATH, source_body)
    self.addCleanup(client.delete, source_repo['_href'])
    utils.sync_repo(cfg, source_repo)

    # Target repository: created empty.
    target_repo = client.post(REPOSITORY_PATH, gen_repo())
    self.addCleanup(client.delete, target_repo['_href'])

    # Recursively copy erratum units into the target repository.
    associate_path = urljoin(target_repo['_href'], 'actions/associate/')
    client.post(associate_path, {
        'source_repo_id': source_repo['id'],
        'override_config': {'recursive': True},
        'criteria': {'filters': {}, 'type_ids': ['erratum']},
    })

    # The recursive copy must have dragged RPMs along with the errata.
    rpms = utils.search_units(cfg, target_repo, {'type_ids': ['rpm']})
    self.assertGreater(len(rpms), 0)
def test_02_create_immediate_child(self):
    """Create and sync a child repo with the "immediate" download policy.

    The child feeds from the root repository's published URL and, once
    synced, must contain exactly the same RPMs as the root repository.
    """
    client = api.Client(self.cfg, api.json_handler)
    root_relative_url = (
        self.repos['root']['distributors'][0]['config']['relative_url'])
    body = gen_repo()
    body['importer_config']['feed'] = urljoin(
        self.cfg.get_base_url(), _PUBLISH_DIR + root_relative_url)
    body['importer_config']['remove_missing'] = True
    # Each HTTPS feed would need its own certificate, which is
    # burdensome to arrange here, so SSL validation is disabled.
    body['importer_config']['ssl_validation'] = False
    child = client.post(REPOSITORY_PATH, body)
    self.repos['immediate'] = child
    self.repos['immediate'] = _get_details(self.cfg, child)
    utils.sync_repo(self.cfg, self.repos['immediate'])

    # The child must mirror the root repository's contents.
    self.assertEqual(
        _get_rpm_ids(_get_rpms(self.cfg, self.repos['root'])),
        _get_rpm_ids(_get_rpms(self.cfg, self.repos['immediate'])),
    )
def test_update_checksum_type(self):
    """Verify the RPM distributor accepts a null ``checksum_type``.

    See: https://pulp.plan.io/issues/2134.
    """
    cfg = config.get_config()
    if cfg.pulp_version < version.Version('2.9'):
        raise unittest.SkipTest('This test requires Pulp 2.9 or above.')
    client = api.Client(cfg, api.json_handler)
    distributor = gen_distributor()
    body = gen_repo()
    body['distributors'] = [distributor]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])

    # Set the checksum type, change it, then unset it again, verifying
    # the persisted distributor config after each update.
    for checksum_type in (None, 'sha256', None):
        payload = {
            'distributor_configs': {
                distributor['distributor_id']: {
                    'checksum_type': checksum_type,
                },
            },
        }
        client.put(repo['_href'], payload)
        repo = client.get(repo['_href'], params={'details': True})
        actual = repo['distributors'][0]['config'].get('checksum_type')
        self.assertEqual(actual, checksum_type)
def test_all(self):
    """Upload the same package group to a repository twice."""
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    client = api.Client(cfg, api.json_handler)
    self.addCleanup(client.delete, ORPHANS_PATH)

    # Create a repository with a distributor attached.
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])

    # Import a package group, then publish the repository.
    group = {'id': utils.uuid4(), 'name': utils.uuid4()}
    _upload_import_package_group(cfg, repo, group)
    repo = client.get(repo['_href'], params={'details': True})
    utils.publish_repo(cfg, repo)

    # Rename the group, import it again, and re-publish.
    group['name'] = utils.uuid4()
    _upload_import_package_group(cfg, repo, group)
    utils.publish_repo(cfg, repo)

    # The published 'group' (comps) repodata must contain exactly one
    # group whose fields match the updated upload.
    root_element = get_repodata(cfg, repo['distributors'][0], 'group')
    groups = root_element.findall('group')
    self.assertEqual(len(groups), 1, ElementTree.tostring(root_element))
    for key, value in group.items():
        with self.subTest(key=key):
            self.assertEqual(groups[0].find(key).text, value)
def test_all(self):
    """Verify a DRPM with an explicit ``checksumtype`` can be uploaded."""
    if selectors.bug_is_untestable(1806, self.cfg.pulp_version):
        raise unittest.SkipTest('https://pulp.plan.io/issues/1806')
    if selectors.bug_is_untestable(2627, self.cfg.pulp_version):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2627')
    client = api.Client(self.cfg)
    repo = client.post(REPOSITORY_PATH, gen_repo()).json()
    self.addCleanup(client.delete, repo['_href'])
    drpm = utils.http_get(DRPM_UNSIGNED_URL)
    unit_metadata = {
        'unit_type_id': 'drpm',
        'unit_metadata': {'checksumtype': 'sha256'},
    }
    utils.upload_import_unit(self.cfg, drpm, unit_metadata, repo)
    units = utils.search_units(self.cfg, repo, {})
    self.assertEqual(len(units), 1, units)
    # The DRPM's extracted metadata should yield the expected filename.
    self.assertEqual(units[0]['metadata']['filename'], DRPM)
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test
    cases may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.9'):
        raise unittest.SkipTest('This module requires Pulp 2.9 or greater.')
    if check_issue_2277(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')
    # Create and sync a repository.
    client = api.Client(cfg, api.json_handler)
    # Register orphan cleanup to run during module teardown.
    _CLEANUP.append((client.delete, [ORPHANS_PATH], {}))
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    # Populate the shared module-level _REPO dict in place so code that
    # already holds a reference to it sees the new repository.
    _REPO.clear()
    _REPO.update(client.post(REPOSITORY_PATH, body))
    _CLEANUP.append((client.delete, [_REPO['_href']], {}))
    try:
        utils.sync_repo(cfg, _REPO)
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        # A failed sync leaves the module half-initialized; run teardown
        # now, then propagate the error.
        tearDownModule()
        raise
def setUpClass(cls):
    """Create a schedule to publish a repo, verify the ``total_run_count``.

    Do the following:

    1. Create a repository with a valid feed
    2. Sync it
    3. Schedule publish to run every 2 minutes
    4. Wait for 130 seconds and read the schedule to get the number of
       "publish" runs
    """
    super(ScheduledPublishTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)
    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)
    # Schedule a publish to run every 2 minutes
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    schedule_path = urljoin(repo['_href'], scheduling_url)
    schedule = client.post(schedule_path, {'schedule': 'PT2M'})
    # Wait for publish to run. 130s > 2m, so at least one scheduled run
    # should have started by the time the schedule is read back.
    time.sleep(130)
    # Read the schedule
    cls.response = client.get(schedule['_href'])
def setUpClass(cls):
    """Create a repository that has a distributor."""
    cls.cfg = config.get_config()
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    client = api.Client(cls.cfg)
    cls.repo = client.post(REPOSITORY_PATH, body).json()
def setUpClass(cls):
    """Generate, fetch and parse a ``repomd.xml`` file.

    Create an RPM repository with a YUM distributor, publish it, then
    download the distributor's ``repomd.xml`` file and parse it into
    ``cls.root_element``.
    """
    super(RepoMDTestCase, cls).setUpClass()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
    if check_issue_2277(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

    # Create a repository with a yum distributor, then publish it.
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    repo = client.get(repo['_href'], params={'details': True})
    cls.resources.add(repo['_href'])
    utils.publish_repo(cls.cfg, repo)

    # Download and parse the published repomd.xml.
    client.response_handler = xml_handler
    relative_url = repo['distributors'][0]['config']['relative_url']
    repomd_path = urljoin(
        urljoin('/pulp/repos/', relative_url), 'repodata/repomd.xml')
    cls.root_element = client.get(repomd_path)
def setUpClass(cls):
    """Schedule a publish of a synced repository.

    Do the following:

    1. Create a repository with a valid feed.
    2. Sync it.
    3. Schedule a publish to run every 30 seconds, recording the
       server's response.
    """
    super(CreateSuccessTestCase, cls).setUpClass()
    client = api.Client(cls.cfg)

    # A repository with a valid feed, synced so there is data to publish.
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body).json()
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)

    # Add a distributor, then schedule a publish every 30 seconds.
    distributor = gen_distributor()
    distributors_path = urljoin(repo['_href'], 'distributors/')
    client.post(distributors_path, distributor)
    schedule_path = urljoin(
        distributors_path,
        '{}/schedules/publish/'.format(distributor['distributor_id']),
    )
    cls.response = client.post(schedule_path, {'schedule': 'PT30S'})
    cls.attrs = cls.response.json()
def make_repo(self, cfg, dist_cfg_updates):
    """Create a repository with an importer and pair of distributors.

    The repository is built with:

    * A yum importer with a valid feed.
    * A yum distributor.
    * An RPM rsync distributor referencing the yum distributor.

    The repository is also scheduled for deletion via ``addCleanup``.

    :param pulp_smash.config.PulpSmashConfig cfg: Information about the
        Pulp deployment being targeted.
    :param dist_cfg_updates: A dict to be merged into the RPM rsync
        distributor's ``distributor_config`` dict. At a minimum, this
        argument should have a value of ``{'remote': {…}}``.
    :returns: A detailed dict of information about the repo.
    """
    client = api.Client(cfg, api.json_handler)
    yum_distributor = gen_distributor()
    rsync_distributor = {
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'rpm_rsync_distributor',
        'distributor_config': {
            'predistributor_id': yum_distributor['distributor_id'],
        },
    }
    rsync_distributor['distributor_config'].update(dist_cfg_updates)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    body['distributors'] = [yum_distributor, rsync_distributor]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    return client.get(repo['_href'], params={'details': True})
def test_02_create_on_demand_child(self):
    """Create and sync a child repo with the "on_demand" download policy.

    The child feeds from the root repository's published URL, has
    ``remove_missing`` enabled and SSL validation disabled, and — once
    synced — must contain exactly the same RPMs as the root repository.
    """
    client = api.Client(self.cfg, api.json_handler)
    root_relative_url = (
        self.repos['root']['distributors'][0]['config']['relative_url'])
    body = gen_repo()
    body['importer_config']['feed'] = urljoin(
        self.cfg.get_base_url(), _PUBLISH_DIR + root_relative_url)
    body['importer_config']['download_policy'] = 'on_demand'
    body['importer_config']['remove_missing'] = True
    body['importer_config']['ssl_validation'] = False
    child = client.post(REPOSITORY_PATH, body)
    self.repos['on demand'] = child
    self.repos['on demand'] = _get_details(self.cfg, child)
    utils.sync_repo(self.cfg, self.repos['on demand'])

    # The child must mirror the root repository's contents.
    self.assertEqual(
        _get_rpm_ids(_get_rpms(self.cfg, self.repos['root'])),
        _get_rpm_ids(_get_rpms(self.cfg, self.repos['on demand'])),
    )
def setUpClass(cls):
    """Create an RPM repository, upload package groups, and publish."""
    super(UploadPackageGroupsTestCase, cls).setUpClass()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

    # A repository with a distributor attached.
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})

    # Import one minimal and one realistic package group, recording the
    # spawned tasks for each, then publish the repository.
    cls.package_groups = {
        'minimal': _gen_minimal_group(),
        'realistic': _gen_realistic_group(),
    }
    cls.tasks = {}
    for label, group in cls.package_groups.items():
        report = _upload_import_package_group(cls.cfg, repo, group)
        cls.tasks[label] = tuple(api.poll_spawned_tasks(cls.cfg, report))
    utils.publish_repo(cls.cfg, repo)

    # Fetch the generated 'group' (a.k.a. 'comps') repodata.
    cls.root_element = get_repodata(cls.cfg, repo['distributors'][0], 'group')
def test_all(self):
    """Verify an RPM with non-ascii metadata can be uploaded."""
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPOSITORY_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    package = utils.http_get(RPM_WITH_NON_ASCII_URL)
    utils.upload_import_unit(cfg, package, {'unit_type_id': 'rpm'}, repo)
def _create_repository(cfg, importer_config):
    """Create an RPM repository with the given importer configuration.

    :param cfg: Information about the Pulp deployment being targeted.
    :param importer_config: The ``importer_config`` for the new repo.
    :returns: A dict of information about the repository.
    """
    client = api.Client(cfg)
    body = gen_repo()
    body['importer_config'] = importer_config
    return client.post(REPOSITORY_PATH, body).json()
def setUp(self):
    """Create a repository with an unsigned-RPM feed for each test."""
    self.client = api.Client(self.cfg, api.json_handler)
    body = gen_repo()
    body['importer_config'] = {'feed': RPM_UNSIGNED_FEED_URL}
    created = self.client.post(REPOSITORY_PATH, body)
    self.addCleanup(self.client.delete, created['_href'])
    self.repo = self.client.get(created['_href'], params={'details': True})
def setUpClass(cls):
    """Create several schedules.

    Each schedule is created to test a different failure scenario.
    """
    super(CreateFailureTestCase, cls).setUpClass()
    client = api.Client(cls.cfg)
    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body).json()
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)
    # Add a distibutor
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    # Switch to the echo handler so failing responses are handed back to
    # the tests instead of being raised as exceptions.
    client.response_handler = api.echo_handler
    # Request bodies; each entry pairs positionally with the same index
    # in ``cls.paths`` and ``cls.status_codes`` below.
    cls.bodies = (
        {
            'schedule': None
        },  # 400
        {
            'unknown': 'parameter',
            'schedule': 'PT30S'
        },  # 400
        ['Incorrect data type'],  # 400
        {
            'missing_required_keys': 'schedule'
        },  # 400
        {
            'schedule': 'PT30S'
        },  # tests incorrect distributor in url, 404
        {
            'schedule': 'PT30S'
        },  # tests incorrect repo in url, 404
    )
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    # Deliberately malformed URLs with random UUIDs, for the 404 cases.
    bad_distributor_url = '/'.join(
        ['distributors', utils.uuid4(), 'schedules/publish/'])
    bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
    cls.paths = (urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], bad_distributor_url),
                 urljoin(bad_repo_path, scheduling_url))
    cls.status_codes = (400, 400, 400, 400, 404, 404)
    cls.responses = [
        client.post(path, req_body)
        for path, req_body in zip(cls.paths, cls.bodies)
    ]
def create_repo():
    """Create a repository, schedule its deletion, and record it.

    A dict of information about the new repository is appended to
    ``repos``.
    """
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    new_repo = client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(client.delete, new_repo['_href'])
    repos.append(new_repo)
def test_all(self):
    """Verify an RPM with non-utf-8 metadata can be uploaded.

    Skip the test if Pulp issue #1903 is unresolved.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(1903, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/1903')
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPOSITORY_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    package = utils.http_get(RPM_WITH_NON_UTF_8_URL)
    utils.upload_import_unit(cfg, package, {'unit_type_id': 'rpm'}, repo)
def test_all(self):
    """Test whether copied files retain their original mtime.

    This test targets the following issues:

    * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
    * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

    Do the following:

    1. Create, sync and publish a repository, with ``generate_sqlite``
       set to true.
    2. Get the ``mtime`` of the sqlite files.
    3. Upload an RPM package into the repository, and sync the
       repository.
    4. Get the ``mtime`` of the sqlite files again. Verify that the
       mtimes are the same.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(2783, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2783')
    # Create, sync and publish a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    body['distributors'][0]['distributor_config']['generate_sqlite'] = True
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    utils.sync_repo(cfg, repo)
    utils.publish_repo(cfg, repo)

    # Get the mtime of the sqlite files.
    cli_client = cli.Client(cfg, cli.echo_handler)
    cmd = '' if utils.is_root(cfg) else 'sudo '
    cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
        os.path.join(
            _PATH,
            repo['distributors'][0]['config']['relative_url'],
            'repodata',
        ))
    # BUG FIX: the old expression ended in ``.split().sort()``, and
    # list.sort() returns None. Both mtime variables were therefore
    # always None, making the final assertion vacuously true. sorted()
    # returns the sorted list itself, so real values are compared.
    mtimes_pre = sorted(
        cli_client.machine.session().run(cmd)[1].strip().split())

    # Upload to the repo, and sync it.
    rpm = utils.http_get(RPM_SIGNED_URL)
    utils.upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
    utils.sync_repo(cfg, repo)

    # Get the mtime of the sqlite files again. Sleep so that a mtime
    # change, if any, is at least one second and thus observable.
    time.sleep(1)
    mtimes_post = sorted(
        cli_client.machine.session().run(cmd)[1].strip().split())
    self.assertEqual(mtimes_pre, mtimes_post)
def test_all(self):
    """Test whether ``httpd`` dispatches a task while the broker is down.

    This test targets the following issues:

    * `Pulp Smash #650 <https://github.com/PulpQE/pulp-smash/issues/650>`_
    * `Pulp #2770 <https://pulp.plan.io/issues/2770>`_

    This test does the following:

    1. Create a repository.
    2. Stop the AMQP broker. (Also, schedule it to be re-started later!)
    3. Sync the repository, ignore any errors that are returned when
       doing so, and assert that no tasks are left in the ``waiting``
       state.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(2770, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2770')
    # Create a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    # Stop the AMQP broker. The service manager APIs take a list of
    # service names; the restart is registered as cleanup so the broker
    # comes back up even if the test fails.
    broker = [utils.get_broker(cfg)]
    svc_mgr = cli.GlobalServiceManager(cfg)
    svc_mgr.stop(broker)
    self.addCleanup(svc_mgr.start, broker)
    # Sync the repo, and assert no tasks are left in the waiting state.
    try:
        utils.sync_repo(cfg, repo)
    except HTTPError:
        # A failing sync is acceptable here; only the resulting task
        # states matter.
        pass
    tasks = client.post(
        urljoin(TASKS_PATH, 'search/'), {
            'criteria': {
                'fields': [
                    'finish_time',
                    'start_time',
                    'state',
                    'tags',
                    'task_id',
                ],
                'filters': {
                    'state': {
                        '$in': ['waiting']
                    }
                },
            }
        })
    self.assertEqual(len(tasks), 0, tasks)
def test_all(self):
    """Sync a repo whose updateinfo file has multiple pkglist sections.

    Specifically, do the following:

    1. Create, sync and publish an RPM repository whose feed is set to
       :data:`pulp_smash.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL`.
    2. Fetch and parse the published repository's ``updateinfo.xml``
       file.

    Verify that the ``updateinfo.xml`` file has three packages whose
    ``<filename>`` elements have the following text:

    * penguin-0.9.1-1.noarch.rpm
    * shark-0.1-1.noarch.rpm
    * walrus-5.21-1.noarch.rpm

    Note that Pulp is free to change the structure of a source
    repository at will. For example, the source repository has three
    ``<collection>`` elements, the published repository can have one,
    two or three ``<collection>`` elements. Assertions are not made
    about these details.
    """
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    # BUG FIX: the issue number checked here used to be 2227 while the
    # skip message referenced 2277 — the issue guarded elsewhere in this
    # module via check_issue_2277 — so the digits appear transposed.
    # Check and report the same issue number.
    if selectors.bug_is_untestable(2277, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2277')
    # Create, sync and publish a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    utils.sync_repo(cfg, repo)
    utils.publish_repo(cfg, repo)
    # Fetch and parse ``updateinfo.xml``.
    updates_element = (
        get_repodata(cfg, repo['distributors'][0], 'updateinfo'))
    # Verify the ``updateinfo.xml`` file. Serialize the XML up front so
    # assertion failures include the full document for debugging.
    debug = ElementTree.tostring(updates_element)
    filename_elements = updates_element.findall(
        'update/pkglist/collection/package/filename')
    filenames = sorted(
        filename_element.text for filename_element in filename_elements)
    self.assertEqual(filenames, [
        'penguin-0.9.1-1.noarch.rpm',
        'shark-0.1-1.noarch.rpm',
        'walrus-5.21-1.noarch.rpm',
    ], debug)
def test_rpm(self):
    """Upload duplicate RPM content. See :meth:`do_test`.

    This test targets the following issues:

    * `Pulp Smash #81 <https://github.com/PulpQE/pulp-smash/issues/81>`_
    * `Pulp #1406 <https://pulp.plan.io/issues/1406>`_
    """
    if selectors.bug_is_untestable(1406, self.cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/1406')
    self.do_test(RPM_UNSIGNED_URL, 'rpm', gen_repo())
def setUpClass(cls):
    """Create, sync, and fetch details about a repository."""
    super(ForceFullTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    created = client.post(REPOSITORY_PATH, body)
    cls.resources.add(created['_href'])
    utils.sync_repo(cls.cfg, created)
    cls.repo = client.get(created['_href'], params={'details': True})
def setUpClass(cls):
    """Create an RPM repository with a valid feed and sync it.

    Skip when invoked on the abstract base class itself; only concrete
    subclasses supply a feed URL via ``get_feed_url``.
    """
    # ``inspect.getmro(cls)[0]`` is by definition ``cls`` itself, so the
    # old MRO-based comparison reduces to a plain identity check.
    if cls is SyncRepoBaseTestCase:
        raise unittest.SkipTest('Abstract base class.')
    super(SyncRepoBaseTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = cls.get_feed_url()
    cls.repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(cls.repo['_href'])
    # Keep the sync report so subclasses can make assertions about it.
    cls.report = utils.sync_repo(cls.cfg, cls.repo)
def test_all(self):
    """Sync a repository, change its feed, and sync it again.

    Create and publish repositories A and B with the same upstream
    content, then create repository C feeding from A's published URL.
    After an initial sync, point C's importer at B's published URL,
    re-sync and re-publish C, and verify an RPM downloaded from C is
    byte-identical to the upstream copy.
    """
    if check_issue_3104(self.cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    # Create, sync and publish repositories A and B.
    repos = []
    for _ in range(2):
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repos.append(self.create_sync_publish_repo(body))
    # Create repository C, let it sync from repository A, and publish it.
    body = gen_repo()
    body['importer_config']['feed'] = self.get_feed(repos[0])
    # The published feed has no trusted certificate, so certificate
    # validation must be disabled.
    body['importer_config']['ssl_validation'] = False
    body['distributors'] = [gen_distributor()]
    repo = self.create_sync_publish_repo(body)
    # Update repository C so its importer points at repository B.
    client = api.Client(self.cfg, api.json_handler)
    feed = self.get_feed(repos[1])
    client.put(repo['importers'][0]['_href'],
               {'importer_config': {
                   'feed': feed
               }})
    repo = client.get(repo['_href'], params={'details': True})
    self.assertEqual(repo['importers'][0]['config']['feed'], feed)
    # Sync and publish repository C.
    utils.sync_repo(self.cfg, repo)
    utils.publish_repo(self.cfg, repo)
    rpm = utils.http_get(RPM_UNSIGNED_URL)
    response = get_unit(self.cfg, repo['distributors'][0], RPM)
    with self.subTest():
        self.assertIn(response.headers['content-type'],
                      ('application/octet-stream', 'application/x-rpm'))
    with self.subTest():
        self.assertEqual(rpm, response.content)
def test_upload(self):
    """Check Pulp's handling of an uploaded RPM's vendor information.

    Create a repository, upload an RPM with a non-null vendor, and
    perform several checks. See :meth:`do_test`.
    """
    client = api.Client(self.cfg, api.json_handler)
    repo = client.post(REPOSITORY_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    package = utils.http_get(RPM_WITH_VENDOR_URL)
    utils.upload_import_unit(
        self.cfg, package, {'unit_type_id': 'rpm'}, repo)
    self.do_test(repo)
def create_repo(cls):
    """Create a semi-random RPM repository with a valid RPM feed URL.

    Add the new repository's href to ``cls.resources`` so it is cleaned
    up later. Return a two-tuple of ``(href, importer_type_id)``.

    This method requires a server config, ``cls.cfg``, and a set,
    ``cls.resources``.
    """
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    client = api.Client(cls.cfg)
    href = client.post(REPOSITORY_PATH, body).json()['_href']
    cls.resources.add(href)
    return (href, body['importer_type_id'])