def test_all(self):
    """Verify whether uploaded module.yaml is reflected in the pulp repo."""
    cfg = config.get_config()
    # Modularity support is only available on Pulp 2.17+.
    if cfg.pulp_version < Version('2.17'):
        raise unittest.SkipTest(
            'This test requires at least Pulp 2.17 or newer.')
    client = api.Client(cfg, api.json_handler)
    # Create a normal Repo without any data.
    body = gen_repo(importer_config={'feed': RPM_UNSIGNED_FEED_URL},
                    distributors=[gen_distributor()])
    repo = client.post(REPOSITORY_PATH, body)
    repo = client.get(repo['_href'], params={'details': True})
    self.addCleanup(client.delete, repo['_href'])
    sync_repo(cfg, repo)
    # download modules.yaml and upload it to pulp_repo
    unit = self._get_module_yaml_file(RPM_WITH_MODULES_FEED_URL)
    # An empty unit_key lets the importer derive the key from the file.
    upload_import_unit(cfg, unit, {
        'unit_key': {},
        'unit_type_id': 'modulemd',
    }, repo)
    # Re-read the repo so content_unit_counts reflects the upload.
    repo = client.get(repo['_href'], params={'details': True})
    # Assert that `modulemd` and `modulemd_defaults` are present on the
    # repository.
    self.assertIsNotNone(repo['content_unit_counts']['modulemd'])
    self.assertIsNotNone(repo['content_unit_counts']['modulemd_defaults'])
def test_all(self):
    """Verify whether package manager can read module list from a Pulp repo."""
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.17'):
        raise unittest.SkipTest(
            'This test requires at least Pulp 2.17 or newer.')
    if not os_support_modularity(cfg):
        raise unittest.SkipTest(
            'This test requires an OS that supports modularity.')
    client = api.Client(cfg, api.json_handler)
    # Create, sync and publish a repository with modular content.
    repo = client.post(REPOSITORY_PATH, gen_repo(
        importer_config={'feed': RPM_WITH_MODULES_FEED_URL},
        distributors=[gen_distributor()],
    ))
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(cfg, repo)
    publish_repo(cfg, repo)
    repo = client.get(repo['_href'], params={'details': True})
    # Point the local package manager at the published repository.
    relative_url = repo['distributors'][0]['config']['relative_url']
    repo_path = gen_yum_config_file(
        cfg,
        baseurl=urljoin(cfg.get_base_url(),
                        urljoin('pulp/repos/', relative_url)),
        name=repo['_href'],
        repositoryid=repo['id'])
    cli_client = cli.Client(cfg)
    self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
    # Ask dnf for the module list and check each fixture module shows up
    # the expected number of times.
    proc = cli_client.run(('dnf', 'module', 'list', '--all'), sudo=True)
    lines = proc.stdout.splitlines()
    for name, count in MODULE_FIXTURES_PACKAGES.items():
        with self.subTest(package=name):
            matches = [line for line in lines if name in line]
            self.assertEqual(len(matches), count, matches)
def setUpClass(cls):
    """Create an RPM repository, upload package groups, and publish."""
    super().setUpClass()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
    # Create a repository and add a distributor to it.
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    # Generate several package groups, import them into the repository, and
    # publish the repository.
    cls.package_groups = {
        'minimal': _gen_minimal_group(),
        'realistic': _gen_realistic_group(),
    }
    # Keep the spawned-task reports so tests can inspect import results.
    cls.tasks = {}
    for key, package_group in cls.package_groups.items():
        report = _upload_import_package_group(cls.cfg, repo, package_group)
        cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
    publish_repo(cls.cfg, repo)
    # Fetch the generated repodata of type 'group' (a.k.a. 'comps')
    cls.root_element = (
        get_repodata(cls.cfg, repo['distributors'][0], 'group'))
def do_test(self, recursive, recursive_conservative):
    """Copy of units for a repository with rich/weak dependencies."""
    repos = []
    # Source repository: synced from the rich/weak-dependency fixture.
    body = gen_repo(importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
                    distributors=[gen_distributor()])
    repos.append(self.client.post(REPOSITORY_PATH, body))
    self.addCleanup(self.client.delete, repos[0]['_href'])
    sync_repo(self.cfg, repos[0])
    # Target repository: created empty; units are copied into it below.
    repos.append(self.client.post(REPOSITORY_PATH, gen_repo()))
    self.addCleanup(self.client.delete, repos[1]['_href'])
    # Pulp 2.18.1 introduced a new flag `recursive_conservative`.
    # If true, units are copied together with their
    # dependencies, unless those are already satisfied by the content in
    # the target repository.
    override_config = {'recursive': recursive}
    if self.cfg.pulp_version >= Version('2.18.1'):
        override_config.update(
            {'recursive_conservative': recursive_conservative})
    self.client.post(
        urljoin(repos[1]['_href'], 'actions/associate/'), {
            'source_repo_id': repos[0]['id'],
            'override_config': override_config,
            'criteria': {
                'filters': {
                    'unit': {
                        'name': RPM2_RICH_WEAK_DATA['name']
                    }
                },
                'type_ids': ['rpm'],
            },
        })
    # Return the target repo with details so callers can inspect counts.
    return self.client.get(repos[1]['_href'], params={'details': True})
def setUpClass(cls):
    """Create a schedule to publish a repo, verify the ``total_run_count``.

    Do the following:

    1. Create a repository with a valid feed
    2. Sync it
    3. Schedule publish to run every 2 minutes
    4. Wait for 130 seconds and read the schedule to get the number of
       "publish" runs
    """
    super().setUpClass()
    client = api.Client(cls.cfg, api.json_handler)
    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    sync_repo(cls.cfg, repo)
    # Schedule a publish to run every 2 minutes
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    schedule_path = urljoin(repo['_href'], scheduling_url)
    schedule = client.post(schedule_path, {'schedule': 'PT2M'})
    # Wait for publish to run
    time.sleep(130)
    # Read the schedule
    cls.response = client.get(schedule['_href'])
def setUpClass(cls):
    """Create a schedule to publish the repository.

    Do the following:

    1. Create a repository with a valid feed
    2. Sync it
    3. Schedule publish to run every 30 seconds
    """
    super().setUpClass()
    api_client = api.Client(cls.cfg)
    # Build a repository whose importer points at a valid feed, then sync.
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = api_client.post(REPOSITORY_PATH, repo_body).json()
    cls.resources.add(repo['_href'])
    sync_repo(cls.cfg, repo)
    # Attach a distributor and schedule a publish every 30 seconds.
    dist = gen_distributor()
    dist_path = urljoin(repo['_href'], 'distributors/')
    api_client.post(dist_path, dist)
    schedule_path = urljoin(
        dist_path,
        '{}/schedules/publish/'.format(dist['distributor_id']),
    )
    cls.response = api_client.post(schedule_path, {'schedule': 'PT30S'})
    cls.attrs = cls.response.json()
def setUpClass(cls):
    """Create a repository."""
    cls.cfg = config.get_config()
    api_client = api.Client(cls.cfg)
    # A bare repository with one distributor; no feed, no sync.
    repo_body = gen_repo()
    repo_body['distributors'] = [gen_distributor()]
    cls.repo = api_client.post(REPOSITORY_PATH, repo_body).json()
def test_all(self):
    """Package manager can consume RPM with rich/weak dependencies from Pulp."""
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.17'):
        raise unittest.SkipTest('This test requires Pulp 2.17 or newer.')
    # Rich/weak dependencies need RPM 4.12+ on the target system.
    if not rpm_rich_weak_dependencies(cfg):
        raise unittest.SkipTest('This test requires RPM 4.12 or newer.')
    client = api.Client(cfg, api.json_handler)
    body = gen_repo(
        importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
        distributors=[gen_distributor()]
    )
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(cfg, repo)
    publish_repo(cfg, repo)
    # Point the local package manager at the published repository.
    repo_path = gen_yum_config_file(
        cfg,
        baseurl=urljoin(cfg.get_base_url(), urljoin(
            'pulp/repos/',
            repo['distributors'][0]['config']['relative_url']
        )),
        name=repo['_href'],
        repositoryid=repo['id']
    )
    cli_client = cli.Client(cfg)
    self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
    rpm_name = 'Cobbler'
    pkg_mgr = cli.PackageManager(cfg)
    pkg_mgr.install(rpm_name)
    self.addCleanup(pkg_mgr.uninstall, rpm_name)
    # `rpm -q` prints NAME-VERSION-RELEASE; the leading field must be the
    # package that was just installed.
    rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
    self.assertEqual(rpm_name, rpm[0])
def test_update_checksum_type(self):
    """Check if RPM distributor can receive null checksum_type.

    See: https://pulp.plan.io/issues/2134.
    """
    cfg = config.get_config()
    if cfg.pulp_version < version.Version('2.9'):
        raise unittest.SkipTest('This test requires Pulp 2.9 or above.')
    api_client = api.Client(cfg, api.json_handler)
    dist = gen_distributor()
    repo_body = gen_repo()
    repo_body['distributors'] = [dist]
    repo = api_client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(api_client.delete, repo['_href'])
    # Flip the checksum type null -> sha256 -> null, verifying that each
    # value round-trips through the distributor config.
    for checksum_type in (None, 'sha256', None):
        update = {
            'distributor_configs': {
                dist['distributor_id']: {
                    'checksum_type': checksum_type,
                }
            }
        }
        api_client.put(repo['_href'], update)
        repo = api_client.get(repo['_href'], params={'details': True})
        self.assertEqual(
            repo['distributors'][0]['config'].get('checksum_type'),
            checksum_type)
def make_repo(self, cfg, dist_cfg_updates):
    """Create a repository with an importer and pair of distributors.

    Create an RPM repository with:

    * A yum importer with a valid feed.
    * A yum distributor.
    * An RPM rsync distributor referencing the yum distributor.

    In addition, schedule the repository for deletion.

    :param cfg: Information about the Pulp deployment being targeted.
    :param dist_cfg_updates: A dict to be merged into the RPM rsync
        distributor's ``distributor_config`` dict. At a minimum, this
        argument should have a value of ``{'remote': {…}}``.
    :returns: A detailed dict of information about the repo.
    """
    api_client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    # The rsync distributor must reference the yum distributor via
    # ``predistributor_id``.
    body['distributors'].append({
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'rpm_rsync_distributor',
        'distributor_config': {
            'predistributor_id': body['distributors'][0]['distributor_id'],
        }
    })
    # Merge caller-supplied settings (e.g. 'remote') into the rsync
    # distributor's config.
    body['distributors'][1]['distributor_config'].update(dist_cfg_updates)
    repo = api_client.post(REPOSITORY_PATH, body)
    self.addCleanup(api_client.delete, repo['_href'])
    return api_client.get(repo['_href'], params={'details': True})
def test_all(self):
    """Verify ``RPM_LARGE_METADATA`` RPM file can be uploaded.

    Specifically, this method does the following:

    1. Create an RPM repo.
    2. Verify whether the file ``RPM_LARGE_METADATA`` can be uploaded
       into the repo without errors.

    This test targets:

    * `Pulp #723 <https://pulp.plan.io/issues/723>`_
    * `Pulp-2-Tests #88
      <https://github.com/PulpQE/Pulp-2-Tests/issues/88>`_
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    body = gen_repo(distributors=[gen_distributor()])
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    # Fetch the RPM with oversized metadata and upload it into the repo.
    rpm = utils.http_get(RPM_LARGE_METADATA_FEED)
    upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
    repo = client.get(repo['_href'], params={'details': True})
    publish_repo(cfg, repo)
    rpm_path = get_rpm_published_path(cfg, repo, RPM_LARGE_METADATA)
    # Check whether the uploaded RPM was published.
    self.assertIn(RPM_LARGE_METADATA, rpm_path, rpm_path)
def test_all(self):
    """Upload a package group to a repository twice."""
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    client = api.Client(cfg, api.json_handler)
    # Orphans are removed at teardown so the second upload's leftovers
    # don't pollute other tests.
    self.addCleanup(client.delete, ORPHANS_PATH)
    # Create a repository.
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    # Give the repository a package group, and publish the repository.
    package_group = {'id': utils.uuid4(), 'name': utils.uuid4()}
    _upload_import_package_group(cfg, repo, package_group)
    repo = client.get(repo['_href'], params={'details': True})
    publish_repo(cfg, repo)
    # Update the repository's package group, and re-publish the repository.
    package_group['name'] = utils.uuid4()
    _upload_import_package_group(cfg, repo, package_group)
    publish_repo(cfg, repo)
    # Fetch the generated repodata of type 'group' (a.k.a. 'comps'). Verify
    # the package group portion: the re-upload must replace, not duplicate,
    # the group.
    root_element = get_repodata(cfg, repo['distributors'][0], 'group')
    groups = root_element.findall('group')
    self.assertEqual(len(groups), 1, ElementTree.tostring(root_element))
    for key, value in package_group.items():
        with self.subTest(key=key):
            self.assertEqual(groups[0].find(key).text, value)
def do_test(self, distributor_config_update):
    """Implement most of the test logic."""
    rpms = tuple(
        utils.http_get(url)
        for url in (RPM_UNSIGNED_URL, RPM2_UNSIGNED_URL))
    # Create a repository.
    client = api.Client(self.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    body['distributors'][0]['distributor_config'].update(
        distributor_config_update)
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    # Upload an RPM, publish the repo, and count metadata files twice.
    cli_client = cli.Client(self.cfg)
    sudo = () if cli.is_root(self.cfg) else ('sudo', )
    # Locate the published repodata directory on the Pulp server.
    find_repodata_cmd = sudo + (
        'find',
        os.path.join(
            '/var/lib/pulp/published/yum/master/yum_distributor/',
            str(repo['id'])),
        '-type', 'd', '-name', 'repodata')
    found = []
    for rpm in rpms:
        upload_import_unit(self.cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        publish_repo(self.cfg, repo)
        repodata_path = cli_client.run(find_repodata_cmd).stdout.strip()
        # Record the metadata file listing after each publish.
        found.append(
            cli_client.run(sudo + ('find', repodata_path, '-type',
                                   'f')).stdout.splitlines())
    return found
def test_broken_simlinks(self):
    """Test broken symlinks.

    Sync a repo containing a ``productid`` metadata file and assert the
    file is published as a regular file, not as a symlink.
    """
    # NOTE(review): the method name has a typo ("simlinks"); renaming it
    # would change the test ID, so it is intentionally left unchanged.
    client = api.Client(self.cfg, api.json_handler)
    body = gen_repo(
        importer_config={'feed': RPM_YUM_METADATA_FILE},
        distributors=[gen_distributor(auto_publish=True)]
    )
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    sync_repo(self.cfg, repo)
    repo = client.get(repo['_href'], params={'details': True})
    # Assert that there is a yum_repo_metadata file present in the repo.
    self.assertEqual(
        repo['content_unit_counts']['yum_repo_metadata_file'], 1, repo
    )
    path = os.path.join(
        '/var/lib/pulp/published/yum/https/repos/',
        repo['distributors'][0]['config']['relative_url'],
        'repodata'
    )
    # Assert that the productid was not saved as symlink
    productid_symlink = self.find_productid(True, path)
    self.assertEqual(len(productid_symlink), 0, productid_symlink)
    # Assert that the productid was saved as a file.
    # BUG FIX: the failure message previously reused ``productid_symlink``,
    # which would print the wrong (empty) listing when this assertion
    # fails; report ``productid_file`` instead.
    productid_file = self.find_productid(False, path)
    self.assertEqual(len(productid_file), 1, productid_file)
def _create_distributor(cfg, href, distributor_type_id, checksum_type=None):
    """Create an export distributor for the entity at ``href``."""
    endpoint = urljoin(href, 'distributors/')
    # Start from a stock distributor body and override the type (and,
    # optionally, the checksum type).
    distributor = gen_distributor()
    distributor['distributor_type_id'] = distributor_type_id
    if checksum_type is not None:
        distributor['distributor_config']['checksum_type'] = checksum_type
    return api.Client(cfg).post(endpoint, distributor).json()
def test_all(self):
    """Test whether copied files retain their original mtime.

    This test targets the following issues:

    * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
    * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

    Do the following:

    1. Create, sync and publish a repository, with ``generate_sqlite``
       set to true.
    2. Get the ``mtime`` of the sqlite files.
    3. Upload an RPM package into the repository, and sync the
       repository.
    4. Get the ``mtime`` of the sqlite files again. Verify that the
       mtimes are the same.
    """
    cfg = config.get_config()
    if not selectors.bug_is_fixed(2783, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2783')
    # Create, sync and publish a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    body['distributors'][0]['distributor_config']['generate_sqlite'] = True
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(cfg, repo)
    publish_repo(cfg, repo)
    # Get the mtime of the sqlite files.
    cli_client = cli.Client(cfg, cli.echo_handler)
    cmd = '' if cli.is_root(cfg) else 'sudo '
    cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
        os.path.join(
            _PATH,
            repo['distributors'][0]['config']['relative_url'],
            'repodata',
        ))
    # BUG FIX: the original code used ``.split().sort()``. ``list.sort()``
    # sorts in place and returns None, so both snapshots were None and the
    # final assertion vacuously compared None == None. ``sorted()`` returns
    # the sorted list, so real mtimes are captured and compared.
    # machine.session is used here to keep SSH session open
    mtimes_pre = sorted(
        cli_client.machine.session().run(cmd)[1].strip().split())
    # Upload to the repo, and sync it.
    rpm = utils.http_get(RPM_SIGNED_URL)
    upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
    sync_repo(cfg, repo)
    # Get the mtime of the sqlite files again.
    time.sleep(1)
    # machine.session is used here to keep SSH session open
    mtimes_post = sorted(
        cli_client.machine.session().run(cmd)[1].strip().split())
    self.assertEqual(mtimes_pre, mtimes_post)
def setUpClass(cls):
    """Create several schedules.

    Each schedule is created to test a different failure scenario.
    """
    super().setUpClass()
    client = api.Client(cls.cfg)
    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body).json()
    cls.resources.add(repo['_href'])
    sync_repo(cls.cfg, repo)
    # Add a distibutor
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    # Switch to the echo handler so failing responses are captured rather
    # than raised; each request below is expected to fail.
    client.response_handler = api.echo_handler
    cls.bodies = (
        {
            'schedule': None
        },  # 400
        {
            'unknown': 'parameter',
            'schedule': 'PT30S'
        },  # 400
        ['Incorrect data type'],  # 400
        {
            'missing_required_keys': 'schedule'
        },  # 400
        {
            'schedule': 'PT30S'
        },  # tests incorrect distributor in url, 404
        {
            'schedule': 'PT30S'
        },  # tests incorrect repo in url, 404
    )
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    bad_distributor_url = '/'.join(
        ['distributors', utils.uuid4(), 'schedules/publish/'])
    bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
    # Pair each request body with the URL that triggers its scenario; the
    # last two paths are deliberately invalid.
    cls.paths = (urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], bad_distributor_url),
                 urljoin(bad_repo_path, scheduling_url))
    cls.status_codes = (400, 400, 400, 400, 404, 404)
    cls.responses = [
        client.post(path, req_body)
        for path, req_body in zip(cls.paths, cls.bodies)
    ]
def create_sync_repo(self, feed):
    """Create and sync a repository given a feed."""
    repo_body = gen_repo(
        importer_config={'feed': feed},
        distributors=[gen_distributor()],
    )
    # Using on_demand since its the default policy used by Satellite
    repo_body['importer_config']['download_policy'] = 'on_demand'
    repo = self.client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(self.client.delete, repo['_href'])
    sync_repo(self.cfg, repo)
    # Hand back the fully-detailed view of the synced repository.
    return self.client.get(repo['_href'], params={'details': True})
def setUpClass(cls):
    """Create class-wide variables."""
    cls.cfg = config.get_config()
    cls.client = api.Client(cls.cfg, api.json_handler)
    # A synced repository shared by all tests in this class.
    body = gen_repo(importer_config={'feed': RPM_UNSIGNED_FEED_URL},
                    distributors=[gen_distributor()])
    cls.repo = cls.client.post(REPOSITORY_PATH, body)
    cls.repo = cls.client.get(cls.repo['_href'], params={'details': True})
    sync_repo(cls.cfg, cls.repo)
    # Errata fixture used by the tests in this class.
    cls.errata = _gen_errata()
def test_all(self):
    """Sync a repo whose updateinfo file has multiple pkglist sections.

    Specifically, do the following:

    1. Create, sync and publish an RPM repository whose feed is set to
       ``pulp_2_tests.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL``.
    2. Fetch and parse the published repository's ``updateinfo.xml``
       file.

    Verify that the ``updateinfo.xml`` file has three packages whose
    ``<filename>`` elements have the following text:

    * penguin-0.9.1-1.noarch.rpm
    * shark-0.1-1.noarch.rpm
    * walrus-5.21-1.noarch.rpm

    Note that Pulp is free to change the structure of a source repository
    at will. For example, the source repository has three ``<collection>``
    elements, the published repository can have one, two or three
    ``<collection>`` elements. Assertions are not made about these
    details.
    """
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    # BUG FIX: the bug number checked here was 2227 while the skip message
    # pointed at issue 2277; both now reference 2277, matching the issue
    # this test actually depends on.
    if not selectors.bug_is_fixed(2277, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2277')
    # Create, sync and publish a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(cfg, repo)
    publish_repo(cfg, repo)
    # Fetch and parse ``updateinfo.xml``.
    updates_element = (
        get_repodata(cfg, repo['distributors'][0], 'updateinfo'))
    # Verify the ``updateinfo.xml`` file.
    debug = ElementTree.tostring(updates_element)
    filename_elements = (updates_element.findall(
        'update/pkglist/collection/package/filename'))
    filenames = [
        filename_element.text for filename_element in filename_elements
    ]
    filenames.sort()
    self.assertEqual(filenames, [
        'penguin-0.9.1-1.noarch.rpm',
        'shark-0.1-1.noarch.rpm',
        'walrus-5.21-1.noarch.rpm',
    ], debug)
def setUpClass(cls):
    """Create an RPM repository with a feed and distributor."""
    cls.cfg = config.get_config()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
    cls.client = api.Client(cls.cfg, api.json_handler)
    # Repository with a signed-RPM feed and a single distributor.
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    created = cls.client.post(REPOSITORY_PATH, repo_body)
    cls.repo = cls.client.get(created['_href'], params={'details': True})
def setUpClass(cls):
    """Create and sync a repository."""
    super().setUpClass()
    api_client = api.Client(cls.cfg, api.json_handler)
    # Repository with a signed-RPM feed and one distributor.
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    repo = api_client.post(REPOSITORY_PATH, repo_body)
    cls.resources.add(repo['_href'])
    sync_repo(cls.cfg, repo)
    # Store the detailed view for use by the tests.
    cls.repo = api_client.get(repo['_href'], params={'details': True})
def test_all(self):
    """Sync a repository, change its feed, and sync it again."""
    if check_issue_3104(self.cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    # Create, sync and publish repositories A and B.
    repos = []
    for _ in range(2):
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repos.append(self.create_sync_publish_repo(body))
    # Create repository C, let it sync from repository A, and publish it.
    body = gen_repo()
    body['importer_config']['feed'] = self.get_feed(repos[0])
    # A and B are served over self-signed HTTPS, so validation is off.
    body['importer_config']['ssl_validation'] = False
    body['distributors'] = [gen_distributor()]
    repo = self.create_sync_publish_repo(body)
    # Update repository C. Its feed now points at repository B.
    client = api.Client(self.cfg, api.json_handler)
    feed = self.get_feed(repos[1])
    client.put(repo['importers'][0]['_href'],
               {'importer_config': {
                   'feed': feed
               }})
    repo = client.get(repo['_href'], params={'details': True})
    self.assertEqual(repo['importers'][0]['config']['feed'], feed)
    # Sync and publish repository C.
    sync_repo(self.cfg, repo)
    publish_repo(self.cfg, repo)
    # Verify a published unit is byte-identical to the fixture RPM.
    rpm = utils.http_get(RPM_UNSIGNED_URL)
    response = get_unit(self.cfg, repo['distributors'][0], RPM)
    with self.subTest():
        self.assertIn(response.headers['content-type'],
                      ('application/octet-stream', 'application/x-rpm'))
    with self.subTest():
        self.assertEqual(rpm, response.content)
def create_sync_modular_repo(self):
    """Create a repo with feed pointing to modular data and sync it.

    :returns: repo data that is created and synced with modular content.
    """
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_WITH_MODULES_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    repo = self.client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(self.client.delete, repo['_href'])
    sync_repo(self.cfg, repo)
    # Return the detailed view so callers can inspect unit counts.
    return self.client.get(repo['_href'], params={'details': True})
def test_01_set_up(self):
    """Create and publish a repo, and fetch and parse its ``repomd.xml``."""
    client = api.Client(self.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    # ``self.repo`` is shared state; merge in the created repo and its
    # detailed view for later tests in this class.
    self.repo.update(client.post(REPOSITORY_PATH, body))
    self.repo.update(client.get(self.repo['_href'],
                                params={'details': True}))
    publish_repo(self.cfg, self.repo)
    # Cache the parsed repomd.xml root on the class for later tests.
    type(self).root_element = get_repodata_repomd_xml(
        self.cfg,
        self.repo['distributors'][0],
    )
def test_all(self):
    """Check for content synced from a feed with PULP_DISTRIBUTION.xml."""
    if self.cfg.pulp_version < version.Version('2.11.2'):
        self.skipTest(
            'PULP_DISTRIBUTION.xml improved parsing is available on Pulp '
            '2.11.2+')
    client = api.Client(self.cfg, api.json_handler)
    distributor = gen_distributor()
    # auto_publish makes the sync below also publish the repository.
    distributor['auto_publish'] = True
    body = gen_repo()
    body['distributors'] = [distributor]
    body['importer_config'] = {
        'feed': RPM_WITH_PULP_DISTRIBUTION_FEED_URL,
    }
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    sync_repo(self.cfg, repo)
    repo = client.get(repo['_href'], params={'details': True})
    self.assertEqual(repo['content_unit_counts']['distribution'], 1)
    cli_client = cli.Client(self.cfg, cli.code_handler)
    relative_url = repo['distributors'][0]['config']['relative_url']
    # Read the published PULP_DISTRIBUTION.xml from the server.
    pulp_distribution = cli_client.run((
        'cat',
        os.path.join(
            '/var/lib/pulp/published/yum/http/repos/',
            relative_url,
            'PULP_DISTRIBUTION.xml',
        ),
    ), sudo=True).stdout
    # make sure published repository PULP_DISTRIBUTION.xml does not include
    # any extra file from the original repo's PULP_DISTRIBUTION.xml under
    # metadata directory
    self.assertNotIn('metadata/productid', pulp_distribution)
    release_info = cli_client.run((
        'cat',
        os.path.join(
            '/var/lib/pulp/published/yum/http/repos/',
            relative_url,
            'release-notes/release-info',
        ),
    ), sudo=True).stdout
    # Fetch the same file straight from the fixture feed for comparison.
    response = requests.get(
        urljoin(
            urljoin(RPM_WITH_PULP_DISTRIBUTION_FEED_URL, 'release-notes/'),
            'release-info',
        ))
    # make sure published repository has extra files outside the metadata
    # directory from the original repo's PULP_DISTRIBUTION.xml
    self.assertEqual(release_info, response.text)
def test_update_on_copy(self):
    """Check if copying units into a repo updates ``last_unit_added``.

    Do the following:

    1. Create a repository with a feed and sync it.
    2. Create a second repository. Assert the second repository's
       ``last_unit_added`` attribute is null.
    3. Copy a content unit from first repository to the second. Assert
       the second repository's ``last_unit_added`` attribute is non-null.
    4. Publish the second repository. Assert its ``last_unit_added``
       attribute is non-null.
    """
    if not selectors.bug_is_fixed(2688, self.cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2688')
    # create a repo with a feed and sync it
    sync_repo(self.cfg, self.repo)
    self.repo = self.client.get(self.repo['_href'],
                                params={'details': True})
    # create a second repository
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo2 = self.client.post(REPOSITORY_PATH, body)
    self.addCleanup(self.client.delete, repo2['_href'])
    repo2 = self.client.get(repo2['_href'], params={'details': True})
    with self.subTest(comment='after repository creation'):
        self.assertIsNone(repo2['last_unit_added'])
    # copy a content unit from the first repo to the second
    self.client.post(
        urljoin(repo2['_href'], 'actions/associate/'), {
            'source_repo_id': self.repo['id'],
            'criteria': {
                'filters': {
                    'unit': {
                        'name': 'bear'
                    }
                },
                'type_ids': ['rpm'],
            },
        })
    repo2 = self.client.get(repo2['_href'], params={'details': True})
    with self.subTest(comment='after unit association'):
        self.assertIsNotNone(repo2['last_unit_added'], repo2)
    # publish the second repo
    publish_repo(self.cfg, repo2)
    repo2 = self.client.get(repo2['_href'], params={'details': True})
    with self.subTest(comment='after repository publish'):
        self.assertIsNotNone(repo2['last_unit_added'], repo2)
def test_all(self):
    """Test that recursive copy of erratas copies RPM packages.

    This test targets the following issues:

    * `Pulp Smash #769 <https://github.com/PulpQE/pulp-smash/issues/769>`_
    * `Pulp #3004 <https://pulp.plan.io/issues/3004>`_

    Do the following:

    1. Create and sync a repository with errata, and RPM packages.
    2. Create second repository.
    3. Copy units from from first repository to second repository using
       ``recursive`` as true, and filter ``type_id`` as ``erratum``.
    4. Assert that RPM packages were copied.
    """
    cfg = config.get_config()
    if not selectors.bug_is_fixed(3004, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/3004')
    repos = []
    client = api.Client(cfg, api.json_handler)
    # Source repository: synced from a feed that carries errata.
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UPDATED_INFO_FEED_URL
    body['distributors'] = [gen_distributor()]
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[0]['_href'])
    sync_repo(cfg, repos[0])
    # Create a second repository.
    repos.append(client.post(REPOSITORY_PATH, gen_repo()))
    self.addCleanup(client.delete, repos[1]['_href'])
    # Copy data to second repository. Only errata are selected, but
    # ``recursive`` should pull in the RPMs the errata reference.
    client.post(
        urljoin(repos[1]['_href'], 'actions/associate/'),
        {
            'source_repo_id': repos[0]['id'],
            'override_config': {
                'recursive': True
            },
            'criteria': {
                'filters': {},
                'type_ids': ['erratum']
            },
        },
    )
    # Assert that RPM packages were copied.
    units = search_units(cfg, repos[1], {'type_ids': ['rpm']})
    self.assertGreater(len(units), 0)
def test_all(self):
    """Publish a repository with the repoview feature on and off."""
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.9'):
        self.skipTest('https://pulp.plan.io/issues/189')
    # generate_sqlite (needed by repoview) is unsupported under FIPS.
    if utils.fips_is_supported(cfg) and utils.fips_is_enabled(cfg):
        self.skipTest('https://pulp.plan.io/issues/3775')
    # Create a repo, and add content
    client = api.Client(cfg)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body).json()
    self.addCleanup(client.delete, repo['_href'])
    rpm = utils.http_get(RPM_UNSIGNED_URL)
    upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
    # Get info about the repo distributor
    repo = client.get(repo['_href'], params={'details': True}).json()
    pub_path = urljoin(
        '/pulp/repos/',
        repo['distributors'][0]['config']['relative_url'])
    # Publish the repo: without repoview there must be no redirect.
    publish_repo(cfg, repo)
    response = client.get(pub_path)
    with self.subTest(comment='first publish'):
        self.assertEqual(len(response.history), 0, response.history)
    # Publish the repo a second time, with repoview enabled: the request
    # must now redirect to the generated repoview index page.
    publish_repo(
        cfg, repo, {
            'id': repo['distributors'][0]['id'],
            'override_config': {
                'generate_sqlite': True,
                'repoview': True
            },
        })
    response = client.get(pub_path)
    with self.subTest(comment='second publish'):
        self.assertEqual(len(response.history), 1, response.history)
        self.assertEqual(
            response.request.url,
            urljoin(response.history[0].request.url,
                    'repoview/index.html'))
    # Publish the repo a third time: plain publish must drop the redirect.
    if not selectors.bug_is_fixed(2349, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2349')
    publish_repo(cfg, repo)
    response = client.get(pub_path)
    with self.subTest(comment='third publish'):
        self.assertEqual(len(response.history), 0, response.history)
def _create_sync_repo(self, cfg):
    """Create and sync a repository. Return a detailed dict of repo info.

    Also, schedule the repository for deletion with ``addCleanup()``.
    """
    api_client = api.Client(cfg, api.json_handler)
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    repo = api_client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(api_client.delete, repo['_href'])
    sync_repo(cfg, repo)
    return api_client.get(repo['_href'], params={'details': True})