def test_create_tag(self):
    """Check if a tag can be created.

    Upload a new ``docker_tag`` unit that points at a randomly chosen
    manifest already present in the repository. Verify that searching
    for the tag by name finds exactly one unit referencing that
    manifest's digest, and that the repository holds one more tag than
    before.
    """
    tag_name = utils.uuid4()
    # Any manifest already in the repo can serve as the tag's target.
    random_manifest = random.choice(search_units(
        self.cfg, self.repo, {'type_ids': ['docker_manifest']}))
    # Create the tag
    import_upload(self.cfg, self.repo, {
        'unit_type_id': 'docker_tag',
        'unit_key': {
            'repo_id': self.repo['id'],
            'name': tag_name,
        },
        'unit_metadata': {
            'name': tag_name,
            'digest': random_manifest['metadata']['digest'],
        },
    })
    # Fetch the created tag
    tag = search_units(self.cfg, self.repo, {
        'type_ids': ['docker_tag'],
        'filters': {'unit': {'name': tag_name}},
    })
    self.assertEqual(len(tag), 1)
    tag = tag.pop()
    # The new tag must reference the manifest chosen above.
    self.assertEqual(
        tag['metadata']['manifest_digest'],
        random_manifest['metadata']['digest']
    )
    # The repo should now hold exactly one more tag than before.
    self.assertEqual(len(self._get_tags()), len(self.tags) + 1)
def test_defaults(self):
    """Verify that default parameters are correctly set."""
    with mock.patch.object(api, "Client") as client:
        search_units(mock.Mock(), {"_href": "foo/bar/"})
        # The client must be constructed with the JSON handler, and the
        # POST body must default to an empty criteria document.
        handler = client.call_args[0][1]
        post_body = client.return_value.post.call_args[0][1]
        self.assertEqual(handler, api.json_handler)
        self.assertEqual(post_body, {"criteria": {}})
def test_recursive_noconservative_dependency(self):
    """Recursive, non-conservative, and ``walrus-0.71`` on B.

    Do the following:

    1. Copy ``chimpanzee`` RPM package from repository A to B using:
       ``recursive`` as True, ``recursive_conservative`` as False, and
       an older version of walrus package is present on the repo B
       before the copy.
    2. Assert that total number of RPM of units copied is equal to
       ``6``, and the walrus package version is equal to both ``5.21``
       and ``0.71``.

    Additional permutation added as ``--recursive`` should ensure the
    ``latest`` version of the RPM is also copied.
    """
    repo = self.copy_units(True, False, True)
    # Expected versions of the walrus RPM package: the old one already
    # on repo B plus the latest one pulled in by the recursive copy.
    expected_versions = ['5.21', '0.71']
    # Search and return RPM packages after copied on B
    versions = [
        unit['metadata']['version']
        for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']})
        if unit['metadata']['name'] == 'walrus'
    ]
    self.assertEqual(len(versions), 2, versions)
    self.assertEqual(sorted(versions), sorted(expected_versions), versions)
    dst_unit_ids = [
        unit['metadata']['name']
        for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']})
    ]
    # Expect to find one more unit since old and new version of
    # walrus are now on repo B
    self.assertEqual(len(dst_unit_ids), 6, dst_unit_ids)
def test_norecursive_conservative_dependency(self):
    """Non-recursive, conservative, with old dependency.

    Do the following:

    1. Copy ``chimpanzee`` RPM package from repository A to B using:
       ``recursive`` as False, ``recursive_conservative`` as True, and
       an older version of walrus package is present on the repo B
       before the copy.
    2. Assert that total number of RPM of units is equal to ``5``, and
       the walrus package version is equal to ``0.71``.
    """
    repo = self.copy_units(False, True, True)
    # Conservative copy must keep the old walrus (0.71) rather than
    # pulling in a newer version.
    versions = [
        unit['metadata']['version']
        for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']})
        if unit['metadata']['name'] == 'walrus'
    ]
    self.assertEqual(len(versions), 1, versions)
    self.assertEqual(versions[0], '0.71', versions)
    dst_unit_ids = [
        unit['metadata']['name']
        for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']})
    ]
    self.assertEqual(len(dst_unit_ids), 5, dst_unit_ids)
def test_03_compare_repos(self):
    """Verify the two repositories contain the same content unit."""
    unit_ids = [
        search_units(self.cfg, repo)[0]['unit_id']
        for repo in self.repos[:2]
    ]
    self.assertEqual(unit_ids[0], unit_ids[1])
def test_defaults(self):
    """Verify that default parameters are correctly set."""
    with mock.patch.object(api, 'Client') as client:
        search_units(mock.Mock(), {'_href': 'foo/bar/'})
        # search_units should build its client with the JSON handler
        # and POST an empty criteria document by default.
        self.assertEqual(client.call_args[0][1], api.json_handler)
        body_sent = client.return_value.post.call_args[0][1]
        self.assertEqual(body_sent, {'criteria': {}})
def test_02_copy_tags_user_metadata(self):
    """Copy tags with user_metadata from one repository to another.

    Assert the user metadata associated with a tag is present in both
    repositories.

    Steps:

    1. Add user metadata to the first tag in the source repo.
    2. Copy the tags from one repo to the other.
    3. Verify that the user_metadata is copied to the other repo.

    This test targets the following

    * `Pulp #3242 <https://pulp.plan.io/issues/3242>`_.
    * `Pulp-2-tests #72
      <https://github.com/PulpQE/Pulp-2-Tests/issues/72>`_.
    """
    if not selectors.bug_is_fixed(3892, self.cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/3892')
    # Step 1
    tag_first_repo = search_units(
        self.cfg, self.repo, {'type_ids': ['docker_tag']}
    )[0]
    user_metadata = {
        'dummy_key_1': 'dummy_value_1',
        'dummy_key_2': 'dummy_value_2',
    }
    self.set_user_metadata(tag_first_repo, user_metadata)
    # Step 2
    repo = self.client.post(REPOSITORY_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['_href'])
    # Associate (copy) every docker_tag unit into the new repo.
    self.client.post(urljoin(repo['_href'], 'actions/associate/'), {
        'source_repo_id': self.repo['id'],
        'criteria': {'filters': {}, 'type_ids': ['docker_tag']},
    })
    # Look up the copied tag by name and schema version so the
    # assertion below targets the same unit that was annotated.
    units = search_units(
        self.cfg, repo, {
            'type_ids': ['docker_tag'],
            'filters': {
                'unit': {
                    'name': tag_first_repo['metadata']['name'],
                    'schema_version':
                        tag_first_repo['metadata']['schema_version']
                }
            },
        })
    # Step 3
    self.assertEqual(
        units[0]['metadata']['pulp_user_metadata'], user_metadata, units)
def test_all(self):
    """Verify how ISO repos handles changes in content already in Pulp.

    The main goal of this test is to verify how ISO repository handles
    updates in content already in Pulp.

    For this test two different feed urls will be used. These urls
    contain the same amount of units, the units have the same type and
    name in both urls, but different content thereafter different
    checksum values.

    To recreate this dynamic scenario of change. After the repository
    is synced for the first time the feed url is updated, and the
    repository is synced again.

    This test targets the following issues:

    * `Pulp Smash #715 <https://github.com/PulpQE/pulp-smash/issues/715>`_
    * `Pulp #2773 <https://pulp.plan.io/issues/2773>`_
    * `Pulp #3047 <https://pulp.plan.io/issues/3047>`_
    * `Pulp #3100 <https://pulp.plan.io/issues/3100>`_

    Do the following:

    1. Create and sync an ISO repository.
    2. Update the repository's feed URL, and sync it. This simulates a
       change in the contents of the source ISOs.
    3. Assert that number of units remain the same, but the content has
       changed.
    """
    cfg = config.get_config()
    for issue_id in (2773, 3047, 3100, 4857, 4865):
        if not selectors.bug_is_fixed(issue_id, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/' + str(issue_id))
    # Step 1
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPOSITORY_PATH, _gen_iso_repo(FILE_FEED_URL))
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(cfg, repo)
    units_pre = search_units(cfg, repo)
    # Step 2: point the importer at a feed with identically named units
    # but different content, then re-sync.
    client.put(repo['importers'][0]['_href'], {'importer_config': {
        'feed': FILE2_FEED_URL
    }})
    sync_repo(cfg, repo)
    units_post = search_units(cfg, repo)
    # Step 3
    self.assertEqual(len(units_pre), len(units_post))
    self.check_names(units_pre, units_post)
    self.check_checksums(units_pre, units_post)
def test_all(self):
    """Copy content between OSTree repositories with a filter.

    Do the following:

    1. Create a pair of repositories, and populate the first.
    2. Randomly select a unit from the first repository, and copy it to
       the second repository.
    3. Verify that the selected unit is the only one in the second
       repository.
    """
    repos = []
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    # Create and populate a source repository.
    body = gen_repo()
    body['importer_config']['feed'] = OSTREE_FEED
    body['importer_config']['branches'] = OSTREE_BRANCHES
    body['distributors'] = [gen_distributor()]
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[0]['_href'])
    sync_repo(cfg, repos[0])
    # Create a destination repository.
    repos.append(client.post(REPOSITORY_PATH, gen_repo()))
    self.addCleanup(client.delete, repos[1]['_href'])
    # Copy a random unit between the repos, and verify the result.
    src_unit_id = random.choice(search_units(cfg, repos[0]))['metadata']['_id']
    client.post(
        urljoin(repos[1]['_href'], 'actions/associate/'),
        {
            'source_repo_id': repos[0]['id'],
            'criteria': {
                'filters': {
                    'unit': {
                        '_id': src_unit_id
                    }
                },
                'type_ids': ['ostree'],
            },
        })
    # Only the selected unit may appear in the destination repo.
    dst_unit_ids = [
        unit['metadata']['_id']
        for unit in search_units(cfg, repos[1], {'type_ids': ['ostree']})
    ]
    self.assertEqual([src_unit_id], dst_unit_ids)
def test_all(self):
    """Check if Pulp only associate missing repo content.

    Sync a repository, remove a random sample of its RPM units, sync
    again, and assert that the second sync re-adds exactly the removed
    units (selective association, available on Pulp 2.11+).
    """
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.11'):
        self.skipTest(
            'Selective association is available on Pulp 2.11+ see Pulp '
            '#2457 for more information'
        )
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    sync_repo(cfg, repo)
    rpm_units = (
        _get_units_by_type(search_units(cfg, repo), 'rpm')
    )
    # Remove at least one and fewer than 1/4 of the available units.
    # randrange(1, n) guards against an empty sample, which would make
    # the added_count assertion below pass vacuously (0 == 0).
    to_remove = random.sample(
        rpm_units,
        random.randrange(1, int(RPM_UNSIGNED_FEED_COUNT / 4)))
    for unit in to_remove:
        _remove_unit(cfg, repo, unit)
    # A plain re-sync should associate only the missing units.
    report = client.post(urljoin(repo['_href'], 'actions/sync/'))
    tasks = tuple(api.poll_spawned_tasks(cfg, report))
    self.assertEqual(len(tasks), 1, tasks)
    self.assertEqual(
        tasks[0]['result']['added_count'], len(to_remove), to_remove)
def test_all(self):
    """Test that uploading DRPM with checksumtype specified works.

    Upload a DRPM with an explicit ``checksumtype`` of ``sha256`` and
    verify that exactly one unit lands in the repository with the
    expected filename.
    """
    # Both issues concern DRPM upload handling; skip until fixed.
    if not selectors.bug_is_fixed(1806, self.cfg.pulp_version):
        raise unittest.SkipTest('https://pulp.plan.io/issues/1806')
    if not selectors.bug_is_fixed(2627, self.cfg.pulp_version):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2627')
    client = api.Client(self.cfg)
    repo = client.post(REPOSITORY_PATH, gen_repo()).json()
    self.addCleanup(client.delete, repo['_href'])
    drpm = utils.http_get(DRPM_UNSIGNED_URL)
    upload_import_unit(
        self.cfg,
        drpm,
        {
            'unit_type_id': 'drpm',
            'unit_metadata': {
                'checksumtype': 'sha256'
            },
        },
        repo,
    )
    units = search_units(self.cfg, repo, {})
    self.assertEqual(len(units), 1, units)
    # Test if DRPM extracted correct metadata for creating filename.
    self.assertEqual(
        units[0]['metadata']['filename'],
        DRPM,
    )
def test_update_tag_another_repo(self):
    """Check if tagging fail for a manifest from another repo.

    Create and sync a second docker repository, then try to tag one of
    its manifests from this test's repository. The upload task must
    fail with an error naming the missing digest, and this repository's
    tag count must be unchanged.
    """
    other = create_docker_repo(self.cfg, 'library/swarm')
    self.addCleanup(api.Client(self.cfg).delete, other['_href'])
    sync_repo(self.cfg, other)
    other = api.Client(self.cfg, api.json_handler).get(
        other['_href'], params={'details': True})
    # A manifest that exists only in the *other* repository.
    other_manifest = random.choice(search_units(
        self.cfg, other, {'type_ids': ['docker_manifest']}))
    tag_name = utils.uuid4()
    with self.assertRaises(TaskReportError) as context:
        import_upload(self.cfg, self.repo, {
            'unit_type_id': 'docker_tag',
            'unit_key': {
                'repo_id': self.repo['id'],
                'name': tag_name,
            },
            'unit_metadata': {
                'name': tag_name,
                'digest': other_manifest['metadata']['digest'],
            },
        })
    self.assertEqual(
        'Manifest with digest {} could not be found in repository {}.'
        .format(other_manifest['metadata']['digest'], self.repo['id']),
        context.exception.task['error']['description']
    )
    # No tag should have been created by the failed upload.
    self.assertEqual(len(self._get_tags()), len(self.tags))
def do_test(self, feed, type_ids):
    """Remove units from a repo and make assertions about it.

    Do the following:

    1. Create and sync a repository with the given ``feed``.
    2. For each type ID in ``type_ids``, remove a content unit of that
       type from the repository. See :meth:`do_remove_unit`.
    3. Assert the correct units are still in the repository. The
       repository should have all the units that were originally synced
       into the repository, minus those that have been removed.
    4. Remove a non-existent unit from the repository. Assert that the
       ``last_unit_removed`` timestamp was not updated.
    """
    body = gen_repo()
    body['importer_config']['feed'] = feed
    client = api.Client(self.cfg, api.json_handler)
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(self.cfg, repo)
    # Snapshot the synced units. NOTE(review): do_remove_unit appears
    # to record removals in self.removed_units — confirm in its body.
    self.initial_units = search_units(self.cfg, repo)
    for type_id in type_ids:
        with self.subTest(type_id=type_id):
            self.do_remove_unit(type_id, repo)
    with self.subTest('remaining units'):
        # No removed unit may still be present in the repository.
        removed_ids = {_get_unit_id(unit) for unit in self.removed_units}
        remaining_ids = {
            _get_unit_id(unit)
            for unit in search_units(self.cfg, repo)
        }
        self.assertEqual(removed_ids & remaining_ids, set())
    if selectors.bug_is_fixed(2630, self.cfg.pulp_version):
        with self.subTest('last removed unit'):
            lur_before = self.get_repo_last_unit_removed(repo)
            time.sleep(1)  # ensure last_unit_removed increments
            # Select an unit and mess it up
            unit = random.choice(self.initial_units).copy()
            unit_id_name, _ = _get_unit_id(unit)
            unit['metadata'][unit_id_name] = utils.uuid4()
            # Remove the non-existent unit
            _remove_unit(self.cfg, repo, unit)
            # Removing a unit that does not exist must not bump the
            # last_unit_removed timestamp.
            lur_after = self.get_repo_last_unit_removed(repo)
            self.assertEqual(lur_before, lur_after)
def verify_package_types(self, cfg, repo):
    """Assert sdist and bdist_wheel shelf-reader packages were synced.

    This test targets `Pulp #1883 <https://pulp.plan.io/issues/1883>`_.
    """
    found_types = set()
    for unit in search_units(cfg, repo):
        found_types.add(unit['metadata']['packagetype'])
    self.assertEqual(found_types, {'sdist', 'bdist_wheel'})
def search_docker_units(self, repo, unit_type):
    """Return docker units filtered by type."""
    criteria = {
        'type_ids': [unit_type],
        'filters': {'unit': {}},
    }
    return search_units(self.cfg, repo, criteria)
def test_update_tag(self): """Check if a tag can be updated to a new manifest. Do the following: 1. Find the tag in this test's docker repository whose name is "latest." Make note of the manifest it references. 2. Pick some other manifest. Update the repository so that the "latest" tag references the chosen manifest. 3. Find the tag in this test's docker repository whose name is "latest." Assert it references the chosen manifest. """ # Find the "latest" tag. tag = self.get_latest_tags() self.assertEqual(len(tag), 1) tag = tag.pop() # Find the manifest the "latest" tag references. old_manifest = search_units(self.cfg, self.repo, { 'type_ids': ['docker_manifest'], 'filters': { 'unit': {'digest': tag['metadata']['manifest_digest']} }, }) self.assertEqual(len(old_manifest), 1) old_manifest = old_manifest.pop() # Pick a new manifest. manifests = self.get_manifests() manifests.remove(old_manifest) new_manifest = random.choice(manifests) # Make the "latest" tag reference the new manifest. import_upload(self.cfg, self.repo, { # Identify the tag being updated... 'unit_key': { 'name': tag['metadata']['name'], 'repo_id': tag['metadata']['repo_id'], }, 'unit_type_id': 'docker_tag', # ...and provide changed attributes. 'unit_metadata': { 'digest': new_manifest['metadata']['digest'], 'name': tag['metadata']['name'], }, }) # Find the "latest" tag. tag = self.get_latest_tags() self.assertEqual(len(tag), 1) tag = tag.pop() # Assert the tag references the correct manifest. tag_digest = tag['metadata']['manifest_digest'] with self.subTest(): self.assertNotEqual(tag_digest, old_manifest['metadata']['digest']) with self.subTest(): self.assertEqual(tag_digest, new_manifest['metadata']['digest'])
def get_manifests(self):
    """Return all manifests in this test's repo.

    On Pulp 2.13 and newer, restrict the search to schema v1
    manifests. See :meth:`get_latest_tags`.
    """
    criteria = {'type_ids': ['docker_manifest']}
    if not self.cfg.pulp_version < Version('2.13'):
        criteria['filters'] = {'unit': {'schema_version': 1}}
    return search_units(self.cfg, self.repo, criteria)
def test_non_recursive(self):
    """Test simple copy of an unit for a repository with rich/weak
    dependencies.

    See :meth:`do_test`.
    """
    repo = self.do_test(False)
    copied_names = [
        unit['metadata']['name']
        for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']})
    ]
    # Without recursion only the requested unit itself is copied.
    self.assertEqual(len(copied_names), 1, copied_names)
def test_all(self):
    """Test that recursive copy of erratas copies RPM packages.

    This test targets the following issues:

    * `Pulp Smash #769 <https://github.com/PulpQE/pulp-smash/issues/769>`_
    * `Pulp #3004 <https://pulp.plan.io/issues/3004>`_

    Do the following:

    1. Create and sync a repository with errata, and RPM packages.
    2. Create second repository.
    3. Copy units from from first repository to second repository
       using ``recursive`` as true, and filter ``type_id`` as
       ``erratum``.
    4. Assert that RPM packages were copied.
    """
    cfg = config.get_config()
    if not selectors.bug_is_fixed(3004, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/3004')
    repos = []
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UPDATED_INFO_FEED_URL
    body['distributors'] = [gen_distributor()]
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[0]['_href'])
    sync_repo(cfg, repos[0])
    # Create a second repository.
    repos.append(client.post(REPOSITORY_PATH, gen_repo()))
    self.addCleanup(client.delete, repos[1]['_href'])
    # Copy data to second repository. Only erratum units are selected,
    # but ``recursive`` should drag their RPMs along.
    client.post(
        urljoin(repos[1]['_href'], 'actions/associate/'),
        {
            'source_repo_id': repos[0]['id'],
            'override_config': {
                'recursive': True
            },
            'criteria': {
                'filters': {},
                'type_ids': ['erratum']
            },
        },
    )
    # Assert that RPM packages were copied.
    units = search_units(cfg, repos[1], {'type_ids': ['rpm']})
    self.assertGreater(len(units), 0)
def test_recursive(self):
    """Test recursive copy for a repository with rich/weak dependencies.

    See :meth:`do_test`.
    """
    repo = self.do_test(True, False)
    names = []
    for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']}):
        names.append(unit['metadata']['name'])
    # Recursion should install the full dependency closure.
    expected_count = RPM2_RICH_WEAK_DATA['total_installed_packages']
    self.assertEqual(len(names), expected_count, names)
def test_additional_repos_copy_one_destination(self):
    """Multiple source repositories recursive copy - one repo destination.

    1. Copy ``walrus - 0.71`` module to repository 3, and all the
       dependencies RPMS should be solved and copied as well.
    """
    repo_1 = self.create_sync_repo(RPM_WITH_MODULES_MODIFIED_FEED_URL)
    repo_2 = self.create_sync_repo(RPM_UNSIGNED_MODIFIED_FEED_URL)
    repo_3 = self.client.post(REPOSITORY_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo_3['_href'])
    self.client.post(
        urljoin(repo_3['_href'], 'actions/associate/'),
        {
            'source_repo_id': repo_1['id'],
            'override_config': {
                'recursive_conservative': True,
                # Let the dependency solver also pull units from
                # repo_2 into repo_3.
                'additional_repos': {
                    repo_2['id']: repo_3['id']
                },
            },
            'criteria': {
                'filters': {
                    'unit': {
                        '$and': [{
                            'name': MODULE_FIXTURES_PACKAGE_STREAM['name'],
                            'stream':
                                MODULE_FIXTURES_PACKAGE_STREAM['stream'],
                        }]
                    }
                },
                'type_ids': ['modulemd'],
            },
        },
    )
    repo_3 = self.client.get(repo_3['_href'], params={'details': True})
    # ``sorted`` accepts any iterable — no need to build an
    # intermediate list first (flake8-comprehensions C4xx).
    repo_3_rpms = sorted(
        unit['metadata']['filename']
        for unit in search_units(self.cfg, repo_3, {'type_ids': ['rpm']})
    )
    self.assertEqual(repo_3_rpms, RPM_PACKAGES_MULTIPLE_REPOS, repo_3_rpms)
    self.assertEqual(repo_3['content_unit_counts']['modulemd'], 1, repo_3)
    self.assertEqual(
        repo_3['content_unit_counts']['modulemd_defaults'], 1, repo_3)
def test_recursive_conservative(self):
    """Test recursive, conservative copy for rich/weak dependencies.

    See :meth:`do_test`.
    """
    if self.cfg.pulp_version < Version('2.18.1'):
        raise unittest.SkipTest('This test requires Pulp 2.18.1 or newer.')
    repo = self.do_test(True, True)
    names = []
    for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']}):
        names.append(unit['metadata']['name'])
    # The conservative recursive copy still installs the full closure.
    self.assertEqual(
        len(names), RPM2_RICH_WEAK_DATA['total_installed_packages'], names)
def test_all(self):
    """Verify errata packages appearing in more than one repository.

    Do the following:

    1. Sync 2 different repositories with same errata name, but
       different pkglist.
    2. Search for the errata name.
    3. Assert that RPMS present in both repositories will be returned
       if an erratum exists in multiple repositories.
    """
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.20'):
        raise unittest.SkipTest('This test requires Pulp 2.20 or newer.')
    client = api.Client(cfg, api.json_handler)
    repos = []
    urls = [
        RPM_PACKAGES_UPDATEINFO_FEED_URL,
        RPM_PKGLISTS_UPDATEINFO_FEED_URL,
    ]
    # Step 1: create and sync one repository per feed.
    for url in urls:
        body = gen_repo()
        body['importer_config']['download_policy'] = 'on_demand'
        body['importer_config']['feed'] = url
        body['distributors'] = [gen_distributor()]
        repos.append(client.post(REPOSITORY_PATH, body))
        self.addCleanup(client.delete, repos[-1]['_href'])
        sync_repo(cfg, repos[-1])
    # Step 2: search the first repo for the shared erratum by id.
    units = search_units(
        cfg,
        repos[0],
        {
            'type_ids': ['erratum'],
            'filters': {
                'unit': {
                    'id': ERRATA_PACKAGES_UPDATEINFO['errata']
                }
            },
        },
    )
    # Step 3: the erratum's first pkglist should name every expected
    # package. ``sorted`` accepts any iterable, so the intermediate
    # list comprehension was unnecessary (flake8-comprehensions C4xx).
    package_names = sorted(
        package['filename']
        for package in units[0]['metadata']['pkglist'][0]['packages']
    )
    self.assertEqual(
        ERRATA_PACKAGES_UPDATEINFO['packages'],
        package_names,
        package_names,
    )
def get_docker_units_count(self, repo, unit_type):
    """Return a count of docker units per unit type.

    The original docstring claimed this returns the units themselves;
    it actually returns counts.

    :param repo: A dict of information about the repository to search.
    :param unit_type: An iterable of docker unit type IDs, e.g.
        ``('docker_tag', 'docker_manifest')``.
    :returns: A dict mapping each type ID to the number of units of
        that type found in ``repo``.
    """
    # The loop variable is a single type ID, not a unit — name it
    # accordingly, and build the mapping with a dict comprehension.
    return {
        type_id: len(search_units(
            self.cfg,
            repo,
            {
                'type_ids': [type_id],
                'filters': {'unit': {}},
            },
        ))
        for type_id in unit_type
    }
def _find_unit(self, repo_href, pkg_url):
    """Search the given repository for a package.

    Search the repository for content units having the same filename as
    ``pkg_url``, verify only one result is found, and return it.
    """
    filename = _get_pkg_filename(pkg_url)
    unit_type = _get_pkg_unit_type(filename)
    if unit_type == 'drpm':
        # DRPMs are stored under drpms/ relative to the repo root.
        filename = 'drpms/' + filename
    criteria = {
        'filters': {'unit': {'filename': {'$in': [filename]}}},
        'type_ids': [unit_type],
    }
    matches = search_units(self.cfg, {'_href': repo_href}, criteria)
    self.assertEqual(len(matches), 1)
    return matches[0]
def test_norecursive_noconservative_nodependency(self):
    """Non-recursive, non-conservative, and no old dependency.

    Do the following:

    1. Copy ``chimpanzee`` RPM package from repository A to B using:
       ``recursive`` as False, ``recursive_conservative`` as False, and
       no older version of walrus package is present on the repo B
       before the copy.
    2. Assert that total number of RPM of units copied is equal to
       ``1``.
    """
    repo = self.copy_units(False, False, False)
    copied = []
    for unit in search_units(self.cfg, repo, {'type_ids': ['rpm']}):
        copied.append(unit['metadata']['name'])
    # Only ``chimpanzee`` itself should have been copied.
    self.assertEqual(len(copied), 1, copied)
def test_all(self):
    """Import a DRPM into a repository and search it for content units."""
    cfg = config.get_config()
    if not selectors.bug_is_fixed(1806, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/1806')
    client = api.Client(cfg)
    response = client.post(REPOSITORY_PATH, gen_repo())
    repo = response.json()
    self.addCleanup(client.delete, repo['_href'])
    upload_import_unit(
        cfg,
        utils.http_get(DRPM_UNSIGNED_URL),
        {'unit_type_id': 'drpm'},
        repo,
    )
    units = search_units(cfg, repo)
    # Exactly one unit: the uploaded DRPM.
    self.assertEqual(len(units), 1)
    # The DRPM's extracted metadata must yield the expected filename.
    self.assertEqual(units[0]['metadata']['filename'], DRPM)
def setUpClass(cls):
    """Import a SRPM into a repository and search it for content units.

    Specifically, this method does the following:

    1. Create a yum repository.
    2. Upload a SRPM into the repository.
    3. Search for all content units in the repository.
    """
    cfg = config.get_config()
    if check_issue_2620(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2620')
    cls.client = api.Client(cfg, api.json_handler)
    cls.repo = cls.client.post(REPOSITORY_PATH, gen_repo())
    srpm = utils.http_get(SRPM_UNSIGNED_URL)
    upload_import_unit(cfg, srpm, {'unit_type_id': 'srpm'}, cls.repo)
    # NOTE(review): safe_handler presumably returns raw responses so
    # tests can inspect status codes as well as bodies — confirm.
    cls.units = search_units(cfg, cls.repo, {}, api.safe_handler)
def get_rpm_names_versions(cfg, repo):
    """Get a dict of a repository's RPMs and their versions.

    :param cfg: Information about a Pulp app.
    :param repo: A dict of information about a repository.
    :returns: The name and versions of each package in the repository,
        with the versions sorted in ascending order. For example:
        ``{'walrus': ['0.71', '5.21']}``.
    """
    names_versions = {}
    for unit in search_units(cfg, repo, {'type_ids': ['rpm']}):
        metadata = unit['metadata']
        names_versions.setdefault(metadata['name'], []).append(
            metadata['version'])
    # Sort each package's versions by RPM version semantics, not
    # lexicographically.
    for versions in names_versions.values():
        versions.sort(key=Version)
    return names_versions
def test_validate_content_type(self):
    """Validates content_type contains correct content.

    Sync and publish a docker v2 repository, request the manifest for a
    random tag, and assert the response's ``Content-Type`` header names
    a docker manifest media type.
    """
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.19'):
        raise unittest.SkipTest('This test requires Pulp 2.19 or newer.')
    # Create, sync and publish docker repo
    repo = self.create_sync_publish_repo(
        cfg, {
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            'upstream_name': get_upstream_name(cfg),
        })
    # Choose a random tag from the repo to check.
    random_tag = random.choice(
        search_units(
            cfg,
            repo,
            {'type_ids': ['docker_tag']},
        ))
    # Build the header URL of the random tag.
    url = '/pulp/docker/v2/{}/manifests/1/{}'.format(
        repo['display_name'],
        random_tag['metadata']['name'],
    )
    # Get the Content-Type of that tag.
    header = api.Client(cfg, api.code_handler).get(url).headers
    # Verify the header names a docker manifest media type, e.g.
    # application/vnd.docker.distribution.manifest.v1+prettyjws.
    # ``all`` accepts a generator directly — wrapping the comprehension
    # in a list was unnecessary (flake8-comprehensions C419).
    content_type = header['Content-Type']
    self.assertTrue(
        all(
            fragment in content_type
            for fragment in (
                'application',
                'vnd.docker',
                'distribution',
                'manifest',
            )
        ),
        content_type)
def test_02_second_publish(self):
    """Add an additional content unit and publish the repository again.

    Re-sync and re-publish, then verify the repo's RPM count, that
    ``updateinfo.xml`` moved to a new path, that the old path returns
    an HTTP error, and that the new path is reachable.
    """
    sync_repo(self.cfg, self.repo)
    publish_repo(self.cfg, self.repo)
    # Record the second publish's updateinfo.xml href alongside the
    # first one captured earlier.
    self.updateinfo_xml_hrefs.append(self.get_updateinfo_xml_href())
    client = api.Client(self.cfg)
    with self.subTest(comment='check number of RPMs in repo'):
        units = (search_units(self.cfg, self.repo, {'type_ids': ('rpm', )}))
        self.assertEqual(len(units), RPM_UNSIGNED_FEED_COUNT)
    with self.subTest(comment='check updateinfo.xml has a new path'):
        # pylint:disable=no-value-for-parameter
        # Unpack the two recorded hrefs as assertNotEqual's arguments.
        self.assertNotEqual(*self.updateinfo_xml_hrefs)
    with self.subTest(comment='check old updateinfo.xml is unavailable'):
        with self.assertRaises(HTTPError):
            client.get(self.updateinfo_xml_hrefs[0])
    with self.subTest(comment='check new updateinfo.xml is available'):
        client.get(self.updateinfo_xml_hrefs[1])