def do_test(self, url):
    """Sync repositories with the file plugin.

    In order to sync a repository a remote has to be associated within
    this repository.

    Do the following:

    1. Create a repository and a remote pointing at ``url``.
    2. Assert that the repository starts at version 0.
    3. Sync the remote.
    4. Assert that a repository version exists and that the correct number
       of units were added and are present in the repo.
    5. Sync the same remote one more time.
    6. Assert that the repository version is unchanged (re-syncing
       identical remote content is a no-op) and that the same units are
       still present.
    """
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_file_remote(url)
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository. A freshly created repository is at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    self.assertIsNotNone(repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)

    # Sync the repository again, reusing the same sync body.
    latest_version_href = repo.latest_version_href
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # No new version is created and the content set is unchanged.
    self.assertEqual(latest_version_href, repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)
def do_sync(self, download_policy):
    """Sync a repository using the supplied ``download_policy``.

    Verify that the first sync creates a repository version containing the
    full fixture content, and that syncing the same remote a second time is
    a no-op: the repository version does not advance and no extra units are
    shown.
    """
    # delete orphans to assure that no content units are present on the
    # file system
    delete_orphans()
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    remote = remote_api.create(gen_file_remote(policy=download_policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # A freshly created repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")

    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_data).task)
    repo = repo_api.read(repo.pulp_href)

    self.assertIsNotNone(repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)

    # Sync again: the version must not advance and content must be unchanged.
    version_before = repo.latest_version_href
    monitor_task(repo_api.sync(repo.pulp_href, sync_data).task)
    repo = repo_api.read(repo.pulp_href)
    self.assertEqual(version_before, repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)
def do_test(self, policy):
    """Access lazy synced content via the content endpoint.

    Sync with the given lazy ``policy`` and verify that the content listing
    succeeds and reports every fixture unit.
    """
    # delete orphans to assure that no content units are present on the
    # file system
    delete_orphans()
    content_api = ContentFilesApi(self.client)
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    remote = remote_api.create(gen_file_remote(policy=policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # A new repository starts at version 0; sync it and re-read its state.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_data).task)
    repo = repo_api.read(repo.pulp_href)

    # Listing must not raise an HTTP error and must report exactly the
    # number of units present in the synced feed.
    units = content_api.list().to_dict()["results"]
    self.assertEqual(len(units), FILE_FIXTURE_COUNT, units)
def do_publish(self, download_policy):
    """Publish a repository that was synced with a lazy ``download_policy``."""
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)
    publication_api = PublicationsFileApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    remote = remote_api.create(gen_file_remote(policy=download_policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync, then publish the latest repository version.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_data).task)
    repo = repo_api.read(repo.pulp_href)

    publish_response = publication_api.create(
        FileFilePublication(repository=repo.pulp_href)
    )
    publication_href = monitor_task(publish_response.task).created_resources[0]
    self.addCleanup(publication_api.delete, publication_href)

    # The publication must reference a concrete repository version.
    publication = publication_api.read(publication_href)
    self.assertIsNotNone(publication.repository_version, publication)
def test_clean_orphan_content_unit(self):
    """Test whether orphaned content units can be cleaned up.

    Sync a repository, remove one content unit (creating a new repository
    version), then delete the version that still referenced it so the unit
    becomes an orphan. Verify that cleanup with a non-zero
    ``orphan_protection_time`` keeps the unit, and cleanup with a zero
    protection time removes it (including its artifact on disk when local
    filesystem storage is in use).
    """
    repo_api = RepositoriesFileApi(self.api_client)
    remote_api = RemotesFileApi(self.api_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_file_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository. A new repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)
    content = choice(get_content(repo.to_dict())[FILE_CONTENT_NAME])

    # Create an orphan content unit by removing it from the repository.
    repo_api.modify(repo.pulp_href, dict(remove_content_units=[content["pulp_href"]]))

    artifacts_api = ArtifactsApi(core_client)

    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact is present on disk.
        # NOTE(review): `ls` is run with sudo here but without sudo at the
        # end of the test — confirm the asymmetry is intended.
        relative_path = artifacts_api.read(content["artifact"]).file
        artifact_path = os.path.join(self.media_root, relative_path)
        cmd = ("ls", artifact_path)
        self.cli_client.run(cmd, sudo=True)

    file_contents_api = ContentFilesApi(self.api_client)

    # Delete first repo version. The previous removed content unit will be
    # an orphan.
    delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertIn(content["pulp_href"], content_units_href)

    content_before_cleanup = file_contents_api.list().count

    # Cleanup with a protection window: orphans younger than 10 minutes
    # must survive.
    orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 10})
    monitor_task(orphans_response.task)

    # assert content was not removed
    content_after_cleanup = file_contents_api.list().count
    self.assertEqual(content_after_cleanup, content_before_cleanup)

    # Cleanup with no protection window: the orphan must be removed.
    orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 0})
    monitor_task(orphans_response.task)

    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertNotIn(content["pulp_href"], content_units_href)

    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact was removed from disk.
        with self.assertRaises(CalledProcessError):
            self.cli_client.run(cmd)
def set_distribution_base_path_and_download_a_content_unit_with_cert(
        file_distribution_href, base_path, file_repository_href, cert_path,
        content_path=None, url_encode=True):
    """Update a distribution's base path and fetch one unit with a client cert.

    The distribution referred to by `file_distribution_href` gets its base
    path set to `base_path`. The certificate at `cert_path` is read from the
    filesystem (urlencoded when `url_encode` is True) and submitted as the
    `X-CLIENT-CERT` header with the content request. When `content_path` is
    not given, a random valid content unit path is picked from the
    FileRepository at `file_repository_href`.

    Args:
        file_distribution_href: The distribution href that is to be updated.
            This must refer to a distribution of type `FileDistribution`.
        base_path: The base path to set on the `distribution`.
        file_repository_href: The repository href used to select a content
            path when `content_path` is not specified.
        cert_path: The file system path to the certificate to be used in the
            content request.
        content_path: The path to the specific content unit to be fetched,
            i.e. the portion of the url after the distribution URL. Optional;
            if unspecified a random, valid content unit is selected instead.
        url_encode: If true, the certificate data read will be urlencoded,
            otherwise it won't be. Optional, defaults to True.

    Returns:
        The downloaded data.
    """
    distribution = set_distribution_base_path(file_distribution_href, base_path)

    if content_path is None:
        # No explicit unit requested: pick a random one from the repository.
        repo_api = RepositoriesFileApi(gen_file_client())
        repo = repo_api.read(file_repository_href)
        content_path = choice(get_file_content_paths(repo.to_dict()))

    cert_data = read_cert_and_urlencode(cert_path) if url_encode else read_cert(cert_path)

    return download_content_unit(
        config.get_config(),
        distribution.to_dict(),
        content_path,
        headers={'X-CLIENT-CERT': cert_data},
    )
def test_duplicate_file_sync(self):
    """Sync a repository with remotes containing same file names.

    1. Create a repository in pulp.
    2. Create two remotes containing the same file.
    3. Check whether the created repo has only one copy of the file.

    This test targets the following issue:

    `Pulp #4738 <https://pulp.plan.io/issues/4738>`_
    """
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)

    # Step 1: one repository.
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    # Step 2: two remotes whose manifests share file names.
    first_remote = remote_api.create(gen_file_remote())
    self.addCleanup(remote_api.delete, first_remote.pulp_href)
    second_remote = remote_api.create(gen_file_remote(url=FILE2_FIXTURE_MANIFEST_URL))
    self.addCleanup(remote_api.delete, second_remote.pulp_href)

    # Sync the first remote and verify the full fixture set was added.
    first_sync = repo_api.sync(
        repo.pulp_href, RepositorySyncURL(remote=first_remote.pulp_href)
    )
    monitor_task(first_sync.task)
    repo = repo_api.read(repo.pulp_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)

    # Step 3: sync the second remote; same-named files replace, not duplicate.
    second_sync = repo_api.sync(
        repo.pulp_href, RepositorySyncURL(remote=second_remote.pulp_href)
    )
    monitor_task(second_sync.task)
    repo = repo_api.read(repo.pulp_href)
    self.assertDictEqual(get_added_content_summary(repo.to_dict()), FILE_FIXTURE_SUMMARY)
def setUpClass(cls):
    """
    Initialize Pulp to make authorization assertions using client certificates.

    0. Create a FileRepository
    1. Create a FileRemote
    2. Sync in a few units we can use to fetch with
    3. Create a Publication
    4. Create a CertGuard with the CA cert used to sign all client certificates
    5. Create a Distribution for the publication that is protected by the CertGuard
    """
    # Cleanups are collected as (callable, argument) pairs; presumably a
    # tearDownClass elsewhere in this file iterates and runs them — confirm.
    cls.teardown_cleanups = []
    cls.cfg = config.get_config()
    file_client = gen_file_client()
    repo_api = RepositoriesFileApi(file_client)
    remote_api = RemotesFileApi(file_client)
    publications = PublicationsFileApi(file_client)
    cls.distributions_api = DistributionsFileApi(file_client)

    cls.repo = repo_api.create(gen_repo())
    cls.teardown_cleanups.append((repo_api.delete, cls.repo.pulp_href))

    # "immediate" policy downloads all artifacts at sync time so every unit
    # is locally available for the download assertions that follow.
    body = gen_file_remote(policy="immediate")
    remote = remote_api.create(body)
    cls.teardown_cleanups.append((remote_api.delete, remote.pulp_href))

    # Sync a Repository
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(cls.repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    cls.repo = repo_api.read(cls.repo.pulp_href)

    # Create a publication.
    publish_data = FileFilePublication(repository=cls.repo.pulp_href)
    publish_response = publications.create(publish_data)
    # NOTE(review): the task result is indexed directly here, while other
    # tests in this file use monitor_task(...).created_resources — confirm
    # which pulp-smash/bindings version this targets.
    created_resources = monitor_task(publish_response.task)
    publication_href = created_resources[0]
    cls.teardown_cleanups.append((publications.delete, publication_href))

    # _setup_content_guard is defined elsewhere in this class; it is
    # expected to create the CertGuard and return its href.
    content_guard_href = cls._setup_content_guard()

    # Create a distribution guarded by the CertGuard.
    body = gen_distribution()
    body["publication"] = publication_href
    body["content_guard"] = content_guard_href
    distribution_response = cls.distributions_api.create(body)
    created_resources = monitor_task(distribution_response.task)
    cls.distribution = cls.distributions_api.read(created_resources[0])
    cls.teardown_cleanups.append((cls.distributions_api.delete, cls.distribution.pulp_href))
def test_clean_specific_orphans(self):
    """Test whether the `content_hrefs` param removes specific orphans but not others"""
    repo_api = RepositoriesFileApi(self.api_client)
    remote_api = RemotesFileApi(self.api_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    remote = remote_api.create(gen_file_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository. A new repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_data).task)
    repo = repo_api.read(repo.pulp_href)

    # Remove two units so they become orphans once the old version is gone.
    file_units = get_content(repo.to_dict())[FILE_CONTENT_NAME]
    content_a = file_units[0]["pulp_href"]
    content_b = file_units[1]["pulp_href"]
    repo_api.modify(repo.pulp_href, dict(remove_content_units=[content_a, content_b]))

    file_contents_api = ContentFilesApi(self.api_client)

    # Delete first repo version. The previous removed content unit will be an orphan.
    delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
    hrefs = [unit["pulp_href"] for unit in file_contents_api.list().to_dict()["results"]]
    self.assertIn(content_a, hrefs)
    self.assertIn(content_b, hrefs)

    # Clean up only content_a.
    orphans_response = self.orphans_cleanup_api.cleanup({"content_hrefs": [content_a]})
    monitor_task(orphans_response.task)

    # content_a is gone; content_b survives.
    hrefs = [unit["pulp_href"] for unit in file_contents_api.list().to_dict()["results"]]
    self.assertNotIn(content_a, hrefs)
    self.assertIn(content_b, hrefs)
def test_access_error(self):
    """HTTP error is not raised when accessing published data."""
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)
    publication_api = PublicationsFileApi(self.client)
    distribution_api = DistributionsFileApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    remote = remote_api.create(gen_file_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_data).task)
    repo = repo_api.read(repo.pulp_href)

    # Publish the latest repository version.
    publish_response = publication_api.create(
        FileFilePublication(repository=repo.pulp_href)
    )
    publication_href = monitor_task(publish_response.task).created_resources[0]
    self.addCleanup(publication_api.delete, publication_href)

    # Distribute the publication.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = distribution_api.create(body)
    distribution_href = monitor_task(distribution_response.task).created_resources[0]
    distribution = distribution_api.read(distribution_href)
    self.addCleanup(distribution_api.delete, distribution.pulp_href)

    # Fetching the manifest must succeed and list every fixture file.
    pulp_manifest = parse_pulp_manifest(
        self.download_pulp_manifest(distribution.to_dict(), "PULP_MANIFEST")
    )
    self.assertEqual(len(pulp_manifest), FILE_FIXTURE_COUNT, pulp_manifest)
def test_second_unit_replaces_the_first(self):
    """Create a duplicate content unit with different ``artifacts`` and same ``relative_path``.

    Artifacts are unique by ``relative_path`` and ``file``.
    """
    delete_orphans()
    content_api = ContentFilesApi(self.client)
    repo_api = RepositoriesFileApi(self.client)
    versions_api = RepositoriesFileVersionsApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    # First unit, backed by a freshly generated artifact.
    first_attrs = gen_file_content_attrs(gen_artifact())
    first_attrs["repository"] = repo.pulp_href
    monitor_task(content_api.create(**first_attrs).task)

    # Second unit: a different artifact (this very file) at the same
    # relative_path, so it must replace the first.
    second_attrs = gen_file_content_attrs(gen_artifact(file=__file__))
    second_attrs["repository"] = repo.pulp_href
    second_attrs["relative_path"] = first_attrs["relative_path"]
    monitor_task(content_api.create(**second_attrs).task)

    # The latest version may contain only the replacement unit.
    latest = versions_api.read(repo_api.read(repo.pulp_href).latest_version_href)
    self.assertEqual(latest.content_summary.present["file.file"]["count"], 1)
def test_clean_orphan_content_unit(self):
    """Test whether orphan content units can be clean up.

    Do the following:

    1. Create, and sync a repo.
    2. Remove a content unit from the repo. This will create a second
       repository version, and create an orphan content unit.
    3. Assert that content unit that was removed from the repo and its
       artifact are present on disk.
    4. Delete orphans.
    5. Assert that the orphan content unit was cleaned up, and its artifact
       is not present on disk.
    """
    repo_api = RepositoriesFileApi(self.api_client)
    remote_api = RemotesFileApi(self.api_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_file_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository. A new repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)
    content = choice(get_content(repo.to_dict())[FILE_CONTENT_NAME])

    # Create an orphan content unit by removing it from the repository.
    repo_api.modify(repo.pulp_href, dict(remove_content_units=[content["pulp_href"]]))

    artifacts_api = ArtifactsApi(core_client)

    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact is present on disk.
        # NOTE(review): `ls` runs with sudo here but without sudo at the end
        # of the test — confirm the asymmetry is intended.
        artifact_path = os.path.join(
            MEDIA_PATH, artifacts_api.read(content["artifact"]).file)
        cmd = ("ls", artifact_path)
        self.cli_client.run(cmd, sudo=True)

    file_contents_api = ContentFilesApi(self.api_client)

    # Delete first repo version. The previous removed content unit will be
    # an orphan.
    delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertIn(content["pulp_href"], content_units_href)

    # Remove orphans; the removed unit must disappear from the listing.
    delete_orphans()
    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertNotIn(content["pulp_href"], content_units_href)

    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact was removed from disk.
        with self.assertRaises(CalledProcessError):
            self.cli_client.run(cmd)
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    client = gen_file_client()
    repo_api = RepositoriesFileApi(client)
    remote_api = RemotesFileApi(client)
    publications = PublicationsFileApi(client)

    body = gen_file_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # Step 1
    repo = repo_api.read(repo.pulp_href)
    # Each single-unit removal creates one more repository version, which
    # guarantees several versions exist to choose from below.
    for file_content in get_content(repo.to_dict())[FILE_CONTENT_NAME]:
        modify_repo(cfg, repo.to_dict(), remove_units=[file_content])
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo.to_dict()))
    non_latest = choice(version_hrefs[:-1])

    # Step 2
    publish_data = FileFilePublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    # NOTE(review): the task result is indexed directly here, while other
    # tests in this file use monitor_task(...).created_resources — confirm
    # which bindings version this targets.
    created_resources = monitor_task(publish_response.task)
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)

    # Step 3
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    # Step 4
    publish_data = FileFilePublication(repository_version=non_latest)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task)
    publication_href = created_resources[0]
    publication = publications.read(publication_href)

    # Step 5
    self.assertEqual(publication.repository_version, non_latest)

    # Step 6: supplying both repository and repository_version is invalid.
    with self.assertRaises(ApiException):
        body = {
            "repository": repo.pulp_href,
            "repository_version": non_latest
        }
        publications.create(body)
def do_test(self, policy):
    """Verify whether content served by pulp can be downloaded.

    The process of publishing content is more involved in Pulp 3 than it
    was under Pulp 2. Given a repository, the process is as follows:

    1. Create a publication from the repository. (The latest repository
       version is selected if no version is specified.) A publication is a
       repository version plus metadata.
    2. Create a distribution from the publication. The distribution defines
       at which URLs a publication is available, e.g.
       ``http://example.com/content/foo/`` and
       ``http://example.com/content/bar/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
    """
    cfg = config.get_config()
    client = gen_file_client()
    repo_api = RepositoriesFileApi(client)
    remote_api = RemotesFileApi(client)
    publications = PublicationsFileApi(client)
    distributions = DistributionsFileApi(client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_file_remote(policy=policy)
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync a Repository
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Create a publication.
    publish_data = FileFilePublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task)
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    # Create a distribution.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = distributions.create(body)
    created_resources = monitor_task(distribution_response.task)
    distribution = distributions.read(created_resources[0])
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # Pick a file, and download it from both Pulp Fixtures…
    unit_path = choice(get_file_content_paths(repo.to_dict()))
    fixtures_hash = hashlib.sha256(
        utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path))).hexdigest()

    # …and Pulp.
    content = download_content_unit(cfg, distribution.to_dict(), unit_path)
    pulp_hash = hashlib.sha256(content).hexdigest()

    # Identical digests prove Pulp served the same bytes as the fixture.
    self.assertEqual(fixtures_hash, pulp_hash)
class RepoVersionRetentionTestCase(unittest.TestCase):
    """Test retain_repo_versions for repositories.

    This test targets the following issues:

    * `Pulp #8368 <https://pulp.plan.io/issues/8368>`_
    """

    def setUp(self):
        """Add content to Pulp.

        This is intentionally a regular per-test instance method. It was
        previously decorated with ``@classmethod``, which turned every
        ``self.<attr>`` assignment into a class attribute and let
        ``self.publications += ...`` in ``_create_repo_versions`` mutate a
        list shared across tests.
        """
        self.cfg = config.get_config()
        self.client = api.Client(self.cfg, api.json_handler)
        self.core_client = CoreApiClient(configuration=self.cfg.get_bindings_config())
        self.file_client = gen_file_client()

        self.content_api = ContentFilesApi(self.file_client)
        self.repo_api = RepositoriesFileApi(self.file_client)
        self.version_api = RepositoriesFileVersionsApi(self.file_client)
        self.distro_api = DistributionsFileApi(self.file_client)
        self.publication_api = PublicationsFileApi(self.file_client)

        # Start from a clean slate, then sample three units to work with.
        delete_orphans()
        populate_pulp(self.cfg, url=FILE_LARGE_FIXTURE_MANIFEST_URL)
        self.content = sample(self.content_api.list().results, 3)
        self.publications = []

    def _create_repo_versions(self, repo_attributes=None):
        """Create a repository and add the sampled units one at a time.

        Each ``modify`` call creates one repository version. ``None``
        (the default) means no extra repository attributes; a mutable
        ``{}`` default is avoided deliberately.
        """
        repo_attributes = repo_attributes or {}
        self.repo = self.repo_api.create(gen_repo(**repo_attributes))
        self.addCleanup(self.repo_api.delete, self.repo.pulp_href)

        if repo_attributes.get("autopublish"):
            self.distro = create_distribution(repository_href=self.repo.pulp_href)
            self.addCleanup(self.distro_api.delete, self.distro.pulp_href)

        for content in self.content:
            result = self.repo_api.modify(
                self.repo.pulp_href, {"add_content_units": [content.pulp_href]}
            )
            monitor_task(result.task)
            self.repo = self.repo_api.read(self.repo.pulp_href)
            # Collect any publications created for the new version (only
            # non-empty when autopublish is enabled).
            self.publications += self.publication_api.list(
                repository_version=self.repo.latest_version_href
            ).results

    def test_retain_repo_versions(self):
        """Test repo version retention."""
        self._create_repo_versions({"retain_repo_versions": 1})

        # Only the newest version survives; it still holds all content.
        versions = self.version_api.list(file_file_repository_href=self.repo.pulp_href).results
        self.assertEqual(len(versions), 1)

        latest_version = self.version_api.read(
            file_file_repository_version_href=self.repo.latest_version_href
        )
        self.assertEqual(latest_version.number, 3)
        self.assertEqual(latest_version.content_summary.present["file.file"]["count"], 3)
        self.assertEqual(latest_version.content_summary.added["file.file"]["count"], 3)

    def test_retain_repo_versions_on_update(self):
        """Test repo version retention when retain_repo_versions is set."""
        self._create_repo_versions()

        versions = self.version_api.list(file_file_repository_href=self.repo.pulp_href).results
        self.assertEqual(len(versions), 4)

        # update retain_repo_versions to 2
        result = self.repo_api.partial_update(self.repo.pulp_href, {"retain_repo_versions": 2})
        monitor_task(result.task)

        versions = self.version_api.list(file_file_repository_href=self.repo.pulp_href).results
        self.assertEqual(len(versions), 2)

        latest_version = self.version_api.read(
            file_file_repository_version_href=self.repo.latest_version_href
        )
        self.assertEqual(latest_version.number, 3)
        self.assertEqual(latest_version.content_summary.present["file.file"]["count"], 3)
        self.assertEqual(latest_version.content_summary.added["file.file"]["count"], 1)

    def test_autodistribute(self):
        """Test repo version retention with autopublish/autodistribute."""
        self._create_repo_versions({"retain_repo_versions": 1, "autopublish": True})

        # all but the last publication should be gone
        for publication in self.publications[:-1]:
            with self.assertRaises(ApiException) as ae:
                self.publication_api.read(publication.pulp_href)
            self.assertEqual(404, ae.exception.status)

        # check that the last publication is distributed
        manifest = download_content_unit(self.cfg, self.distro.to_dict(), "PULP_MANIFEST")
        self.assertEqual(manifest.decode("utf-8").count("\n"), len(self.content))
class CRUDLabelTestCase(BaseLabelTestCase):
    """CRUD labels on repositories."""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.repo = None

    def setUp(self):
        """Create an API client."""
        self.client = FileApiClient(self.cfg.get_bindings_config())
        self.repo_api = RepositoriesFileApi(self.client)

    def _create_repo(self, labels=None):
        """Create a repository, optionally with ``pulp_labels``, and register cleanup.

        ``labels=None`` replaces a mutable ``{}`` default argument; any falsy
        value (None or an empty dict) simply omits the ``pulp_labels`` field.
        """
        attrs = {"name": str(uuid4())}
        if labels:
            attrs["pulp_labels"] = labels
        self.repo = self.repo_api.create(attrs)
        self.addCleanup(self.repo_api.delete, self.repo.pulp_href)

    def test_create_repo_with_labels(self):
        """Create repository with labels."""
        labels = {"maiar": "mithrandir"}
        self._create_repo(labels)
        self.assertEqual(labels, self.repo.pulp_labels)

    def test_add_repo_labels(self):
        """Update repository with labels."""
        labels = {"maiar": "mithrandir", "valar": "varda"}
        self._create_repo()

        resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": labels})
        monitor_task(resp.task)

        self.repo = self.repo_api.read(self.repo.pulp_href)
        self.assertEqual(labels, self.repo.pulp_labels)

    def test_update_repo_label(self):
        """Test updating an existing label."""
        labels = {"valar": "varda"}
        self._create_repo(labels)

        labels["valar"] = "manwe"
        resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": labels})
        monitor_task(resp.task)

        self.repo = self.repo_api.read(self.repo.pulp_href)
        self.assertEqual(labels, self.repo.pulp_labels)

    def test_unset_repo_label(self):
        """Test unsetting a repo label."""
        labels = {"maiar": "mithrandir", "valar": "varda"}
        self._create_repo(labels)

        labels.pop("valar")
        resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": labels})
        monitor_task(resp.task)

        self.repo = self.repo_api.read(self.repo.pulp_href)
        self.assertEqual(1, len(self.repo.pulp_labels))
        self.assertEqual(labels, self.repo.pulp_labels)

    def test_remove_all_repo_labels(self):
        """Test removing all labels."""
        labels = {"maiar": "mithrandir", "valar": "varda"}
        self._create_repo(labels)

        resp = self.repo_api.partial_update(self.repo.pulp_href, {"pulp_labels": {}})
        monitor_task(resp.task)

        self.repo = self.repo_api.read(self.repo.pulp_href)
        self.assertEqual(0, len(self.repo.pulp_labels))
        self.assertEqual({}, self.repo.pulp_labels)

    def test_model_partial_update(self):
        """Test that labels aren't unset accidentally with PATCH calls."""
        labels = {"maiar": "mithrandir", "valar": "varda"}
        self._create_repo(labels)

        resp = self.repo_api.partial_update(self.repo.pulp_href, {"name": str(uuid4())})
        monitor_task(resp.task)

        self.repo = self.repo_api.read(self.repo.pulp_href)
        self.assertEqual(labels, self.repo.pulp_labels)

    def test_invalid_label_type(self):
        """Test that label doesn't accept non-dicts"""
        with self.assertRaises(ApiException) as ae:
            self._create_repo("morgoth")  # str instead of dict

        self.assertEqual(400, ae.exception.status)
        self.assertTrue("pulp_labels" in json.loads(ae.exception.body))

    def test_invalid_labels(self):
        """Test that label keys and values are validated."""
        with self.assertRaises(ApiException) as ae:
            self._create_repo({"@": "maia"})

        self.assertEqual(400, ae.exception.status)
        self.assertTrue("pulp_labels" in json.loads(ae.exception.body))

        with self.assertRaises(ApiException) as ae:
            self._create_repo({"arda": "eru,illuvata"})

        self.assertEqual(400, ae.exception.status)
        self.assertTrue("pulp_labels" in json.loads(ae.exception.body))