def setUpClass(cls):
    """Create repos, remotes, and api-clients for all tests.

    Builds two structurally identical "info" dicts — one for the admin,
    one for a freshly created user — each with its own bindings client,
    remote, repo, and sync payload.
    """
    cls.cfg = config.get_config()
    cls.client = ApiClient(configuration=cls.cfg.get_bindings_config())

    def _build_info():
        # Each actor gets its own file-bindings client so credentials
        # are not shared between admin and the created user.
        file_client = gen_file_client()
        info = {
            "task_api": TasksApi(cls.client),
            "file_client": file_client,
            "remote_api": RemotesFileApi(file_client),
            "repo_api": RepositoriesFileApi(file_client),
        }
        info["a_remote"] = info["remote_api"].create(
            gen_file_remote(policy="on_demand")
        )
        info["a_repo"] = info["repo_api"].create(gen_repo())
        info["sync_data"] = RepositorySyncURL(remote=info["a_remote"].pulp_href)
        return info

    cls.admin_info = _build_info()
    # The user must exist before its client/resources are built.
    cls.new_user = gen_user()
    cls.user_info = _build_info()
def test_ondemand_to_immediate_sync(self):
    """Checks that content artifacts are updated following on-demand -> immediate sync."""
    # Ensure that no content is present; this test counts all file content.
    content_response = self.cont_api.list(limit=1)
    if content_response.count > 0:
        self.skipTest("Please remove all file content before running this test")

    # Create and sync repo w/ on_demand policy
    repo = self.repo_api.create(gen_repo())
    remote = self.remote_api.create(gen_file_remote(policy="on_demand"))
    body = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, body).task)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Check content is present, but no artifacts are there
    content_response = self.cont_api.list()
    self.assertEqual(content_response.count, 3)
    for content in content_response.results:
        # assertIsNone is the idiomatic unittest check (was assertEqual(x, None))
        self.assertIsNone(content.artifact)

    # Sync again w/ immediate policy
    remote = self.remote_api.create(gen_file_remote())
    body = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, body).task)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Check content is still present, but artifacts are now there
    content_response = self.cont_api.list()
    self.assertEqual(content_response.count, 3)
    for content in content_response.results:
        self.assertIsNotNone(content.artifact)
def test_all(self):
    """Sync two repositories and check view filter."""
    # Filtering by a content href that does not exist must return HTTP 400.
    nonexistent_content_href = (
        "/pulp/api/v3/content/file/files/c4ed74cf-a806-490d-a25f-94c3c3dd2dd7/"
    )
    with self.assertRaises(ApiException) as ctx:
        self.repo_ver_api.list(content=nonexistent_content_href)
    self.assertEqual(ctx.exception.status, 400)

    # No repository version exists yet.
    self.assertEqual(self.repo_ver_api.list().count, 0)

    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    repo_second = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo_second.pulp_href)

    remote = self.remote_api.create(gen_file_remote())
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    body = gen_file_remote(url=FILE2_FIXTURE_MANIFEST_URL)
    remote_second = self.remote_api.create(body)
    self.addCleanup(self.remote_api.delete, remote_second.pulp_href)

    repo_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    repo_sync_data_second = RepositorySyncURL(remote=remote_second.pulp_href)

    sync_response = self.repo_api.sync(repo.pulp_href, repo_sync_data)
    monitor_task(sync_response.task)
    sync_response_second = self.repo_api.sync(repo_second.pulp_href, repo_sync_data_second)
    monitor_task(sync_response_second.task)

    # Update repository data and get one content unit from first repository.
    repo = self.repo_api.read(repo.pulp_href)
    content_href = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]["pulp_href"]

    rv_total = len(self.repo_ver_api.list().to_dict()["results"])
    rv_search = self.repo_ver_api.list(content=content_href).to_dict()["results"]

    # Test only one repository version has selected content.
    self.assertEqual(len(rv_search), 1)
    # Test if the repository version with the content matches.
    self.assertEqual(rv_search[0]["pulp_href"], repo.latest_version_href)
    # Test total number of repository versions. Two for each repository.
    self.assertEqual(rv_total, 4)
def do_test(self, url):
    """Sync repositories with the file plugin.

    A remote must be associated with a repository before it can be
    synced. A freshly created repository starts at version 0; the
    version advances after a sync that changes content.

    Steps:

    1. Create a repository and a remote for ``url``.
    2. Assert the repository starts at version 0.
    3. Sync and assert the version advanced.
    4. Assert the expected units were added and are present.
    5. Sync again and assert no new version/units appear, since the
       remote is unchanged.
    """
    repo_client = RepositoriesFileApi(self.client)
    remote_client = RemotesFileApi(self.client)

    file_repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, file_repo.pulp_href)

    file_remote = remote_client.create(gen_file_remote(url))
    self.addCleanup(remote_client.delete, file_remote.pulp_href)

    # Freshly created repositories sit at version 0.
    self.assertEqual(file_repo.latest_version_href, f"{file_repo.pulp_href}versions/0/")

    # First sync: every fixture unit is new, so all of them get added.
    sync_url = RepositorySyncURL(remote=file_remote.pulp_href)
    monitor_task(repo_client.sync(file_repo.pulp_href, sync_url).task)
    file_repo = repo_client.read(file_repo.pulp_href)

    self.assertIsNotNone(file_repo.latest_version_href)
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_content_summary(file_repo.to_dict()))
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_added_content_summary(file_repo.to_dict()))

    # Second sync: nothing changed upstream, so the version stays put.
    version_before = file_repo.latest_version_href
    sync_url = RepositorySyncURL(remote=file_remote.pulp_href)
    monitor_task(repo_client.sync(file_repo.pulp_href, sync_url).task)
    file_repo = repo_client.read(file_repo.pulp_href)

    self.assertEqual(version_before, file_repo.latest_version_href)
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_content_summary(file_repo.to_dict()))
def setup_download_test(self, policy, url=None, publish=True):
    """Create a repo and remote, sync, and distribute the result.

    With ``publish=True`` the distribution serves an explicit
    publication; otherwise it serves the repository directly.
    """
    # Repository
    self.repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, self.repo.pulp_href)

    # Remote with the requested download policy (and optional custom URL)
    remote_kwargs = {"policy": policy}
    if url:
        remote_kwargs["url"] = url
    self.remote = self.remote_api.create(gen_file_remote(**remote_kwargs))
    self.addCleanup(self.remote_api.delete, self.remote.pulp_href)

    # Populate the repository from the remote.
    sync_body = RepositorySyncURL(remote=self.remote.pulp_href)
    monitor_task(self.repo_api.sync(self.repo.pulp_href, sync_body).task)

    if publish:
        # Serve through an explicit publication.
        pub_task = self.publications_api.create(
            FileFilePublication(repository=self.repo.pulp_href)
        ).task
        publication_href = monitor_task(pub_task).created_resources[0]
        self.addCleanup(self.publications_api.delete, publication_href)
        distribution_field = {"publication": publication_href}
    else:
        # Serve the repository directly.
        distribution_field = {"repository": self.repo.pulp_href}

    # Distribution pointing at whichever resource is being served.
    dist_task = self.distributions_api.create(gen_distribution(**distribution_field)).task
    distribution_href = monitor_task(dist_task).created_resources[0]
    self.distribution = self.distributions_api.read(distribution_href)
    self.addCleanup(self.distributions_api.delete, self.distribution.pulp_href)
def setUpClass(cls):
    """Create APIs, a synced autopublish repo, publications, and a distribution."""
    client = gen_file_client()
    cls.cont_api = ContentFilesApi(client)
    cls.repo_api = RepositoriesFileApi(client)
    cls.remote_api = RemotesFileApi(client)
    cls.pub_api = PublicationsFileApi(client)
    cls.dis_api = DistributionsFileApi(client)
    # Repository with autopublish enabled: syncing it is expected to create
    # a publication automatically alongside the new repository version.
    cls.repo = cls.repo_api.create(gen_repo(autopublish=True))
    cls.remote = cls.remote_api.create(gen_file_remote())
    body = RepositorySyncURL(remote=cls.remote.pulp_href)
    created = monitor_task(
        cls.repo_api.sync(cls.repo.pulp_href, body).task).created_resources
    cls.repo = cls.repo_api.read(cls.repo.pulp_href)
    # created[1] is taken to be the auto-created publication (created[0]
    # presumably being the new repository version) — TODO confirm ordering.
    cls.pub1 = cls.pub_api.read(created[1])
    # Second, explicitly requested publication of the same repository.
    body = FileFilePublication(repository=cls.repo.pulp_href)
    cls.pub2 = cls.pub_api.read(
        monitor_task(cls.pub_api.create(body).task).created_resources[0])
    # Placeholder; presumably filled in by individual tests — verify usage.
    cls.pub3 = []
    response = cls.dis_api.create(
        gen_distribution(repository=cls.repo.pulp_href))
    cls.distro = cls.dis_api.read(
        monitor_task(response.task).created_resources[0])
    # Placeholder for a second distribution, filled in later.
    cls.distro2 = []
    # Base URL for fetching content served by the distribution.
    cls.url = urljoin(PULP_CONTENT_BASE_URL, f"{cls.distro.base_path}/")
def _setup_repositories(cls):
    """Create, sync, and publish NUM_REPOS repositories to be exported.

    Returns:
        Tuple of index-aligned lists: (repos, remotes, publications).
    """
    repos = []
    remotes = []
    publications = []
    for _ in range(NUM_REPOS):
        # Create a repository and a remote to feed it.
        repo = cls.repo_api.create(gen_repo())
        remote = cls.remote_api.create(gen_file_remote())
        # Sync the repository from the remote.
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = cls.repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        # Re-read to pick up latest_version_href; positional href argument
        # for consistency with every other read() call in this module.
        repo = cls.repo_api.read(repo.pulp_href)
        # Publish the synced content.
        publish_data = FileFilePublication(repository=repo.pulp_href)
        publish_response = cls.publication_api.create(publish_data)
        created_resources = monitor_task(publish_response.task).created_resources
        publication = cls.publication_api.read(created_resources[0])

        repos.append(repo)
        remotes.append(remote)
        publications.append(publication)
    return repos, remotes, publications
def test_nonpublished_content_not_served(self):
    """Verify content that hasn't been published is not served."""
    # Repository to hold the synced-but-unpublished content.
    self.repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, self.repo.pulp_href)

    # Remote pointing at the default file fixture.
    self.remote = self.remote_api.create(gen_file_remote())
    self.addCleanup(self.remote_api.delete, self.remote.pulp_href)

    # Pull the content in.
    sync_body = RepositorySyncURL(remote=self.remote.pulp_href)
    monitor_task(self.repo_api.sync(self.repo.pulp_href, sync_body).task)

    # Distribute the repository directly — deliberately no publication.
    dist_body = {
        "name": "foo",
        "base_path": "bar/foo",
        "repository": self.repo.pulp_href,
    }
    dist_href = monitor_task(
        self.distributions_api.create(dist_body).task
    ).created_resources[0]
    self.distribution = self.distributions_api.read(dist_href)
    self.addCleanup(self.distributions_api.delete, self.distribution.pulp_href)
def test_01_sync(self):
    """Assert that syncing the repository triggers auto-publish and auto-distribution."""
    # Nothing is published or linked to the distribution yet.
    self.assertEqual(self.publications_api.list().count, 0)
    # assertIsNone is the idiomatic unittest check (was assertTrue(... is None))
    self.assertIsNone(self.distribution.publication)

    # Sync the repository.
    repository_sync_data = RepositorySyncURL(remote=self.remote.pulp_href)
    sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data)
    task = monitor_task(sync_response.task)

    # Check that all the appropriate resources were created
    self.assertGreater(len(task.created_resources), 1)
    publications = self.publications_api.list()
    self.assertEqual(publications.count, 1)
    download_content_unit(self.cfg, self.distribution.to_dict(), self.CUSTOM_MANIFEST)

    # Check that the publish settings were used
    publication = publications.results[0]
    self.assertEqual(publication.manifest, self.CUSTOM_MANIFEST)

    # Sync the repository again. Since there should be no new repository version, there
    # should be no new publications or distributions either.
    sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data)
    task = monitor_task(sync_response.task)
    self.assertEqual(len(task.created_resources), 0)
    self.assertEqual(self.publications_api.list().count, 1)
def test_serving_acs_content(self):
    """Test serving of ACS content through the content app."""
    cfg = config.get_config()
    # Create the Alternate Content Source and refresh it so Pulp learns
    # about the artifacts it can provide.
    acs = self._create_acs()
    resp = self.file_acs_api.refresh(acs.pulp_href, acs)
    monitor_task_group(resp.task_group)
    # The remote serves only a manifest; with on_demand policy the actual
    # payload must come via the ACS when a client requests it.
    remote = self.file_remote_api.create(
        gen_file_remote(FILE_MANIFEST_ONLY_FIXTURE_URL, policy="on_demand")
    )
    self.addCleanup(self.file_remote_api.delete, remote.pulp_href)
    repo = self.repo_api.create(gen_repo(remote=remote.pulp_href, autopublish=True))
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    # Distribute the repository so content can be fetched over HTTP.
    distribution_response = self.distribution_api.create(
        gen_distribution(repository=repo.pulp_href)
    )
    created_resources = monitor_task(distribution_response.task).created_resources
    distribution = self.distribution_api.read(created_resources[0])
    self.addCleanup(self.distribution_api.delete, distribution.pulp_href)
    # Sync the repository and refresh our view of it.
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = self.repo_api.read(repo.pulp_href)
    # Fetch one random unit through the content app and compare its sha256
    # with the copy obtained directly from the fixture server.
    unit_path = choice(get_file_content_paths(repo.to_dict()))
    fixtures_hash = hashlib.sha256(
        utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path))
    ).hexdigest()
    content = download_content_unit(cfg, distribution.to_dict(), unit_path)
    pulp_hash = hashlib.sha256(content).hexdigest()
    self.assertEqual(fixtures_hash, pulp_hash)
def _repo_sync_distribute(self, policy="immediate"):
    """Helper to create & populate a repository and distribute it."""
    # Repository
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    # Remote using the requested download policy.
    remote = self.remote_api.create(gen_file_remote(**{"policy": policy}))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Sync and refresh our view of the repository.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_url).task)
    repo = self.repo_api.read(repo.pulp_href)

    # Publish the synced version.
    pub_response = self.publication_api.create(
        FileFilePublication(repository=repo.pulp_href)
    )
    publication = self.publication_api.read(
        monitor_task(pub_response.task).created_resources[0]
    )
    self.addCleanup(self.publication_api.delete, publication.pulp_href)

    # Distribute the publication.
    dist_body = gen_distribution()
    dist_body["publication"] = publication.pulp_href
    dist_response = self.distributions_api.create(dist_body)
    distribution = self.distributions_api.read(
        monitor_task(dist_response.task).created_resources[0]
    )
    self.addCleanup(self.distributions_api.delete, distribution.pulp_href)

    return repo, distribution
def do_test(self, policy):
    """Access lazy synced content on using content endpoint."""
    # Start from a clean slate so the final content count is meaningful.
    delete_orphans()

    content_client = ContentFilesApi(self.client)
    repo_client = RepositoriesFileApi(self.client)
    remote_client = RemotesFileApi(self.client)

    repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, repo.pulp_href)

    remote = remote_client.create(gen_file_remote(**{"policy": policy}))
    self.addCleanup(remote_client.delete, remote.pulp_href)

    # A fresh repository sits at version 0 before any sync.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")

    # Sync with the lazy policy under test.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_client.sync(repo.pulp_href, sync_url).task)
    repo = repo_client.read(repo.pulp_href)

    # No HTTP error was raised, and every unit from the feed is listed.
    listed = content_client.list().to_dict()["results"]
    self.assertEqual(len(listed), FILE_FIXTURE_COUNT, listed)
def do_publish(self, download_policy):
    """Publish repository synced with lazy download policy."""
    repo_client = RepositoriesFileApi(self.client)
    remote_client = RemotesFileApi(self.client)
    pub_client = PublicationsFileApi(self.client)

    repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, repo.pulp_href)

    remote = remote_client.create(gen_file_remote(policy=download_policy))
    self.addCleanup(remote_client.delete, remote.pulp_href)

    # Sync with the lazy policy under test.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_client.sync(repo.pulp_href, sync_url).task)
    repo = repo_client.read(repo.pulp_href)

    # Publishing must work even though artifacts were not downloaded.
    publish_task = pub_client.create(
        FileFilePublication(repository=repo.pulp_href)
    ).task
    publication_href = monitor_task(publish_task).created_resources[0]
    self.addCleanup(pub_client.delete, publication_href)

    publication = pub_client.read(publication_href)
    self.assertIsNotNone(publication.repository_version, publication)
def test_clean_orphan_content_unit(self):
    """Test whether orphaned content units can be cleaned up."""
    repo_api = RepositoriesFileApi(self.api_client)
    remote_api = RemotesFileApi(self.api_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_file_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Pick a random synced unit; it becomes an orphan only once no
    # repository version references it anymore.
    content = choice(get_content(repo.to_dict())[FILE_CONTENT_NAME])

    # Create an orphan content unit.
    repo_api.modify(repo.pulp_href, dict(remove_content_units=[content["pulp_href"]]))

    artifacts_api = ArtifactsApi(core_client)

    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact is present on disk.
        relative_path = artifacts_api.read(content["artifact"]).file
        artifact_path = os.path.join(self.media_root, relative_path)
        # `cmd` is reused at the end of the test (same storage branch) to
        # confirm the file is gone.
        cmd = ("ls", artifact_path)
        self.cli_client.run(cmd, sudo=True)

    file_contents_api = ContentFilesApi(self.api_client)

    # Delete first repo version. The previous removed content unit will be
    # an orphan.
    delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertIn(content["pulp_href"], content_units_href)

    content_before_cleanup = file_contents_api.list().count
    # A nonzero protection time keeps freshly-orphaned content alive.
    orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 10})
    monitor_task(orphans_response.task)

    # assert content was not removed
    content_after_cleanup = file_contents_api.list().count
    self.assertEqual(content_after_cleanup, content_before_cleanup)

    # With protection time 0 the orphan is immediately eligible for removal.
    orphans_response = self.orphans_cleanup_api.cleanup({"orphan_protection_time": 0})
    monitor_task(orphans_response.task)

    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertNotIn(content["pulp_href"], content_units_href)

    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact was removed from disk.
        with self.assertRaises(CalledProcessError):
            self.cli_client.run(cmd)
def test_duplicate_file_sync(self):
    """Sync a repository with remotes containing same file names.

    Steps:

    1. Create a repository in pulp.
    2. Create two remotes containing the same file.
    3. Check whether the created repo has only one copy of the file.

    This test targets the following issue:

    `Pulp #4738 <https://pulp.plan.io/issues/4738>`_
    """
    repo_client = RepositoriesFileApi(self.client)
    remote_client = RemotesFileApi(self.client)

    # Step 1: the repository.
    repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, repo.pulp_href)

    # Step 2: two remotes whose manifests share file names.
    first_remote = remote_client.create(gen_file_remote())
    self.addCleanup(remote_client.delete, first_remote.pulp_href)
    second_remote = remote_client.create(
        gen_file_remote(url=FILE2_FIXTURE_MANIFEST_URL)
    )
    self.addCleanup(remote_client.delete, second_remote.pulp_href)

    # Sync the first remote; everything it offers gets added.
    monitor_task(
        repo_client.sync(
            repo.pulp_href, RepositorySyncURL(remote=first_remote.pulp_href)
        ).task
    )
    repo = repo_client.read(repo.pulp_href)
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_content_summary(repo.to_dict()))
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_added_content_summary(repo.to_dict()))

    # Step 3: sync the second remote and check the added-content summary.
    monitor_task(
        repo_client.sync(
            repo.pulp_href, RepositorySyncURL(remote=second_remote.pulp_href)
        ).task
    )
    repo = repo_client.read(repo.pulp_href)
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_added_content_summary(repo.to_dict()))
def do_sync(self, download_policy):
    """Sync repositories with the different ``download_policy``.

    Steps:

    1. Create a repository and a remote with the given policy.
    2. Assert the repository starts at version 0.
    3. Sync, then assert the version advanced and the expected number
       of units were added and are present.
    4. Sync the same remote again; assert the version is unchanged and
       no extra units appear, since nothing changed upstream.
    """
    # Start from a clean content store.
    delete_orphans()

    repo_client = RepositoriesFileApi(self.client)
    remote_client = RemotesFileApi(self.client)

    repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, repo.pulp_href)

    remote = remote_client.create(gen_file_remote(**{"policy": download_policy}))
    self.addCleanup(remote_client.delete, remote.pulp_href)

    # A fresh repository sits at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")

    # First sync adds every fixture unit.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_client.sync(repo.pulp_href, sync_url).task)
    repo = repo_client.read(repo.pulp_href)

    self.assertIsNotNone(repo.latest_version_href)
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_content_summary(repo.to_dict()))
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_added_content_summary(repo.to_dict()))

    # Second sync of an unchanged remote must not create a new version.
    version_before = repo.latest_version_href
    monitor_task(repo_client.sync(repo.pulp_href, sync_url).task)
    repo = repo_client.read(repo.pulp_href)

    self.assertEqual(version_before, repo.latest_version_href)
    self.assertDictEqual(FILE_FIXTURE_SUMMARY, get_content_summary(repo.to_dict()))
def setUp(self):
    """Create a new repository before each test."""
    remote = self.remote_api.create(gen_file_remote())
    repo = self.repo_api.create(gen_repo())
    # Populate the repository before handing it to the test.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_url).task)
    self.repo = self.repo_api.read(repo.pulp_href)
def setUpClass(cls):
    """Create repos, remotes, and api-clients for all tests."""
    cls.cfg = config.get_config()
    cls.client = ApiClient(configuration=cls.cfg.get_bindings_config())
    cls.task_api = TasksApi(cls.client)
    cls.file_client = gen_file_client()
    cls.remote_api = RemotesFileApi(cls.file_client)
    cls.repo_api = RepositoriesFileApi(cls.file_client)

    # A remote/repo pair whose sync is expected to succeed.
    cls.good_remote = cls.remote_api.create(gen_file_remote(policy="on_demand"))
    cls.good_repo = cls.repo_api.create(gen_repo())
    cls.good_sync_data = RepositorySyncURL(remote=cls.good_remote.pulp_href)

    # A remote pointing at a nonexistent upstream, so its sync must fail.
    bad_url = "https://fixtures.pulpproject.org/THEREISNOFILEREPOHERE/"
    cls.bad_remote = cls.remote_api.create(
        gen_file_remote(bad_url, policy="on_demand")
    )
    cls.bad_repo = cls.repo_api.create(gen_repo())
    cls.bad_sync_data = RepositorySyncURL(remote=cls.bad_remote.pulp_href)
def test_remote_artifact_url_update(self):
    """Test that downloading on_demand content works after a repository layout change."""
    FILE_NAME = "1.iso"

    # 1. Create a remote, repository and distribution - remote URL has links that should 404
    remote_config = gen_file_remote(
        policy="on_demand", url=FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL)
    remote = self.remote_api.create(remote_config)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    # autopublish + repository-backed distribution: synced content is served
    # without an explicit publication step.
    repo = self.repo_api.create(
        gen_repo(autopublish=True, remote=remote.pulp_href))
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    body = gen_distribution(repository=repo.pulp_href)
    distribution_response = self.distributions_api.create(body)
    created_resources = monitor_task(
        distribution_response.task).created_resources
    distribution = self.distributions_api.read(created_resources[0])
    self.addCleanup(self.distributions_api.delete, distribution.pulp_href)

    # 2. Sync the repository, verify that downloading artifacts fails
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    # The manifest lists the unit but the remote 404s on the payload, so the
    # on-demand fetch surfaces as an HTTPError to the client.
    with self.assertRaises(HTTPError):
        download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME)

    # 3. Update the remote URL with one that works, sync again, check that downloading
    # artifacts works.
    update_response = self.remote_api.update(
        remote.pulp_href, gen_file_remote(policy="on_demand", url=FILE_FIXTURE_MANIFEST_URL))
    monitor_task(update_response.task)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    # Downloaded bytes must match the fixture's copy bit-for-bit.
    content = download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME)
    pulp_hash = hashlib.sha256(content).hexdigest()
    fixtures_hash = hashlib.sha256(
        utils.http_get(urljoin(FILE_FIXTURE_URL, FILE_NAME))).hexdigest()
    self.assertEqual(pulp_hash, fixtures_hash)
def test_all(self):
    """Perform a lazy sync, then switch to immediate to force a download."""
    # Make sure no artifacts are left over from earlier tests.
    delete_orphans()

    client = gen_file_client()
    artifact_client = ArtifactsApi(core_client)
    repo_client = RepositoriesFileApi(client)
    remote_client = RemotesFileApi(client)

    repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, repo.pulp_href)

    remote = remote_client.create(
        gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES))
    )
    self.addCleanup(remote_client.delete, remote.pulp_href)

    # Lazy sync: content is registered but nothing is downloaded.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_client.sync(repo.pulp_href, sync_url).task)
    artifacts = artifact_client.list().to_dict()["results"]
    self.assertEqual(len(artifacts), 0, artifacts)

    # Flip the remote to the immediate policy.
    monitor_task(
        remote_client.partial_update(remote.pulp_href, {"policy": "immediate"}).task
    )
    remote = remote_client.read(remote.pulp_href)
    self.assertEqual(remote.policy, "immediate")

    # Re-sync: the missing artifacts should now be fetched.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_client.sync(repo.pulp_href, sync_url).task)
    artifacts = artifact_client.list().to_dict()["results"]
    self.assertEqual(len(artifacts), FILE_FIXTURE_COUNT, artifacts)
def setUpClass(cls):
    """
    Initialize Pulp to make authorization assertions using client certificates.

    0. Create a FileRepository
    1. Create a FileRemote
    2. Sync in a few units we can use to fetch with
    3. Create a Publication
    4. Create a CertGuard with the CA cert used to sign all client certificates
    5. Create a Distribution for the publication that is protected by the CertGuard
    """
    cls.teardown_cleanups = []
    cls.cfg = config.get_config()
    file_client = gen_file_client()
    repo_api = RepositoriesFileApi(file_client)
    remote_api = RemotesFileApi(file_client)
    publications = PublicationsFileApi(file_client)
    cls.distributions_api = DistributionsFileApi(file_client)

    cls.repo = repo_api.create(gen_repo())
    cls.teardown_cleanups.append((repo_api.delete, cls.repo.pulp_href))

    body = gen_file_remote(policy="immediate")
    remote = remote_api.create(body)
    cls.teardown_cleanups.append((remote_api.delete, remote.pulp_href))

    # Sync a Repository
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(cls.repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    cls.repo = repo_api.read(cls.repo.pulp_href)

    # Create a publication. monitor_task() returns a task object; the hrefs
    # of new resources are on its ``created_resources`` attribute (as used
    # everywhere else in this module) — the original indexed the task itself.
    publish_data = FileFilePublication(repository=cls.repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    cls.teardown_cleanups.append((publications.delete, publication_href))

    content_guard_href = cls._setup_content_guard()

    # Create a distribution protected by the CertGuard.
    body = gen_distribution()
    body["publication"] = publication_href
    body["content_guard"] = content_guard_href
    distribution_response = cls.distributions_api.create(body)
    created_resources = monitor_task(distribution_response.task).created_resources
    cls.distribution = cls.distributions_api.read(created_resources[0])
    cls.teardown_cleanups.append((cls.distributions_api.delete, cls.distribution.pulp_href))
def setUp(self):
    """Create a new repository before each test."""
    # The manifest is served by the Pulp content app itself (data laid
    # down before the upgrade).
    manifest_path = "/pulp/content/pulp_pre_upgrade_test/PULP_MANIFEST"
    url = self.cfg.get_content_host_base_url() + manifest_path

    remote = self.remote_api.create(gen_file_remote(url=url))
    repo = self.repo_api.create(gen_repo())

    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_url).task)
    self.repo = self.repo_api.read(repo.pulp_href)
def test_reclaim_immediate_content(self):
    """
    Test whether immediate repository content can be reclaimed
    and then re-populated back after sync.
    """
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(gen_file_remote())
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Populate the repository using the (default) immediate policy.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_url).task)

    # Reclaim disk space for the repository.
    monitor_task(self.reclaim_api.reclaim({"repo_hrefs": [repo.pulp_href]}).task)

    # All artifacts should be gone now.
    self.assertEqual(self.artifacts_api.list().count, 0)

    # Sync again; the missing artifacts must be re-downloaded.
    sync_url = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_url).task)
    self.assertGreater(self.artifacts_api.list().count, 0)

    self.addCleanup(self.orphans_api.cleanup, {"orphan_protection_time": 0})
def do_test(self, url):
    """Sync a repository given ``url`` on the remote."""
    repo_client = RepositoriesFileApi(self.client)
    remote_client = RemotesFileApi(self.client)

    repo = repo_client.create(gen_repo())
    self.addCleanup(repo_client.delete, repo.pulp_href)

    remote = remote_client.create(gen_file_remote(url=url))
    self.addCleanup(remote_client.delete, remote.pulp_href)

    # Kick off the sync and hand the finished task back to the caller.
    sync_response = repo_client.sync(
        repo.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    return monitor_task(sync_response.task)
def _run_basic_sync_and_assert(
    remote, file_repo, file_repo_api_client, content_file_api_client, policy="on_demand"
):
    """Sync ``file_repo`` from ``remote`` and check content/artifact state.

    Expects the remote to serve exactly 3 content units. With the default
    on_demand policy no artifacts should exist after the sync; with
    ``policy="immediate"`` every unit should have its artifact downloaded.
    """
    body = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(file_repo_api_client.sync(file_repo.pulp_href, body).task)

    # Content must be present in either case; whether each unit has an
    # artifact depends on the download policy used for the sync.
    content_response = content_file_api_client.list()
    assert content_response.count == 3
    for content in content_response.results:
        if policy == "immediate":
            assert content.artifact is not None
        else:
            assert content.artifact is None
def _setup_repositories(cls):
    """Create and sync a number of repositories to be exported."""
    repos, remotes = [], []
    for _ in range(NUM_REPOS):
        # One repository plus the remote that feeds it.
        repo = cls.repo_api.create(gen_repo())
        remote = cls.remote_api.create(gen_file_remote())
        # Sync so the export has real content to work with.
        sync_url = RepositorySyncURL(remote=remote.pulp_href)
        monitor_task(cls.repo_api.sync(repo.pulp_href, sync_url).task)
        # Remember both for the caller.
        repos.append(repo)
        remotes.append(remote)
    return repos, remotes
def test_clean_specific_orphans(self):
    """Test whether the `content_hrefs` param removes specific orphans but not others"""
    repo_api = RepositoriesFileApi(self.api_client)
    remote_api = RemotesFileApi(self.api_client)
    file_contents_api = ContentFilesApi(self.api_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_file_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository (it must start at version 0).
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_body = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_body).task)
    repo = repo_api.read(repo.pulp_href)

    # Orphan two content units by removing them from the repository ...
    file_content = get_content(repo.to_dict())[FILE_CONTENT_NAME]
    content_a = file_content[0]["pulp_href"]
    content_b = file_content[1]["pulp_href"]
    repo_api.modify(repo.pulp_href, dict(remove_content_units=[content_a, content_b]))

    # ... and deleting the repo version that still referenced them.
    delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])

    # Both orphans are still present before cleanup.
    hrefs = [c["pulp_href"] for c in file_contents_api.list().to_dict()["results"]]
    self.assertIn(content_a, hrefs)
    self.assertIn(content_b, hrefs)

    # Clean up only content_a; content_b must survive.
    cleanup_response = self.orphans_cleanup_api.cleanup({"content_hrefs": [content_a]})
    monitor_task(cleanup_response.task)

    hrefs = [c["pulp_href"] for c in file_contents_api.list().to_dict()["results"]]
    self.assertNotIn(content_a, hrefs)
    self.assertIn(content_b, hrefs)
def test_01_sync(self):
    """Assert that syncing the repository w/ mirror=True creates a publication."""
    # Sync in mirror mode so a publication is produced alongside the version.
    sync_body = RepositorySyncURL(remote=self.remote.pulp_href, mirror=True)
    task = monitor_task(self.repo_api.sync(self.repo.pulp_href, sync_body).task)

    # Exactly two resources: a publication and a repository version.
    self.assertEqual(len(task.created_resources), 2)
    self.assertTrue(any("publication" in resource for resource in task.created_resources))
    self.assertTrue(any("version" in resource for resource in task.created_resources))
def test_acs_sync_with_paths(self):
    """Test syncing from an ACS using different paths."""
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.file_remote_api.create(gen_file_remote(FILE_MANIFEST_ONLY_FIXTURE_URL))
    self.addCleanup(self.file_remote_api.delete, remote.pulp_href)

    acs = self._create_acs(
        paths=("file/PULP_MANIFEST", "file2/PULP_MANIFEST"),
        remote_url=PULP_FIXTURES_BASE_URL,
    )
    resp = self.file_acs_api.refresh(acs.pulp_href, acs)
    task_group = monitor_task_group(resp.task_group)
    # One refresh task is spawned per configured path.
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(len(task_group.tasks), 2)

    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
def test_access_error(self):
    """HTTP error is not raised when accessing published data."""
    repo_api = RepositoriesFileApi(self.client)
    remote_api = RemotesFileApi(self.client)
    publications = PublicationsFileApi(self.client)
    distributions = DistributionsFileApi(self.client)

    # Create and sync a repository.
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_file_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)
    sync_body = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_body).task)
    repo = repo_api.read(repo.pulp_href)

    # Publish the repository.
    publish_response = publications.create(FileFilePublication(repository=repo.pulp_href))
    publication_href = monitor_task(publish_response.task).created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    # Distribute the publication.
    dist_body = gen_distribution()
    dist_body["publication"] = publication_href
    dist_response = distributions.create(dist_body)
    dist_href = monitor_task(dist_response.task).created_resources[0]
    distribution = distributions.read(dist_href)
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # Fetching the manifest through the distribution must succeed and
    # list the full fixture content count.
    pulp_manifest = parse_pulp_manifest(
        self.download_pulp_manifest(distribution.to_dict(), "PULP_MANIFEST")
    )
    self.assertEqual(len(pulp_manifest), FILE_FIXTURE_COUNT, pulp_manifest)