def test_sync(self):
    """Sync repositories with the python plugin.

    In order to sync a repository, a remote has to be associated with
    that repository. When a repository is created, its latest version
    href points to version 0. After a sync, the repository version is
    updated.

    Do the following:

    1. Create a repository and a remote.
    2. Assert that the repository's latest version is version 0.
    3. Sync the remote.
    4. Assert that the repository version is not None.
    5. Assert that the correct number of units were added and are present
       in the repo.
    6. Sync the remote one more time.
    7. Assert that the repository version has not changed, since no new
       content was available.
    8. Assert that the same number of units are present and that no units
       were added.
    """
    repo_api = RepositoriesPythonApi(self.client)
    remote_api = RemotesPythonApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_python_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    self.assertIsNotNone(repo.latest_version_href)
    self.assertDictEqual(
        get_content_summary(repo.to_dict()), PYTHON_XS_FIXTURE_SUMMARY
    )
    self.assertDictEqual(
        get_added_content_summary(repo.to_dict()), PYTHON_XS_FIXTURE_SUMMARY
    )

    # Sync the repository again. No new content is available, so no new
    # repository version should be created.
    latest_version_href = repo.latest_version_href
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    self.assertEqual(latest_version_href, repo.latest_version_href)
    self.assertDictEqual(
        get_content_summary(repo.to_dict()), PYTHON_XS_FIXTURE_SUMMARY
    )
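
# A minimal sketch of a reusable sync helper that the two sync calls above could
# share. The helper name (``sync_and_refresh``) is hypothetical; it only uses
# objects already present in these tests (``RepositorySyncURL``, ``monitor_task``
# and a ``RepositoriesPythonApi`` instance), which are assumed to be imported at
# module scope as in the tests themselves.
def sync_and_refresh(repo_api, repo, remote):
    """Sync ``repo`` from ``remote``, wait for the task, and return the re-read repo."""
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    # Re-read so latest_version_href reflects the new repository version.
    return repo_api.read(repo.pulp_href)

# Usage in the test above would reduce each sync block to:
#     repo = sync_and_refresh(repo_api, repo, remote)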
def test_on_demand_pypi_full_sync(self):
    """Sync all of PyPI using the on_demand policy."""
    repo_api = RepositoriesPythonApi(self.client)
    remote_api = RemotesPythonApi(self.client)
    tasks_api = TasksApi(self.core_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_python_remote("https://pypi.org", includes=[], policy="on_demand")
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    sync_task = tasks_api.read(sync_response.task)
    time_diff = sync_task.finished_at - sync_task.started_at
    print("Sync time: {} seconds".format(time_diff.seconds))

    self.assertIsNotNone(repo.latest_version_href)
    # As of August 11 2020, all_packages() returned 253,587 packages,
    # of which only 248,677 were downloadable.
    self.assertTrue(get_content_summary(repo.to_dict())[PYTHON_CONTENT_NAME] > 245000)
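
# A minimal sketch, assuming the same ``TasksApi`` client used above, of reading
# a task's wall-clock duration. The helper name ``task_duration_seconds`` is
# hypothetical; ``started_at`` and ``finished_at`` are the task fields the test
# above already reads.
def task_duration_seconds(tasks_api, task_href):
    """Return how long a finished task ran, in whole seconds."""
    task = tasks_api.read(task_href)
    return (task.finished_at - task.started_at).seconds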
def test_on_demand(self):
    """Test whether a repository synced with the on_demand policy can be published.

    1. Create a repository.
    2. Create a remote with the on_demand sync policy.
    3. Sync the repository.
    4. Publish the repository.
    """
    client = gen_python_client()
    repo_api = RepositoriesPythonApi(client)
    remote_api = RemotesPythonApi(client)
    publications = PublicationsPypiApi(client)

    body = gen_python_remote(policy="on_demand")
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    publish_data = PythonPythonPublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    publication = publications.read(publication_href)
    self.assertEqual(publication.repository_version, repo.latest_version_href)
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying a non-latest ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at the same time.
    """
    cfg = config.get_config()
    client = gen_python_client()
    repo_api = RepositoriesPythonApi(client)
    remote_api = RemotesPythonApi(client)
    publications = PublicationsPypiApi(client)

    body = gen_python_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # Step 1
    repo = repo_api.read(repo.pulp_href)
    for python_content in get_content(repo.to_dict())[PYTHON_CONTENT_NAME]:
        modify_repo(cfg, repo.to_dict(), add_units=[python_content])

    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo.to_dict()))
    non_latest = choice(version_hrefs[:-1])

    # Step 2
    publish_data = PythonPythonPublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)

    # Step 3
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    # Step 4
    publish_data.repository = None
    publish_data.repository_version = non_latest
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    publication = publications.read(publication_href)

    # Step 5
    self.assertEqual(publication.repository_version, non_latest)

    # Step 6
    with self.assertRaises(ApiException):
        body = {"repository": repo.pulp_href, "repository_version": non_latest}
        publications.create(body)
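
# A minimal sketch, assuming the same bindings used in the test above, of the
# two mutually exclusive ways to request a publication: by repository (publishes
# the latest version) or by an explicit repository version. The helper name
# ``create_publication`` is hypothetical; it reuses ``PythonPythonPublication``,
# ``monitor_task`` and a ``PublicationsPypiApi`` instance exactly as the test does.
def create_publication(publications, repository_href=None, version_href=None):
    """Create a publication from either a repository or a repository version."""
    if repository_href is not None:
        # Publishing a repository implicitly selects its latest version.
        publish_data = PythonPythonPublication(repository=repository_href)
    else:
        # Publishing an explicit version; supplying both fields is rejected
        # by the API (see Step 6 above).
        publish_data = PythonPythonPublication(repository_version=version_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    return publications.read(created_resources[0])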
def test_all(self):
    """Verify whether content served by pulp can be downloaded.

    The process of publishing content is more involved in Pulp 3 than it
    was under Pulp 2. Given a repository, the process is as follows:

    1. Create a publication from the repository. (The latest repository
       version is selected if no version is specified.) A publication is a
       repository version plus metadata.
    2. Create a distribution from the publication. The distribution defines
       at which URLs a publication is available, e.g.
       ``http://example.com/content/foo/`` and
       ``http://example.com/content/bar/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
    """
    client = gen_python_client()
    repo_api = RepositoriesPythonApi(client)
    remote_api = RemotesPythonApi(client)
    publications = PublicationsPypiApi(client)
    distributions = DistributionsPypiApi(client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_python_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository.
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Create a publication.
    publish_data = PythonPythonPublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    # Create a distribution.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = distributions.create(body)
    created_resources = monitor_task(distribution_response.task).created_resources
    distribution = distributions.read(created_resources[0])
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # Pick a content unit (of each type), and download it from both Pulp
    # Fixtures…
    unit_paths = [
        choice(paths) for paths in get_python_content_paths(repo.to_dict()).values()
    ]
    fixtures_hashes = [
        hashlib.sha256(
            utils.http_get(urljoin(urljoin(PYTHON_FIXTURE_URL, "packages/"), unit_path[0]))
        ).hexdigest()
        for unit_path in unit_paths
    ]

    # …and Pulp.
    pulp_hashes = []
    cfg = config.get_config()
    for unit_path in unit_paths:
        content = download_content_unit(cfg, distribution.to_dict(), unit_path[1])
        pulp_hashes.append(hashlib.sha256(content).hexdigest())

    self.assertEqual(fixtures_hashes, pulp_hashes)
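
# A minimal sketch of the checksum comparison performed above, factored into a
# helper. The name ``unit_matches_fixture`` is hypothetical; it relies only on
# the utilities the test already uses (``utils.http_get``,
# ``download_content_unit``, ``hashlib``, ``urljoin``, ``PYTHON_FIXTURE_URL``),
# assumed to be imported at module scope as in the test above.
def unit_matches_fixture(cfg, distribution, unit_path):
    """Return True if a unit's SHA-256 digest matches between Pulp Fixtures and Pulp."""
    # unit_path[0] is the path under the fixture's packages/ directory;
    # unit_path[1] is the path served by the Pulp distribution.
    fixture_url = urljoin(urljoin(PYTHON_FIXTURE_URL, "packages/"), unit_path[0])
    fixture_digest = hashlib.sha256(utils.http_get(fixture_url)).hexdigest()
    pulp_content = download_content_unit(cfg, distribution.to_dict(), unit_path[1])
    return fixture_digest == hashlib.sha256(pulp_content).hexdigest()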