def test_on_demand_pypi_full_sync(self):
    """Sync all of PyPI with the on_demand policy and check the content count.

    WARNING: this talks to the live https://pypi.org index and takes a long
    time; it is a capacity/scale test rather than a unit test.
    """
    repo_api = RepositoriesPythonApi(self.client)
    remote_api = RemotesPythonApi(self.client)
    tasks_api = TasksApi(self.core_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    # An empty ``includes`` list means "sync every project on the index".
    body = gen_python_remote("https://pypi.org", includes=[], policy="on_demand")
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Report how long the sync task ran.
    sync_task = tasks_api.read(sync_response.task)
    time_diff = sync_task.finished_at - sync_task.started_at
    # Fix: the message previously said "Delete time" (copy-paste error) even
    # though the measured interval is the sync task's duration.
    print("Sync time: {} seconds".format(time_diff.seconds))

    self.assertIsNotNone(repo.latest_version_href)
    # As of August 11 2020, all_packages() returns 253,587 packages,
    # only 248,677 of them were downloadable.
    # assertGreater gives a useful failure message, unlike assertTrue(x > y).
    self.assertGreater(get_content_summary(repo.to_dict())[PYTHON_CONTENT_NAME], 245000)
def test_on_demand(self):
    """Publish a repository that was synced with the on_demand policy.

    1. Create a repository
    2. Create a remote with on_demand sync policy
    3. Sync
    4. Publish repository
    """
    api_client = gen_python_client()
    repos = RepositoriesPythonApi(api_client)
    remotes = RemotesPythonApi(api_client)
    pubs = PublicationsPypiApi(api_client)

    remote = remotes.create(gen_python_remote(policy="on_demand"))
    self.addCleanup(remotes.delete, remote.pulp_href)
    repo = repos.create(gen_repo())
    self.addCleanup(repos.delete, repo.pulp_href)

    # Sync, then refresh the repository to pick up the new version href.
    sync_payload = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repos.sync(repo.pulp_href, sync_payload).task)
    repo = repos.read(repo.pulp_href)

    # Publish and keep the created publication href for cleanup.
    publish_task = pubs.create(PythonPythonPublication(repository=repo.pulp_href)).task
    publication_href = monitor_task(publish_task).created_resources[0]
    self.addCleanup(pubs.delete, publication_href)

    publication = pubs.read(publication_href)
    self.assertEqual(publication.repository_version, repo.latest_version_href)
def test_workflow_02(self):
    """
    Verify workflow 2.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Pip install a package from the pulp repository.
    3. Check pip install was successful.

    This test targets the following issues:

    * `Pulp #4682 <https://pulp.plan.io/issues/4682>`_
    * `Pulp #4677 <https://pulp.plan.io/issues/4677>`_
    """
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(
        gen_python_remote(includes=PYTHON_LIST_PROJECT_SPECIFIER)
    )
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Populate the repository from the remote, then refresh it.
    sync_payload = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = self.repo_api.read(repo.pulp_href)

    # Publish + distribute, then verify a pip install from the distribution.
    distribution = self.gen_pub_dist(repo)
    self.check_consume(distribution.to_dict())
def setUp(self):
    """Create and sync a new repository before each test."""
    remote = self.remote_api.create(gen_python_remote())
    repo = self.repo_api.create(gen_repo())
    sync_payload = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    # Store the refreshed repository (with its new latest version) on self.
    self.repo = self.repo_api.read(repo.pulp_href)
def sync_to_remote(self, body, create=False, mirror=False):
    """Create or update the shared remote from ``body``, then sync the repo.

    The remote and repository are stored as class attributes so subsequent
    tests in the case see the updated state.
    """
    cls = type(self)
    if create:
        cls.remote = self.remote_api.create(body)
    else:
        # Patch the existing remote and wait for the update task to finish.
        update_response = self.remote_api.partial_update(self.remote.pulp_href, body)
        monitor_task(update_response.task)
        cls.remote = self.remote_api.read(self.remote.pulp_href)

    sync_payload = RepositorySyncURL(remote=self.remote.pulp_href, mirror=mirror)
    monitor_task(self.repo_api.sync(self.repo.pulp_href, sync_payload).task)
    cls.repo = self.repo_api.read(self.repo.pulp_href)
def do_test(self, url):
    """Sync a repository given ``url`` on the remote and return the task result."""
    repos = RepositoriesPythonApi(self.client)
    remotes = RemotesPythonApi(self.client)

    repo = repos.create(gen_repo())
    self.addCleanup(repos.delete, repo.pulp_href)
    remote = remotes.create(gen_python_remote(url=url))
    self.addCleanup(remotes.delete, remote.pulp_href)

    sync_payload = RepositorySyncURL(remote=remote.pulp_href)
    return monitor_task(repos.sync(repo.pulp_href, sync_payload).task)
def setUp(self):
    """Create and sync a repository from the pre-upgrade fixture before each test."""
    pypi_path = "/pypi/pulp_pre_upgrade_test"
    url = self.cfg.get_content_host_base_url() + pypi_path

    remote = self.remote_api.create(gen_python_remote(url=url))
    repo = self.repo_api.create(gen_repo())

    sync_payload = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    # Store the refreshed repository for use by the tests.
    self.repo = self.repo_api.read(repo.pulp_href)
def _sync_repo(self, repo, **kwargs):
    """
    Sync the repo with optional ``kwargs`` passed on to the sync call.

    Args:
        repo: The repository to sync

    Returns:
        repository: The updated repository after the sync is complete
    """
    task = self.repo_api.sync(repo.pulp_href, RepositorySyncURL(**kwargs)).task
    monitor_task(task)
    # Re-read so the caller sees the new latest_version_href.
    return self.repo_api.read(repo.pulp_href)
def test_01_sync(self):
    """Assert that syncing the repository triggers auto-publish and auto-distribution."""
    # Nothing has been published or distributed yet.
    self.assertEqual(self.publications_api.list().count, 0)
    self.assertTrue(self.distribution.publication is None)

    # First sync: expect a new version plus an auto-created publication.
    sync_payload = RepositorySyncURL(remote=self.remote.pulp_href)
    first_task = monitor_task(self.repo_api.sync(self.repo.pulp_href, sync_payload).task)
    self.assertGreater(len(first_task.created_resources), 1)
    self.assertEqual(self.publications_api.list().count, 1)
    # NOTE(review): ``cfg`` is not defined in this method — presumably a
    # module-level config object; confirm it exists in this module.
    download_content_unit(cfg, self.distribution.to_dict(), "simple/")

    # Second sync: no new repository version, so no new publications
    # or distributions either.
    second_task = monitor_task(self.repo_api.sync(self.repo.pulp_href, sync_payload).task)
    self.assertEqual(len(second_task.created_resources), 0)
    self.assertEqual(self.publications_api.list().count, 1)
def populate_pulp(url=PYTHON_FIXTURE_URL):
    """Add python contents to Pulp and return every content unit in Pulp.

    A scratch remote and repository are created, synced, and then deleted;
    the function finally returns the full content listing.

    :param url: The python repository URL. Defaults to
        :data:`pulp_smash.constants.PYTHON_FIXTURE_URL`
    :returns: A list of dicts, where each dict describes one python content in
        Pulp.
    """
    # NOTE(review): relies on module-level ``remote_api``, ``repo_api`` and
    # ``content_api`` clients — confirm they are defined in this module.
    remote = None
    repo = None
    try:
        remote = remote_api.create(gen_python_remote(url))
        repo = repo_api.create(gen_repo())
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
    finally:
        # Always remove the scratch remote/repo, even when the sync failed.
        if remote:
            remote_api.delete(remote.pulp_href)
        if repo:
            repo_api.delete(repo.pulp_href)
    return content_api.list().to_dict()["results"]
def test_file_decriptors(self):
    """Test whether file descriptors are closed properly.

    This test targets the following issue:
    `Pulp #4073 <https://pulp.plan.io/issues/4073>`_

    Do the following:
    1. Check if 'lsof' is installed. If it is not, skip this test.
    2. Create and sync a repo.
    3. Run the 'lsof' command to verify that files in the path
       ``/var/lib/pulp/`` are closed after the sync.
    4. Assert that issued command returns `0` opened files.
    """
    shell = cli.Client(self.cfg, cli.echo_handler)
    # Skip when the 'lsof' utility is unavailable on the host.
    if shell.run(("which", "lsof")).returncode != 0:
        raise unittest.SkipTest("lsof package is not present")

    repos = RepositoriesPythonApi(self.client)
    repo = repos.create(gen_repo())
    self.addCleanup(repos.delete, repo.pulp_href)

    remotes = RemotesPythonApi(self.client)
    remote = remotes.create(gen_python_remote())
    self.addCleanup(remotes.delete, remote.pulp_href)

    sync_payload = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repos.sync(repo.pulp_href, sync_payload).task)

    # ``lsof -t`` prints one PID per open file under MEDIA_PATH; expect none.
    cmd = "lsof -t +D {}".format(MEDIA_PATH).split()
    open_files = shell.run(cmd).stdout
    self.assertEqual(len(open_files), 0, open_files)
def create_workflow(repo_api, pub_api, remote_api=None, body=None, distro_api=None, cleanup=None):
    """Creates repository, publication, and potentially remote and distribution if specified.

    Args:
        repo_api: repository API client used to create the repository.
        pub_api: publication API client used to create the publication.
        remote_api: optional remote API client; when given, a remote is created
            and the repository is synced from it before publishing.
        body: optional dict of remote fields passed to ``create_remote``.
            Defaults to an empty dict.
        distro_api: optional distribution API client; when given, a distribution
            serving the publication is created.
        cleanup: optional cleanup callback (e.g. ``TestCase.addCleanup``)
            forwarded to each create helper.

    Returns:
        list: the created objects in order: repository, [remote,] publication,
        [distribution].
    """
    # Fix: ``body={}`` was a mutable default argument shared across calls;
    # use None as the sentinel and build a fresh dict per call.
    if body is None:
        body = {}
    created_objects = []
    repo = create_repository(repo_api, cleanup=cleanup)
    created_objects.append(repo)
    if remote_api:
        remote = create_remote(remote_api, body=body, cleanup=cleanup)
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        # Refresh the repository so callers see the post-sync version.
        repo = repo_api.read(repo.pulp_href)
        created_objects[0] = repo
        created_objects.append(remote)
    publication = create_publication(pub_api, repo, cleanup=cleanup)
    created_objects.append(publication)
    if distro_api:
        distro = create_distribution(distro_api, publication, cleanup=cleanup)
        created_objects.append(distro)
    return created_objects
def test_mixed(self):
    """Test if repository with mixed synced content can be published."""
    # First pass: on-demand sync of the small project set via the helper.
    on_demand_body = {"includes": PYTHON_SM_PROJECT_SPECIFIER}
    repo, _, pub = create_workflow(
        self.repo_api,
        self.pub_api,
        remote_api=self.remote_api,
        body=on_demand_body,
        cleanup=self.addCleanup,
    )
    self.assertEqual(pub.repository_version, repo.latest_version_href)

    # Second pass: layer immediate-policy content onto the same repository.
    immediate_remote = create_remote(
        self.remote_api, body={"policy": "immediate"}, cleanup=self.addCleanup
    )
    sync_payload = RepositorySyncURL(remote=immediate_remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = self.repo_api.read(repo.pulp_href)

    pub = create_publication(self.pub_api, repo, cleanup=self.addCleanup)
    self.assertEqual(pub.repository_version, repo.latest_version_href)
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    client = gen_python_client()
    repo_api = RepositoriesPythonApi(client)
    remote_api = RemotesPythonApi(client)
    publications = PublicationsPypiApi(client)

    body = gen_python_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # Step 1: create extra repository versions by re-adding each unit.
    repo = repo_api.read(repo.pulp_href)
    for python_content in get_content(repo.to_dict())[PYTHON_CONTENT_NAME]:
        modify_repo(cfg, repo.to_dict(), add_units=[python_content])
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo.to_dict()))
    non_latest = choice(version_hrefs[:-1])

    # Step 2
    publish_data = PythonPythonPublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)

    # Step 3
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    # Step 4
    publish_data.repository = None
    publish_data.repository_version = non_latest
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    # Fix: the second publication was never cleaned up (resource leak in the
    # test); register its deletion like the first one.
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)

    # Step 5
    self.assertEqual(publication.repository_version, non_latest)

    # Step 6: repository and repository_version are mutually exclusive.
    with self.assertRaises(ApiException):
        body = {"repository": repo.pulp_href, "repository_version": non_latest}
        publications.create(body)
def test_all(self):
    """Verify whether content served by pulp can be downloaded.

    The process of publishing content is more involved in Pulp 3 than it
    was under Pulp 2. Given a repository, the process is as follows:

    1. Create a publication from the repository. (The latest repository
       version is selected if no version is specified.) A publication is a
       repository version plus metadata.
    2. Create a distribution from the publication. The distribution defines
       at which URLs a publication is available, e.g.
       ``http://example.com/content/foo/`` and
       ``http://example.com/content/bar/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
    """
    client = gen_python_client()
    repo_api = RepositoriesPythonApi(client)
    remote_api = RemotesPythonApi(client)
    publications = PublicationsPypiApi(client)
    distributions = DistributionsPypiApi(client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_python_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync a Repository
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Create a publication.
    publish_data = PythonPythonPublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    # Fix: monitor_task returns a task object here (see the other tests in
    # this file); the created hrefs live on ``.created_resources`` —
    # indexing the task itself would fail.
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    # Create a distribution.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = distributions.create(body)
    created_resources = monitor_task(distribution_response.task).created_resources
    distribution = distributions.read(created_resources[0])
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # Pick a content unit (of each type), and download it from both Pulp Fixtures…
    unit_paths = [
        choice(paths) for paths in get_python_content_paths(repo.to_dict()).values()
    ]
    fixtures_hashes = [
        hashlib.sha256(
            utils.http_get(
                urljoin(urljoin(PYTHON_FIXTURE_URL, "packages/"), unit_path[0])
            )
        ).hexdigest()
        for unit_path in unit_paths
    ]

    # …and Pulp.
    pulp_hashes = []
    cfg = config.get_config()
    for unit_path in unit_paths:
        content = download_content_unit(cfg, distribution.to_dict(), unit_path[1])
        pulp_hashes.append(hashlib.sha256(content).hexdigest())
    self.assertEqual(fixtures_hashes, pulp_hashes)