def do_sync(self, sync_udebs, fixture_summary):
    """Sync a deb repository and verify the resulting repository versions.

    A freshly created repository starts at version 0. The first sync must
    create a new repository version containing exactly the content given
    by ``fixture_summary``. A second sync of the same remote must be a
    no-op: the version href stays the same and no units are added.
    """
    repo_api = deb_repository_api
    remote_api = deb_remote_api

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(
        gen_deb_remote(sync_udebs=sync_udebs, gpgkey=DEB_SIGNING_KEY)
    )
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # First sync: must produce a new version with the expected content.
    self.assertEqual(
        repository.latest_version_href, f"{repository.pulp_href}versions/0/"
    )
    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_task.task)
    repository = repo_api.read(repository.pulp_href)

    self.assertIsNotNone(repository.latest_version_href)
    self.assertDictEqual(get_content_summary(repository.to_dict()), fixture_summary)
    self.assertDictEqual(
        get_added_content_summary(repository.to_dict()), fixture_summary
    )

    # Second sync: the repository version must not change.
    version_before = repository.latest_version_href
    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_task.task)
    repository = repo_api.read(repository.pulp_href)

    self.assertEqual(version_before, repository.latest_version_href)
    self.assertDictEqual(get_content_summary(repository.to_dict()), fixture_summary)
def do_test(self, policy):
    """Verify lazily synced content is visible via the content endpoint."""
    # Start from a clean slate so the package count asserted below is exact.
    delete_orphans()

    repo_api = deb_repository_api
    remote_api = deb_remote_api
    packages_api = deb_package_api

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(gen_deb_remote(policy=policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the repository; this must produce repository version 1.
    self.assertEqual(
        repository.latest_version_href, f"{repository.pulp_href}versions/0/"
    )
    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_task.task)
    repository = repo_api.read(repository.pulp_href)
    self.assertEqual(
        repository.latest_version_href, f"{repository.pulp_href}versions/1/"
    )

    # Listing must succeed (no HTTP error) and show every unit of the feed.
    packages = packages_api.list()
    self.assertEqual(packages.count, DEB_FIXTURE_PACKAGE_COUNT, packages)
def do_publish(self, download_policy):
    """Verify a repository synced with a lazy download policy can be published."""
    publication_api = deb_apt_publication_api
    repo_api = deb_repository_api
    remote_api = deb_remote_api

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(gen_deb_remote(policy=download_policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_task.task)
    repository = repo_api.read(repository.pulp_href)

    # Publish the latest repository version using the simple layout.
    publish_task = publication_api.create(
        DebAptPublication(simple=True, repository=repository.pulp_href)
    )
    publication_href = monitor_task(publish_task.task).created_resources[0]
    self.addCleanup(publication_api.delete, publication_href)

    publication = publication_api.read(publication_href)
    self.assertIsNotNone(publication.repository_version, publication)
def do_sync(self, download_policy):
    """Sync a repository using ``download_policy`` and verify idempotence.

    The first sync must create a repository version containing exactly the
    fixture content. Re-syncing the same remote must neither create a new
    repository version nor add any content units.
    """
    # Start from a clean slate so no content units linger on the file system.
    delete_orphans()

    repo_api = deb_repository_api
    remote_api = deb_remote_api

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(gen_deb_remote(policy=download_policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # First sync: a new repository version with the full fixture content.
    self.assertEqual(
        repository.latest_version_href, f"{repository.pulp_href}versions/0/"
    )
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repository.pulp_href, sync_data).task)
    repository = repo_api.read(repository.pulp_href)

    self.assertIsNotNone(repository.latest_version_href)
    self.assertDictEqual(
        get_content_summary(repository.to_dict()), DEB_FIXTURE_SUMMARY
    )
    self.assertDictEqual(
        get_added_content_summary(repository.to_dict()), DEB_FIXTURE_SUMMARY
    )

    # Second sync: must be a no-op since the same remote is synced again.
    version_before = repository.latest_version_href
    monitor_task(repo_api.sync(repository.pulp_href, sync_data).task)
    repository = repo_api.read(repository.pulp_href)

    self.assertEqual(version_before, repository.latest_version_href)
    self.assertDictEqual(
        get_content_summary(repository.to_dict()), DEB_FIXTURE_SUMMARY
    )
def do_test(self, policy):
    """Sync lazily, then switch the remote to immediate to force downloads."""
    # Artifact count after a lazy sync, i.e. metadata only — no packages.
    NON_LAZY_ARTIFACT_COUNT = 17
    # Start from a clean slate so the artifact counts below are exact.
    delete_orphans()

    repo_api = deb_repository_api
    remote_api = deb_remote_api

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(gen_deb_remote(policy=policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Lazy sync: only the metadata artifacts may be downloaded.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repository.pulp_href, sync_data).task)
    artifacts = artifact_api.list()
    self.assertEqual(artifacts.count, NON_LAZY_ARTIFACT_COUNT, artifacts)

    # Flip the remote's policy to immediate.
    update_task = remote_api.partial_update(
        remote.pulp_href, {"policy": "immediate"}
    )
    monitor_task(update_task.task)
    remote = remote_api.read(remote.pulp_href)
    self.assertEqual(remote.policy, "immediate")

    # Re-sync: the previously missing package artifacts must now arrive.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repository.pulp_href, sync_data).task)
    artifacts = artifact_api.list()
    self.assertEqual(
        artifacts.count,
        NON_LAZY_ARTIFACT_COUNT + DEB_FIXTURE_PACKAGE_COUNT,
        artifacts,
    )
def do_test(self, url=DEB_FIXTURE_URL, distribution=DEB_FIXTURE_DISTRIBUTIONS, **kwargs):
    """Create and sync a repository from ``url``; return the finished task."""
    repo_api = deb_repository_api
    remote_api = deb_remote_api

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(
        gen_deb_remote(url=url, distributions=distribution, **kwargs)
    )
    self.addCleanup(remote_api.delete, remote.pulp_href)

    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    return monitor_task(sync_task.task)
def test_file_decriptors(self):
    """Verify files under the pulp media path are closed after a sync.

    Regression test for `Pulp #4073 <https://pulp.plan.io/issues/4073>`_:

    1. Skip the test when ``lsof`` is unavailable.
    2. Create and sync a repository.
    3. Use ``lsof`` to list files still open below ``/var/lib/pulp/``.
    4. Assert that nothing was left open.
    """
    cli_client = cli.Client(self.cfg, cli.echo_handler)
    repo_api = deb_repository_api
    remote_api = deb_remote_api

    # Skip unless the 'lsof' utility is installed on the target host.
    if cli_client.run(("which", "lsof")).returncode != 0:
        raise unittest.SkipTest("lsof package is not present")

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    remote = remote_api.create(gen_deb_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)

    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_task.task)

    # Empty stdout means no file under MEDIA_PATH is still held open.
    open_files = cli_client.run(f"lsof -t +D {MEDIA_PATH}".split()).stdout
    self.assertEqual(len(open_files), 0, open_files)
def test_all(self):
    """Verify that a particular repository version can be published.

    1. Build a repository with several repository versions (by removing
       content units one at a time).
    2. Publish without naming a version and check the publication points
       at the latest repository version.
    3. Publish a non-latest version and check the publication points at
       exactly that version.
    4. Check that supplying ``repository`` and ``repository_version``
       together is rejected.
    """
    cfg = config.get_config()
    repo_api = deb_repository_api
    remote_api = deb_remote_api
    publication_api = self.Meta.publication_api

    remote = remote_api.create(gen_deb_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)

    repository = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repository.pulp_href)

    sync_task = repo_api.sync(
        repository.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_task.task)

    # Step 1: create extra repository versions by removing one unit at a time.
    repository = repo_api.read(repository.pulp_href)
    for unit in get_content(repository.to_dict())[DEB_GENERIC_CONTENT_NAME]:
        modify_repo(cfg, repository.to_dict(), remove_units=[unit])
    for unit in get_content(repository.to_dict())[DEB_PACKAGE_NAME]:
        modify_repo(cfg, repository.to_dict(), remove_units=[unit])
    version_hrefs = tuple(
        version["pulp_href"] for version in get_versions(repository.to_dict())
    )
    non_latest = choice(version_hrefs[:-1])

    # Step 2: publish by repository, without naming a version.
    publish_data = self.Meta.Publication(
        repository=repository.pulp_href, **self._publication_extra_args()
    )
    publication_href = monitor_task(
        publication_api.create(publish_data).task
    ).created_resources[0]
    self.addCleanup(publication_api.delete, publication_href)
    publication = publication_api.read(publication_href)

    # Step 3: the latest version must have been published.
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    # Step 4: publish an explicit, non-latest repository version.
    publish_data = self.Meta.Publication(
        repository_version=non_latest, **self._publication_extra_args()
    )
    publication_href = monitor_task(
        publication_api.create(publish_data).task
    ).created_resources[0]
    publication = publication_api.read(publication_href)

    # Step 5: exactly the supplied version must have been published.
    self.assertEqual(publication.repository_version, non_latest)

    # Step 6: repository and repository_version are mutually exclusive.
    with self.assertRaises(ApiException):
        publication_api.create(
            {"repository": repository.pulp_href, "repository_version": non_latest}
        )
def test_publish(self):
    """Publish a repository synced from a distribution with no packages.

    1. Sync a repository from the given distribution.
    2. Create a publication.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Assert that package index content exists but contains no packages.
    5. Assert the expected package index files can be downloaded.
    """
    # Create a repository:
    repository = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repository.pulp_href)

    # Create a remote:
    remote = deb_remote_api.create(
        gen_deb_remote(url=DEB_FIXTURE_URL, distributions="ginnungagap")
    )
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)

    # Sync the repository:
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repository.pulp_href, sync_data).task)
    repository = deb_repository_api.read(repository.pulp_href)
    version_hrefs = tuple(
        version["pulp_href"] for version in get_versions(repository.to_dict())
    )
    self.assertIsNotNone(repository.latest_version_href)

    # Create a publication:
    publish_data = DebAptPublication(
        repository=repository.pulp_href, **self._publication_extra_args()
    )
    publication_href = monitor_task(
        deb_apt_publication_api.create(publish_data).task
    ).created_resources[0]
    self.addCleanup(deb_apt_publication_api.delete, publication_href)
    publication = deb_apt_publication_api.read(publication_href)

    # The publication must reference the latest repository version:
    self.assertEqual(publication.repository_version, version_hrefs[-1])
    release = get_content(
        repo=publication.to_dict(), version_href=publication.repository_version
    )
    package_index_paths = [
        "dists/ginnungagap/asgard/binary-ppc64/Packages",
        "dists/ginnungagap/jotunheimr/binary-armeb/Packages",
        "dists/ginnungagap/asgard/binary-armeb/Packages",
        "dists/ginnungagap/jotunheimr/binary-ppc64/Packages",
        "dists/default/all/binary-all/Packages",
    ]
    # No packages are published, and there is one fewer package index
    # content unit than there are downloadable index paths:
    self.assertFalse(release[DEB_PACKAGE_NAME])
    self.assertTrue(release[DEB_PACKAGE_INDEX_NAME])
    self.assertEqual(
        len(package_index_paths) - 1, len(release[DEB_PACKAGE_INDEX_NAME])
    )

    # Create a distribution:
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_href = monitor_task(
        deb_distribution_api.create(body).task
    ).created_resources[0]
    distribution = deb_distribution_api.read(distribution_href)
    self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

    # Check that the expected package indices can be downloaded:
    cfg = config.get_config()
    for package_index_path in package_index_paths:
        download_content_unit(cfg, distribution.to_dict(), package_index_path)
def do_publish(self, expected_values, modus):
    """Publish a repository synced from a flat format repository.

    1. Sync a repository from the flat repo fixture.
    2. Create a publication (verbatim or APT, depending on ``modus``).
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Assert that the Release file path, release metadata, components,
       and package indices all match ``expected_values``.
    5. Assert the published Release file and package indices can be
       downloaded from a distribution.
    """
    # Create a repository:
    repository = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repository.pulp_href)

    # Create a remote for the flat repo fixture:
    remote = deb_remote_api.create(
        gen_deb_remote(
            url=DEB_FLAT_REPO_FIXTURE_URL,
            distributions=expected_values["distribution"],
        )
    )
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)

    # Sync the repository:
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repository.pulp_href, sync_data).task)
    repository = deb_repository_api.read(repository.pulp_href)
    version_hrefs = tuple(
        version["pulp_href"] for version in get_versions(repository.to_dict())
    )
    self.assertIsNotNone(repository.latest_version_href)

    # Create a publication, picking the API that matches the modus:
    if modus == "verbatim":
        publication_api = deb_verbatim_publication_api
        Publication = DebVerbatimPublication
    else:
        publication_api = deb_apt_publication_api
        Publication = DebAptPublication
    publish_data = Publication(
        repository=repository.pulp_href, **self._publication_extra_args(modus)
    )
    publication_href = monitor_task(
        publication_api.create(publish_data).task
    ).created_resources[0]
    self.addCleanup(publication_api.delete, publication_href)
    publication = publication_api.read(publication_href)

    # The publication must reference the latest repository version:
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    def published(content_name):
        # Content units of the published repository version, by type name.
        return get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version,
        )[content_name]

    release_file = published(DEB_RELEASE_FILE_NAME)[0]
    expected_release_path = os.path.join(
        expected_values["release_file_folder_sync"], "Release"
    )
    self.assertEqual(expected_release_path, release_file["relative_path"])
    self.assertEqual(expected_values["distribution"], release_file["distribution"])
    self.assertEqual(expected_values["codename"], release_file["codename"])
    self.assertEqual(expected_values["suite"], release_file["suite"])

    release = published(DEB_RELEASE_NAME)[0]
    self.assertEqual(expected_values["distribution"], release["distribution"])
    self.assertEqual(expected_values["codename"], release["codename"])
    self.assertEqual(expected_values["suite"], release["suite"])

    components = published(DEB_RELEASE_COMPONENT_NAME)
    self.assertEqual(len(expected_values["components"]), len(components))
    for component in components:
        self.assertIn(component["component"], expected_values["components"])

    package_indices = published(DEB_PACKAGE_INDEX_NAME)
    self.assertEqual(
        len(expected_values["package_index_paths_sync"]), len(package_indices)
    )
    for package_index in package_indices:
        self.assertIn(
            package_index["relative_path"],
            expected_values["package_index_paths_sync"],
        )

    # Create a distribution:
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_href = monitor_task(
        deb_distribution_api.create(body).task
    ).created_resources[0]
    distribution = deb_distribution_api.read(distribution_href)
    self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

    # Check that the expected Release file and package indices are served:
    cfg = config.get_config()
    download_content_unit(
        cfg,
        distribution.to_dict(),
        os.path.join(expected_values["release_file_folder_dist"], "Release"),
    )
    for package_index_path in expected_values["package_index_paths_dist"]:
        download_content_unit(cfg, distribution.to_dict(), package_index_path)
def do_publish(self, expected_values):
    """Publish a repository whose remote is missing some package indices.

    1. Sync with ``ignore_missing_package_indices=True`` from a fixture
       that lacks the package indices for some architectures.
    2. Create a publication.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Assert that the InRelease file path and release metadata match
       ``expected_values``.
    5. Assert every published package index matches an expected path and
       the published files can be downloaded.
    """
    # Create a repository:
    repository = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repository.pulp_href)

    # Create a remote that tolerates missing package indices:
    remote = deb_remote_api.create(
        gen_deb_remote(
            url=DEB_MISSING_ARCH_DISTS_FIXTURE_URL,
            distributions=expected_values["distribution"],
            ignore_missing_package_indices=True,
        )
    )
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)

    # Sync the repository:
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repository.pulp_href, sync_data).task)
    repository = deb_repository_api.read(repository.pulp_href)
    version_hrefs = tuple(
        version["pulp_href"] for version in get_versions(repository.to_dict())
    )

    # Create a publication:
    publish_data = DebAptPublication(
        repository=repository.pulp_href, **self._publication_extra_args()
    )
    publication_href = monitor_task(
        deb_apt_publication_api.create(publish_data).task
    ).created_resources[0]
    self.addCleanup(deb_apt_publication_api.delete, publication_href)
    publication = deb_apt_publication_api.read(publication_href)

    # The publication must reference the latest repository version:
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    def published(content_name):
        # Content units of the published repository version, by type name.
        return get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version,
        )[content_name]

    release_file = published(DEB_RELEASE_FILE_NAME)[0]
    expected_release_path = os.path.join(
        expected_values["release_file_folder"], "InRelease"
    )
    self.assertEqual(expected_release_path, release_file["relative_path"])
    self.assertEqual(expected_values["distribution"], release_file["distribution"])
    self.assertEqual(expected_values["codename"], release_file["codename"])
    self.assertEqual(expected_values["suite"], release_file["suite"])

    release = published(DEB_RELEASE_NAME)[0]
    self.assertEqual(expected_values["distribution"], release["distribution"])
    self.assertEqual(expected_values["codename"], release["codename"])
    self.assertEqual(expected_values["suite"], release["suite"])

    components = published(DEB_RELEASE_COMPONENT_NAME)
    self.assertEqual(
        {component["component"] for component in components},
        set(expected_values["components"]),
    )

    package_indices = published(DEB_PACKAGE_INDEX_NAME)
    # The release file advertises indices that are absent from the fixture,
    # so fewer indices are published than there are expected paths:
    self.assertNotEqual(
        len(expected_values["package_index_paths"]), len(package_indices)
    )
    # Every index that WAS published must match one of the expected paths:
    for package_index in package_indices:
        self.assertTrue(
            any(
                package_index["relative_path"] == os.path.join(expected, "Packages")
                for expected in expected_values["package_index_paths"]
            )
        )
    # NOTE(review): remote.url is a URL, so os.path.isdir only does anything
    # meaningful if it happens to resolve to a local path — confirm intent.
    self.assertFalse(
        os.path.isdir(os.path.join(remote.url, "dists/ragnarok/asgard/binary-armeb"))
    )
    self.assertFalse(
        os.path.isdir(
            os.path.join(remote.url, "dists/ragnarok/jotunheimr/binary-armeb")
        )
    )

    # Create a distribution:
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_href = monitor_task(
        deb_distribution_api.create(body).task
    ).created_resources[0]
    distribution = deb_distribution_api.read(distribution_href)
    self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

    # Check that the expected Release file and package indices are served:
    cfg = config.get_config()
    download_content_unit(
        cfg,
        distribution.to_dict(),
        os.path.join(expected_values["release_file_folder"], "Release"),
    )
    for package_index_path in expected_values["package_index_paths"]:
        download_content_unit(
            cfg, distribution.to_dict(), package_index_path + "/Packages"
        )
def do_test(self, policy):
    """Verify whether content served by pulp can be downloaded.

    The process of publishing content is more involved in Pulp 3 than it
    was under Pulp 2. Given a repository, the process is as follows:

    1. Create a publication from the repository. (The latest repository
       version is selected if no version is specified.) A publication is a
       repository version plus metadata.
    2. Create a distribution from the publication. The distribution defines
       at which URLs a publication is available, e.g.
       ``http://example.com/content/foo/`` and
       ``http://example.com/content/bar/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
    """
    repo_api = deb_repository_api
    remote_api = deb_remote_api
    publication_api = self.Meta.publication_api
    distribution_api = deb_distribution_api

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    # Fix: the ``policy`` parameter was previously accepted but never used,
    # so all download policies exercised the default. Pass it through, as
    # the sibling do_test/do_publish helpers in this file do.
    body = gen_deb_remote(policy=policy)
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync a Repository
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Create a publication.
    publish_data = self.Meta.Publication(repository=repo.pulp_href)
    publish_response = publication_api.create(publish_data)
    # Fix: monitor_task returns the finished task; the created href lives in
    # ``created_resources`` (consistent with every other block in this file,
    # instead of subscripting the task itself).
    publication_href = monitor_task(publish_response.task).created_resources[0]
    self.addCleanup(publication_api.delete, publication_href)

    # Create a distribution.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = distribution_api.create(body)
    distribution_href = monitor_task(
        distribution_response.task
    ).created_resources[0]
    distribution = distribution_api.read(distribution_href)
    self.addCleanup(distribution_api.delete, distribution.pulp_href)

    # Pick one content unit of each type, and download it from Pulp Fixtures…
    unit_paths = [
        choice(paths)
        for paths in self.Meta.get_content_unit_paths(repo).values()
        if paths
    ]
    fixtures_hashes = [
        hashlib.sha256(
            utils.http_get(urljoin(DEB_FIXTURE_URL, unit_path[0]))
        ).hexdigest()
        for unit_path in unit_paths
    ]

    # …and from Pulp itself, then compare the checksums.
    pulp_hashes = []
    cfg = config.get_config()
    for unit_path in unit_paths:
        content = download_content_unit(cfg, distribution.to_dict(), unit_path[1])
        pulp_hashes.append(hashlib.sha256(content).hexdigest())
    self.assertEqual(fixtures_hashes, pulp_hashes)