def do_publish(self, download_policy):
    """Publish a repository that was synced with a lazy download policy."""
    # Set up a repository and a remote using the requested policy.
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(gen_deb_remote(policy=download_policy))
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # Sync the repository and refresh our view of it.
    sync_task = deb_repository_api.sync(
        repo.pulp_href, RepositorySyncURL(remote=remote.pulp_href)
    ).task
    monitor_task(sync_task)
    repo = deb_repository_api.read(repo.pulp_href)
    # Publish the latest repository version using the "simple" layout.
    publish_task = deb_apt_publication_api.create(
        DebAptPublication(simple=True, repository=repo.pulp_href)
    ).task
    publication_href = monitor_task(publish_task)[0]
    self.addCleanup(deb_apt_publication_api.delete, publication_href)
    publication = deb_apt_publication_api.read(publication_href)
    # The publication must record the repository version it was built from.
    self.assertIsNotNone(publication.repository_version, publication)
def do_test(self, policy):
    """Access lazily synced content using the content endpoint."""
    # Delete orphans first so no content units are present on the file
    # system when the test starts.
    delete_orphans()
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(gen_deb_remote(policy=policy))
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # A freshly created repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    repo = deb_repository_api.read(repo.pulp_href)
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/1/")
    # No HTTP error was raised; the number of units present must match the
    # synced feed.
    listing = deb_package_api.list()
    self.assertEqual(listing.count, DEB_FIXTURE_PACKAGE_COUNT, listing)
def test_raise_error(self):
    """Create a duplicate content unit using same relative_path.

    Artifacts are unique by ``relative_path`` and ``file``. In order to
    raise an HTTP error, the same ``artifact`` and the same
    ``relative_path`` should be used.
    """
    delete_orphans()
    attrs = self.gen_content_attrs(gen_artifact(self.CONTENT_URL))
    # The first unit with these attributes is created successfully.
    first_task = self.content_api.create(**attrs).task
    self.content_api.read(monitor_task(first_task)[0])
    # Reusing the exact same attributes must make the creation task fail.
    duplicate_response = self.content_api.create(**attrs)
    with self.assertRaises(PulpTaskError) as exc:
        monitor_task(duplicate_response.task)
    self.assertEqual(exc.exception.task.state, "failed")
    error = exc.exception.task.error
    description = error["description"].lower()
    for expected_word in ("already", "relative", "path", "sha256"):
        self.assertIn(expected_word, description, error)
def do_sync(self, sync_udebs, fixture_summary):
    """Sync repositories with the deb plugin.

    In order to sync a repository a remote has to be associated within
    this repository. When a repository is created this version field is
    set as None. After a sync the repository version is updated.

    Do the following:

    1. Create a repository, and a remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Assert that the correct number of units were added and are present
       in the repo.
    6. Sync the remote one more time.
    7. Assert that repository version is the same as the previous one.
    8. Assert that the same number of content units are present and that
       no units were added.
    """
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(
        gen_deb_remote(sync_udebs=sync_udebs, gpgkey=DEB_SIGNING_KEY)
    )
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # First sync: a new repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    repo = deb_repository_api.read(repo.pulp_href)
    self.assertIsNotNone(repo.latest_version_href)
    repo_dict = repo.to_dict()
    self.assertDictEqual(get_content_summary(repo_dict), fixture_summary)
    self.assertDictEqual(get_added_content_summary(repo_dict), fixture_summary)
    # Second sync: nothing changed upstream, so no new version is created.
    version_before = repo.latest_version_href
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    repo = deb_repository_api.read(repo.pulp_href)
    self.assertEqual(version_before, repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), fixture_summary)
def test_04_fully_update(self):
    """Update a remote using HTTP PUT."""
    body = _gen_verbose_remote()
    monitor_task(self.remote_api.update(self.remote.pulp_href, body).task)
    # Credentials are write-only and never echoed back by the API, so drop
    # them before comparing the read-back remote against the sent body.
    del body["username"]
    del body["password"]
    type(self).remote = self.remote_api.read(self.remote.pulp_href)
    remote_dict = self.remote.to_dict()
    for key, expected in body.items():
        with self.subTest(key=key):
            self.assertEqual(remote_dict[key], expected, key)
def do_sync(self, download_policy):
    """Sync repositories with the different ``download_policy``.

    Do the following:

    1. Create a repository, and a remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Assert that the correct number of possible units to be downloaded
       were shown.
    6. Sync the remote one more time in order to create another repository
       version.
    7. Assert that repository version is the same as the previous one.
    8. Assert that the same number of units are shown, and after the
       second sync no extra units should be shown, since the same remote
       was synced again.
    """
    # Delete orphans first so no content units are present on the file
    # system when the test starts.
    delete_orphans()
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(gen_deb_remote(policy=download_policy))
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # First sync: a new repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    repo = deb_repository_api.read(repo.pulp_href)
    self.assertIsNotNone(repo.latest_version_href)
    repo_dict = repo.to_dict()
    self.assertDictEqual(get_content_summary(repo_dict), DEB_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo_dict), DEB_FIXTURE_SUMMARY)
    # Second sync: the same remote again must not produce a new version.
    version_before = repo.latest_version_href
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    repo = deb_repository_api.read(repo.pulp_href)
    self.assertEqual(version_before, repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), DEB_FIXTURE_SUMMARY)
def test_03_fail_duplicate_content_unit(self):
    """Create content unit."""
    # The temp file must exist until the creation task has finished, so
    # the whole check runs inside the context manager.
    with NamedTemporaryFile() as temp_file:
        temp_file.write(self.file)
        temp_file.flush()
        response = self.content_api.create(**self.attrs, file=temp_file.name)
        # Creating a second unit with identical attributes must fail.
        with self.assertRaises(PulpTaskError) as exc:
            monitor_task(response.task)
        self.assertEqual(exc.exception.task.state, "failed")
        error = exc.exception.task.error
        description = error["description"].lower()
        for expected_word in ("already", "relative", "path", "sha256"):
            self.assertIn(expected_word, description, error)
def test_02_change_policy(self):
    """Verify ability to change policy to value other than the default.

    Update the remote policy to a valid value other than `immediate`
    and verify the new set value.
    """
    # Pick any policy except the default "immediate" one.
    changed_policy = choice([item for item in self.policies if item != "immediate"])
    response = self.remote_api.partial_update(
        self.remote["pulp_href"], {"policy": changed_policy}
    )
    monitor_task(response.task)
    # Re-read the remote and confirm the policy change stuck.
    self.remote.update(self.remote_api.read(self.remote["pulp_href"]).to_dict())
    self.assertEqual(self.remote["policy"], changed_policy, self.remote)
def test_01_create_content_unit(self):
    """Create content unit."""
    attrs = self.gen_content_attrs(self.artifact)
    create_task = self.content_api.create(**attrs).task
    unit = self.content_api.read(monitor_task(create_task)[0])
    # Cache the created unit for the later tests in this sequence.
    self.content_unit.update(unit.to_dict())
    # Every attribute we sent must round-trip unchanged.
    for key, expected in attrs.items():
        with self.subTest(key=key):
            self.assertEqual(self.content_unit[key], expected)
def test_01_create_content_unit(self):
    """Create content unit."""
    # The temp file must exist until the creation task has finished, so
    # the whole flow runs inside the context manager.
    with NamedTemporaryFile() as temp_file:
        temp_file.write(self.file)
        temp_file.flush()
        create_task = self.content_api.create(**self.attrs, file=temp_file.name).task
        unit = self.content_api.read(monitor_task(create_task)[0])
        # Cache the created unit for the later tests in this sequence.
        self.content_unit.update(unit.to_dict())
        # Every attribute we sent must round-trip unchanged.
        for key, expected in self.attrs.items():
            with self.subTest(key=key):
                self.assertEqual(self.content_unit[key], expected)
def test_non_error(self):
    """Create a duplicate content unit with different relative_path.

    Artifacts are unique by ``relative_path`` and ``file``. In order to
    avoid an HTTP error, use the same ``artifact`` and different
    ``relative_path``.
    """
    delete_orphans()
    attrs = self.gen_content_attrs(gen_artifact(self.CONTENT_URL))
    # The first unit is created from the generated attributes.
    first_task = self.content_api.create(**attrs).task
    unit = self.content_api.read(monitor_task(first_task)[0])
    # Package types only validate the filename, so prepending a prefix to
    # the path yields a distinct but still valid relative_path.
    attrs["relative_path"] = "moved-" + unit.relative_path
    # Creating a second unit with the new path must succeed.
    second_task = self.content_api.create(**attrs).task
    self.content_api.read(monitor_task(second_task)[0])
def test_file_decriptors(self):
    """Test whether file descriptors are closed properly.

    This test targets the following issue:
    `Pulp #4073 <https://pulp.plan.io/issues/4073>`_

    Do the following:

    1. Check if 'lsof' is installed. If it is not, skip this test.
    2. Create and sync a repo.
    3. Run the 'lsof' command to verify that files in the path
       ``/var/lib/pulp/`` are closed after the sync.
    4. Assert that issued command returns `0` opened files.
    """
    cli_client = cli.Client(self.cfg, cli.echo_handler)
    # Skip unless the 'lsof' utility is available on the target host.
    if cli_client.run(("which", "lsof")).returncode != 0:
        raise unittest.SkipTest("lsof package is not present")
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(gen_deb_remote())
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    # 'lsof -t +D' prints one line per open file below MEDIA_PATH; empty
    # output means every descriptor was closed after the sync.
    lsof_cmd = "lsof -t +D {}".format(MEDIA_PATH).split()
    open_files = cli_client.run(lsof_cmd).stdout
    self.assertEqual(len(open_files), 0, open_files)
def do_test(self, url=DEB_FIXTURE_URL, distribution=DEB_FIXTURE_DISTRIBUTIONS, **kwargs):
    """Sync a repository given ``url`` on the remote."""
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(
        gen_deb_remote(url=url, distributions=distribution, **kwargs)
    )
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # Kick off the sync and hand the finished task back to the caller.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_task = deb_repository_api.sync(repo.pulp_href, sync_data).task
    return monitor_task(sync_task)
def do_test(self, policy):
    """Perform a lazy sync and change to immediate to force download."""
    NON_LAZY_ARTIFACT_COUNT = 17
    # Delete orphans first so the artifact count starts from a clean
    # baseline with no content units on the file system.
    delete_orphans()
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(gen_deb_remote(policy=policy))
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # Sync with the lazy policy: only the non-package artifacts land.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    artifacts = artifact_api.list()
    self.assertEqual(artifacts.count, NON_LAZY_ARTIFACT_COUNT, artifacts)
    # Flip the remote over to the immediate policy.
    update_task = deb_remote_api.partial_update(
        remote.pulp_href, {"policy": "immediate"}
    ).task
    monitor_task(update_task)
    remote = deb_remote_api.read(remote.pulp_href)
    self.assertEqual(remote.policy, "immediate")
    # Re-sync: the previously skipped package artifacts are now fetched.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    artifacts = artifact_api.list()
    self.assertEqual(
        artifacts.count, NON_LAZY_ARTIFACT_COUNT + DEB_FIXTURE_PACKAGE_COUNT, artifacts
    )
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest
       ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    publication_api = self.Meta.publication_api
    remote = deb_remote_api.create(gen_deb_remote())
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    # Step 1: removing units one at a time yields additional repo versions.
    repo = deb_repository_api.read(repo.pulp_href)
    repo_dict = repo.to_dict()
    for generic_content in get_content(repo_dict)[DEB_GENERIC_CONTENT_NAME]:
        modify_repo(cfg, repo_dict, remove_units=[generic_content])
    for package in get_content(repo_dict)[DEB_PACKAGE_NAME]:
        modify_repo(cfg, repo_dict, remove_units=[package])
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo_dict))
    non_latest = choice(version_hrefs[:-1])
    # Step 2: publish the latest version implicitly via ``repository``.
    publish_data = self.Meta.Publication(
        repository=repo.pulp_href, **self._publication_extra_args()
    )
    publish_task = publication_api.create(publish_data).task
    publication_href = monitor_task(publish_task)[0]
    self.addCleanup(publication_api.delete, publication_href)
    publication = publication_api.read(publication_href)
    # Step 3
    self.assertEqual(publication.repository_version, version_hrefs[-1])
    # Step 4: publish an explicitly chosen, older version.
    publish_data = self.Meta.Publication(
        repository_version=non_latest, **self._publication_extra_args()
    )
    publish_task = publication_api.create(publish_data).task
    publication = publication_api.read(monitor_task(publish_task)[0])
    # Step 5
    self.assertEqual(publication.repository_version, non_latest)
    # Step 6: supplying both selectors at once must be rejected.
    with self.assertRaises(ApiException):
        publication_api.create(
            {"repository": repo.pulp_href, "repository_version": non_latest}
        )
def do_test(self, policy):
    """Verify whether content served by pulp can be downloaded.

    The process of publishing content is more involved in Pulp 3 than it
    was under Pulp 2. Given a repository, the process is as follows:

    1. Create a publication from the repository. (The latest repository
       version is selected if no version is specified.) A publication is a
       repository version plus metadata.
    2. Create a distribution from the publication. The distribution
       defines at which URLs a publication is available, e.g.
       ``http://example.com/content/foo/`` and
       ``http://example.com/content/bar/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
    """
    publication_api = self.Meta.publication_api
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    remote = deb_remote_api.create(gen_deb_remote())
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # Sync a repository.
    sync_data = RepositorySyncURL(remote=remote.pulp_href)
    monitor_task(deb_repository_api.sync(repo.pulp_href, sync_data).task)
    repo = deb_repository_api.read(repo.pulp_href)
    # Create a publication from the latest repository version.
    publish_data = self.Meta.Publication(repository=repo.pulp_href)
    publication_href = monitor_task(publication_api.create(publish_data).task)[0]
    self.addCleanup(publication_api.delete, publication_href)
    # Create a distribution pointing at the publication.
    distribution_body = gen_distribution()
    distribution_body["publication"] = publication_href
    distribution_href = monitor_task(
        deb_distribution_api.create(distribution_body).task
    )[0]
    distribution = deb_distribution_api.read(distribution_href)
    self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)
    # Pick a content unit (of each type), and download it from both Pulp
    # Fixtures…
    unit_paths = [
        choice(paths)
        for paths in self.Meta.get_content_unit_paths(repo).values()
        if paths
    ]
    fixtures_hashes = [
        hashlib.sha256(
            utils.http_get(urljoin(DEB_FIXTURE_URL, unit_path[0]))
        ).hexdigest()
        for unit_path in unit_paths
    ]
    # …and Pulp, then compare the checksums pairwise.
    cfg = config.get_config()
    pulp_hashes = [
        hashlib.sha256(
            download_content_unit(cfg, distribution.to_dict(), unit_path[1])
        ).hexdigest()
        for unit_path in unit_paths
    ]
    self.assertEqual(fixtures_hashes, pulp_hashes)
def test_05_delete(self):
    """Delete a remote."""
    monitor_task(self.remote_api.delete(self.remote.pulp_href).task)
    # Reading the deleted remote must now fail.
    with self.assertRaises(ApiException):
        self.remote_api.read(self.remote.pulp_href)
def do_publish(self, expected_values):
    """Publish particular repository with complex distributions.

    1. Create a repository with complex distributions.
    2. Create a publication.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Assert that InRelease file path is equal to desired file path.
    5. Assert that the codename, suite and component are as expected.
    """
    # Create a repository:
    repo = deb_repository_api.create(gen_repo())
    self.addCleanup(deb_repository_api.delete, repo.pulp_href)
    # Create a remote:
    body = gen_deb_remote(
        url=DEB_COMPLEX_DISTS_FIXTURE_URL, distributions=expected_values["distribution"]
    )
    remote = deb_remote_api.create(body)
    self.addCleanup(deb_remote_api.delete, remote.pulp_href)
    # Sync the repository:
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = deb_repository_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = deb_repository_api.read(repo.pulp_href)
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo.to_dict()))
    self.assertIsNotNone(repo.latest_version_href)
    # Create a publication:
    publish_data = DebAptPublication(
        repository=repo.pulp_href, **self._publication_extra_args()
    )
    publish_response = deb_apt_publication_api.create(publish_data)
    created_resources = monitor_task(publish_response.task)
    publication_href = created_resources[0]
    self.addCleanup(deb_apt_publication_api.delete, publication_href)
    publication = deb_apt_publication_api.read(publication_href)
    # Test the publication: it must point at the latest repo version.
    self.assertEqual(publication.repository_version, version_hrefs[-1])
    release_file = get_content(
        repo=publication.to_dict(), version_href=publication.repository_version
    )[DEB_RELEASE_FILE_NAME][0]
    release_file_path = os.path.join(expected_values["release_file_folder"], "InRelease")
    self.assertEqual(release_file_path, release_file["relative_path"])
    self.assertEqual(expected_values["distribution"], release_file["distribution"])
    self.assertEqual(expected_values["codename"], release_file["codename"])
    self.assertEqual(expected_values["suite"], release_file["suite"])
    release = get_content(
        repo=publication.to_dict(), version_href=publication.repository_version
    )[DEB_RELEASE_NAME][0]
    self.assertEqual(expected_values["distribution"], release["distribution"])
    self.assertEqual(expected_values["codename"], release["codename"])
    self.assertEqual(expected_values["suite"], release["suite"])
    components = get_content(
        repo=publication.to_dict(), version_href=publication.repository_version
    )[DEB_RELEASE_COMPONENT_NAME]
    self.assertEqual(len(expected_values["components"]), len(components))
    for component in components:
        self.assertIn(component["component"], expected_values["components"])
    package_indices = get_content(
        repo=publication.to_dict(), version_href=publication.repository_version
    )[DEB_PACKAGE_INDEX_NAME]
    self.assertEqual(len(expected_values["package_index_paths"]), len(package_indices))
    for package_index in package_indices:
        self.assertIn(package_index["relative_path"], expected_values["package_index_paths"])
    # Create a distribution:
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = deb_distribution_api.create(body)
    distribution_href = monitor_task(distribution_response.task)[0]
    distribution = deb_distribution_api.read(distribution_href)
    self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)
    # Check that the expected Release files and package indices are there:
    cfg = config.get_config()
    release_file_path = os.path.join(expected_values["release_file_folder"], "Release")
    download_content_unit(cfg, distribution.to_dict(), release_file_path)
    for package_index_path in expected_values["package_index_paths"]:
        download_content_unit(cfg, distribution.to_dict(), package_index_path)