def setUpClass(cls):
    """Create class-wide variables."""
    cls.cfg = config.get_config()
    cls.client = api.Client(cls.cfg, api.json_handler)
    bindings_cfg = cls.cfg.get_bindings_config()
    cls.core_client = CoreApiClient(configuration=bindings_cfg)
    rpm = gen_rpm_client()
    cls.rpm_client = rpm
    # RPM plugin endpoints.
    cls.repo_api = RepositoriesRpmApi(rpm)
    cls.remote_api = RemotesRpmApi(rpm)
    # Export machinery lives in pulpcore.
    cls.exporter_api = ExportersPulpApi(cls.core_client)
    cls.exports_api = ExportersCoreExportsApi(cls.core_client)
    # Helper builds the fixture repository/remote pair used by the tests.
    cls.repo, cls.remote = cls._setup_repositories()
def setUpClass(cls):
    """Create class-wide variables."""
    cls.cfg = config.get_config()
    cls.client = api.Client(cls.cfg, api.json_handler)
    rpm = gen_rpm_client()
    cls.rpm_client = rpm
    # Comps upload endpoint plus repository/version endpoints.
    cls.comps_api = RpmCompsApi(rpm)
    cls.repo_api = RepositoriesRpmApi(rpm)
    cls.repo_version_api = RepositoriesRpmVersionsApi(rpm)
    # One content endpoint per comps unit type.
    cls.groups_api = ContentPackagegroupsApi(rpm)
    cls.envs_api = ContentPackageenvironmentsApi(rpm)
    cls.groupslangpacks_api = ContentPackagelangpacksApi(rpm)
    cls.categories_api = ContentPackagecategoriesApi(rpm)
    # Expected unit totals for the small fixture and the CentOS 8 fixture.
    cls.small_content = SMALL_GROUPS + SMALL_CATEGORY + SMALL_LANGPACK + SMALL_ENVIRONMENTS
    cls.centos8_content = BIG_GROUPS + BIG_CATEGORY + BIG_LANGPACK + BIG_ENVIRONMENTS
def setUpClass(cls):
    """Verify whether dnf or yum are present."""
    cls.cfg = config.get_config()
    core = CoreApiClient(cls.cfg.get_bindings_config())
    cls.artifacts_api = ArtifactsApi(core)
    rpm = gen_rpm_client()
    cls.client = rpm
    cls.repo_api = RepositoriesRpmApi(rpm)
    cls.remote_api = RemotesRpmApi(rpm)
    cls.publications = PublicationsRpmApi(rpm)
    cls.distributions = DistributionsRpmApi(rpm)
    # Updated later once artifacts are counted before package consumption.
    cls.before_consumption_artifact_count = 0
    # Skip the whole class when neither dnf nor yum is available on the host.
    cls.pkg_mgr = cli.PackageManager(cls.cfg)
    cls.pkg_mgr.raise_if_unsupported(unittest.SkipTest, "This test requires dnf or yum.")
def test_file_decriptors(self):
    """Test whether file descriptors are closed properly.

    This test targets the following issue:

    `Pulp #4073 <https://pulp.plan.io/issues/4073>`_

    Do the following:

    1. Check if 'lsof' is installed. If it is not, skip this test.
    2. Create and sync a repo.
    3. Run the 'lsof' command to verify that files in the path
       ``/var/lib/pulp/`` are closed after the sync.
    4. Assert that issued command returns `0` opened files.
    """
    shell = cli.Client(self.cfg, cli.echo_handler)
    # Skip unless the 'lsof' utility is available on the target host.
    if shell.run(("which", "lsof")).returncode != 0:
        raise unittest.SkipTest("lsof package is not present")
    repo_api = RepositoriesRpmApi(self.client)
    remote_api = RemotesRpmApi(self.client)
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote())
    self.addCleanup(remote_api.delete, remote.pulp_href)
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_payload).task)
    # No descriptor under the Pulp media path should remain open after the sync.
    lsof_cmd = "lsof -t +D {}".format(MEDIA_PATH).split()
    open_files = shell.run(lsof_cmd).stdout
    self.assertEqual(len(open_files), 0, open_files)
def setUpClass(cls):
    """Create class-wide variables.

    CDN client certificate, key, and CA certificate are read from the
    environment.  When any of them is missing, ``cls.cdn_client_cert``
    stays ``False`` so CDN-dependent tests can skip.
    """
    cls.client = gen_rpm_client()
    cls.repo_api = RepositoriesRpmApi(cls.client)
    cls.remote_api = RemotesRpmApi(cls.client)
    cls.repometadatafiles = ContentRepoMetadataFilesApi(cls.client)
    delete_orphans()
    # Certificates processing
    cls.cdn_client_cert = False
    # Use .get() so an unset variable yields None instead of raising KeyError
    # (a KeyError here would abort the entire test class instead of skipping).
    if (
        os.environ.get("CDN_CLIENT_CERT")
        and os.environ.get("CDN_CLIENT_KEY")
        and os.environ.get("CDN_CA_CERT")
    ):
        # strings have escaped newlines from environmental variable
        cls.cdn_client_cert = os.environ["CDN_CLIENT_CERT"].replace("\\n", "\n")
        cls.cdn_client_key = os.environ["CDN_CLIENT_KEY"].replace("\\n", "\n")
        cls.cdn_ca_cert = os.environ["CDN_CA_CERT"].replace("\\n", "\n")
def setUpClass(cls):
    """Create class-wide variables.

    CDN client certificate, key, and CA certificate are read from the
    environment.  When any of them is missing, ``cls.cdn_client_cert``
    stays ``False`` so CDN-dependent tests can skip.
    """
    cls.cfg = config.get_config()
    cls.client = gen_rpm_client()
    cls.repo_api = RepositoriesRpmApi(cls.client)
    cls.remote_api = RemotesRpmApi(cls.client)
    cls.repometadatafiles = ContentRepoMetadataFilesApi(cls.client)
    delete_orphans(cls.cfg)
    # Certificates processing
    cls.cdn_client_cert = False
    # Use .get() so an unset variable yields None instead of raising KeyError
    # (a KeyError here would abort the entire test class instead of skipping).
    if (
        os.environ.get("CDN_CLIENT_CERT")
        and os.environ.get("CDN_CLIENT_KEY")
        and os.environ.get("CDN_CA_CERT")
    ):
        # strings have escaped newlines from environmental variable
        cls.cdn_client_cert = os.environ["CDN_CLIENT_CERT"].replace("\\n", "\n")
        cls.cdn_client_key = os.environ["CDN_CLIENT_KEY"].replace("\\n", "\n")
        cls.cdn_ca_cert = os.environ["CDN_CA_CERT"].replace("\\n", "\n")
def setUpClass(cls):
    """Create class-wide variables."""
    cls.cfg = config.get_config()
    cls.client = api.Client(cls.cfg, api.json_handler)
    core = CoreApiClient(configuration=cls.cfg.get_bindings_config())
    cls.core_client = core
    rpm = gen_rpm_client()
    cls.rpm_client = rpm
    # RPM plugin endpoints.
    cls.repo_api = RepositoriesRpmApi(rpm)
    cls.remote_api = RemotesRpmApi(rpm)
    # Core export/import endpoints.
    cls.exporter_api = ExportersPulpApi(core)
    cls.exports_api = ExportersPulpExportsApi(core)
    cls.importer_api = ImportersPulpApi(core)
    cls.imports_api = ImportersPulpImportsApi(core)
    cls.dist_tree_api = ContentDistributionTreesApi(rpm)
    # Build the fixture repositories/remotes, then prime one export to import.
    cls.import_repos, cls.export_repos, cls.remotes = cls._setup_repositories(
        RPM_UNSIGNED_FIXTURE_URL
    )
    cls.exporter = cls._create_exporter()
    cls.export = cls._create_export()
def setUpClass(cls):
    """Create class-wide variables.

    CDN credentials are taken from the environment; when any of them is
    missing all three attributes are set to ``None`` so dependent tests
    can detect the absence without raising ``AttributeError``.
    """
    cls.cfg = config.get_config()
    cls.client = gen_rpm_client()
    cls.repo_api = RepositoriesRpmApi(cls.client)
    # Reuse the shared bindings client rather than building a second one.
    cls.remote_api = RemotesRpmApi(cls.client)
    cls.publications = PublicationsRpmApi(cls.client)
    cls.distributions = DistributionsRpmApi(cls.client)
    if (
        os.environ.get("CDN_CLIENT_CERT", None)
        and os.environ.get("CDN_CLIENT_KEY", None)
        and os.environ.get("CDN_CA_CERT", None)
    ):
        # strings have escaped newlines from environmental variable
        cls.cdn_client_cert = os.environ["CDN_CLIENT_CERT"].replace("\\n", "\n")
        cls.cdn_client_key = os.environ["CDN_CLIENT_KEY"].replace("\\n", "\n")
        cls.cdn_ca_cert = os.environ["CDN_CA_CERT"].replace("\\n", "\n")
    else:
        # Define all three attributes so later checks never hit AttributeError.
        cls.cdn_client_cert = None
        cls.cdn_client_key = None
        cls.cdn_ca_cert = None
def setUpClass(cls):
    """
    Create all the client instances needed to communicate with Pulp.
    """
    cfg = Configuration(**BINDINGS_CONFIGURATION)
    fclient = FileApiClient(cfg)
    rclient = RpmApiClient(cfg)
    mclient = MigrationApiClient(cfg)

    # File plugin endpoints.
    cls.file_repo_api = RepositoriesFileApi(fclient)
    cls.file_repo_versions_api = RepositoriesFileVersionsApi(fclient)
    cls.file_remote_api = RemotesFileApi(fclient)
    cls.file_distribution_api = DistributionsFileApi(fclient)
    cls.file_publication_api = PublicationsFileApi(fclient)
    cls.file_content_api = ContentFilesApi(fclient)

    # RPM plugin endpoints.
    cls.rpm_repo_api = RepositoriesRpmApi(rclient)
    cls.rpm_repo_versions_api = RepositoriesRpmVersionsApi(rclient)
    cls.rpm_remote_api = RemotesRpmApi(rclient)
    cls.rpm_distribution_api = DistributionsRpmApi(rclient)
    cls.rpm_publication_api = PublicationsRpmApi(rclient)
    # One content endpoint per RPM content type, keyed by type name.
    cls.rpm_content_apis = {
        'advisory': ContentAdvisoriesApi(rclient),
        'disttree': ContentDistributionTreesApi(rclient),
        'modulemd': ContentModulemdsApi(rclient),
        'modulemd-defaults': ContentModulemdDefaultsApi(rclient),
        'category': ContentPackagecategoriesApi(rclient),
        'environment': ContentPackageenvironmentsApi(rclient),
        'group': ContentPackagegroupsApi(rclient),
        'langpack': ContentPackagelangpacksApi(rclient),
        'package': ContentPackagesApi(rclient),
    }

    # Migration plugin endpoints.
    cls.migration_plans_api = MigrationPlansApi(mclient)
    cls.pulp2content_api = Pulp2ContentApi(mclient)
    cls.pulp2repositories_api = Pulp2RepositoriesApi(mclient)
def test_all(self):
    """Test of additive mode."""
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)

    # 1. create repo, remote and sync them
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(remote_api.delete, remote.pulp_href)
    monitor_task(
        repo_api.sync(repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)).task
    )

    # 2. create another remote and re-sync
    remote = remote_api.create(gen_rpm_remote(url=SRPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(remote_api.delete, remote.pulp_href)
    monitor_task(
        repo_api.sync(repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)).task
    )

    # 3. Check content counts: units from both syncs must be present.
    repo = repo_api.read(repo.pulp_href)
    content = get_content(repo.to_dict())
    self.assertEqual(
        RPM_PACKAGE_COUNT + SRPM_UNSIGNED_FIXTURE_PACKAGE_COUNT,
        len(content[PULP_TYPE_PACKAGE]),
    )
    self.assertEqual(
        RPM_ADVISORY_COUNT + SRPM_UNSIGNED_FIXTURE_ADVISORY_COUNT,
        len(content[PULP_TYPE_ADVISORY]),
    )
def do_test(self, url, policy="on_demand"):
    """Verify whether content served by pulp can be synced.

    Publishing in Pulp 3 is a two-step process: a publication (a
    repository version plus metadata) is created first, then a
    distribution exposes that publication at one or more URLs such as
    ``http://example.com/content/foo/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Sync a second repository using the first distribution's
       ``base_url`` as the remote url, and assert both repositories end
       up with the same content.

    Sync-task timing (wait vs. service) is printed for both syncs.
    """
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)
    publications = PublicationsRpmApi(client)
    distributions = DistributionsRpmApi(client)

    def report_timing(task_href):
        # Print how long the sync task waited versus how long it ran.
        task = tasks.read(task_href)
        service = task.finished_at - task.started_at
        wait = task.started_at - task.pulp_created
        print(
            "\n-> Sync => Waiting time (s): {wait} | Service time (s): {service}".format(
                wait=wait.total_seconds(), service=service.total_seconds()
            )
        )

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote(url=url, policy=policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the first repository.
    sync_response = repo_api.sync(
        repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_response.task)
    report_timing(sync_response.task)
    repo = repo_api.read(repo.pulp_href)

    # Create a publication from the synced repository.
    publish_response = publications.create(RpmRpmPublication(repository=repo.pulp_href))
    publication_href = monitor_task(publish_response.task).created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    # Create a distribution serving that publication.
    dist_body = gen_distribution()
    dist_body["publication"] = publication_href
    distribution_response = distributions.create(dist_body)
    distribution = distributions.read(
        monitor_task(distribution_response.task).created_resources[0]
    )
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # The second repository syncs from the distribution's base_url.
    repo2 = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo2.pulp_href)
    remote2 = remote_api.create(gen_rpm_remote(url=distribution.base_url, policy=policy))
    self.addCleanup(remote_api.delete, remote2.pulp_href)

    sync_response = repo_api.sync(
        repo2.pulp_href, RpmRepositorySyncURL(remote=remote2.pulp_href)
    )
    monitor_task(sync_response.task)
    report_timing(sync_response.task)
    repo2 = repo_api.read(repo2.pulp_href)

    # Both repositories must hold identical content.
    self.assertDictEqual(
        get_content_summary(repo.to_dict()), get_content_summary(repo2.to_dict())
    )
    self.assertDictEqual(
        get_added_content_summary(repo.to_dict()),
        get_added_content_summary(repo2.to_dict()),
    )
class AdvisoryContentUnitTestCase(PulpTestCase):
    """
    Create and upload advisory content unit.
    """

    # Minimal advisory (id RHSA-XXXX:XXXX) with a single unnamed collection
    # holding the 'bear' package; default payload for do_test_json().
    BASE_TEST_JSON = """{ "updated": "2014-09-28 00:00:00", "issued": "2014-09-24 00:00:00", "id": "RHSA-XXXX:XXXX", "pkglist": [ { "packages": [ { "arch": "noarch", "epoch": "0", "filename": "bear-4.1-1.noarch.rpm", "name": "bear", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "1", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "4.1" } ] } ], "severity": "", "description": "Not available", "reboot_suggested": false, "solution": "Not available", "fromstr": "*****@*****.**"}"""

    # The four constants below all share the advisory id CEBA-2019--666 but
    # carry different package lists; test_merging uploads them in sequence to
    # exercise advisory-merge and advisory-conflict behavior.
    BEAR_JSON = """{ "issued": "2020-03-08 20:04:01", "id": "CEBA-2019--666", "type": "Bug Fix Advisory", "release": "1", "version": "1", "pkglist": [ { "packages": [ { "arch": "noarch", "epoch": "0", "filename": "bear-4.1-1.noarch.rpm", "name": "bear", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "1", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "4.1" } ] } ], "severity": "", "description": "Not available", "reboot_suggested": false, "updated": "2020-03-08 20:04:01", "solution": "Not available", "fromstr": "*****@*****.**" }"""

    CAMEL_JSON = """{ "issued": "2020-03-08 20:04:01", "id": "CEBA-2019--666", "type": "Bug Fix Advisory", "release": "1", "version": "1", "pkglist": [ { "packages": [ { "arch": "noarch", "epoch": "0", "filename": "camel-0.1-1.noarch.rpm", "name": "camel", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "1", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "0.1" } ] } ], "severity": "", "description": "Not available", "reboot_suggested": false, "updated": "2020-03-08 20:04:01", "solution": "Not available", "fromstr": "*****@*****.**" }"""

    # camel + bird: intersects camel-only pkglist without being a subset —
    # used to trigger the AdvisoryConflict failure.
    CAMEL_BIRD_JSON = """{ "issued": "2020-03-08 20:04:01", "id": "CEBA-2019--666", "type": "Bug Fix Advisory", "release": "1", "version": "1", "pkglist": [ { "packages": [ { "arch": "noarch", "epoch": "0", "filename": "camel-0.1-1.noarch.rpm", "name": "camel", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "1", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "0.1" }, { "arch": "noarch", "epoch": "0", "filename": "bird-1.2-3.noarch.rpm", "name": "bird", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "3", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "1.2" } ] } ], "severity": "", "description": "Not available", "reboot_suggested": false, "updated": "2020-03-08 20:04:01", "solution": "Not available", "fromstr": "*****@*****.**" }"""

    # camel + bear + dog: a superset of previously-uploaded pkglists.
    CAMEL_BEAR_DOG_JSON = """{ "issued": "2020-03-08 20:04:01", "id": "CEBA-2019--666", "type": "Bug Fix Advisory", "release": "1", "version": "1", "pkglist": [ { "packages": [ { "arch": "noarch", "epoch": "0", "filename": "camel-0.1-1.noarch.rpm", "name": "camel", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "1", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "0.1" }, { "arch": "noarch", "epoch": "0", "filename": "bear-4.1-1.noarch.rpm", "name": "bear", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "1", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "4.1" }, { "arch": "noarch", "epoch": "0", "filename": "dog-6.1-6.noarch.rpm", "name": "dog", "reboot_suggested": false, "relogin_suggested": false, "restart_suggested": false, "release": "6", "src": "http://www.fedoraproject.org", "sum": "", "sum_type": "", "version": "6.1" } ] } ], "severity": "", "description": "Not available", "reboot_suggested": false, "updated": "2020-03-08 20:04:01", "solution": "Not available", "fromstr": "*****@*****.**" }"""

    @classmethod
    def setUpClass(cls):
        """Create class-wide variable."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg)
        delete_orphans()
        cls.rpm_client = gen_rpm_client()
        # NOTE(review): ``core_client`` is not defined in this class; it is
        # presumably a module-level bindings client — confirm.
        cls.tasks_api = TasksApi(core_client)
        cls.content_api = ContentAdvisoriesApi(cls.rpm_client)
        # An RPM file: deliberately the *wrong* payload type for advisory upload.
        cls.bad_file_to_use = os.path.join(RPM_UNSIGNED_FIXTURE_URL, RPM_PACKAGE_FILENAME)

    def setUp(self):
        """Per-test setup."""
        self.repo_api = RepositoriesRpmApi(self.rpm_client)
        self.repo = self.repo_api.create(gen_repo())
        # A fresh repository must start at version 0.
        self.assertEqual(self.repo.latest_version_href, f"{self.repo.pulp_href}versions/0/")

    def tearDown(self):
        """TearDown."""
        self.repo_api.delete(self.repo.pulp_href)
        delete_orphans()

    def test_upload_wrong_type(self):
        """Test that a proper error is raised when wrong file content type is uploaded."""
        with self.assertRaises(ApiException) as e:
            self.do_test(self.bad_file_to_use)
        self.assertTrue("JSON" in e.exception.body)

    def test_upload_json(self):
        """Test upload advisory from JSON file."""
        upload = self.do_test_json()
        content = monitor_task(upload.task).created_resources[0]
        advisory = self.content_api.read(content)
        self.assertTrue(advisory.id == "RHSA-XXXX:XXXX")

    def test_merging(self):
        """Test the 'same' advisory, diff pkglists, into a repo, expecting a merged package-list."""
        # First upload: single 'bear' collection creates repo version 1.
        upload = self.do_test_json(advisory=self.BEAR_JSON, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "CEBA-2019--666")
        self.assertEqual(vers_href, f"{self.repo.pulp_href}versions/1/")
        bear = self.content_api.read(advisory_href)
        # NOTE(review): assertTrue with two arguments treats the second as the
        # failure message and always passes; assertEqual was probably intended
        # — confirm before changing.
        self.assertTrue("CEBA-2019--666", bear.id)
        self.assertEqual(1, len(bear.pkglist))
        self.assertEqual(1, len(bear.pkglist[0].packages))
        # Second upload, no pkg-intersection - add both collections
        # NOTE: also check that unnamed-collections are now named "collection_N", so
        # they can be uniquely identified
        upload = self.do_test_json(advisory=self.CAMEL_JSON, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "CEBA-2019--666")
        self.assertEqual(vers_href, f"{self.repo.pulp_href}versions/2/")
        cambear = self.content_api.read(advisory_href)
        self.assertEqual("CEBA-2019--666", cambear.id)
        self.assertEqual(2, len(cambear.pkglist))
        coll_names = [row.name for row in cambear.pkglist]
        self.assertTrue("collection_0" in coll_names)
        self.assertTrue("collection_1" in coll_names)
        self.assertEqual(1, len(cambear.pkglist[0].packages))
        self.assertEqual(1, len(cambear.pkglist[1].packages))
        names = [plist.packages[0]["name"] for plist in cambear.pkglist]
        self.assertTrue("camel" in names)
        self.assertTrue("bear" in names)
        # Third upload, two pkgs, intersects with existing, expect AdvisoryConflict failure
        upload = self.do_test_json(advisory=self.CAMEL_BIRD_JSON, repository=self.repo)
        with self.assertRaises(PulpTaskError) as ctx:
            task_response = monitor_task(upload.task)
        self.assertTrue("neither package list is a proper subset of the other" in str(ctx.exception))
        # Fourth upload, intersecting pkglists, expecting three pkgs
        upload = self.do_test_json(advisory=self.CAMEL_BEAR_DOG_JSON, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "CEBA-2019--666")
        self.assertEqual(vers_href, f"{self.repo.pulp_href}versions/3/")
        cambeardog = self.content_api.read(advisory_href)
        self.assertEqual("CEBA-2019--666", cambeardog.id)
        # Expect one collection, not a merge
        self.assertEqual(1, len(cambeardog.pkglist))
        names = [pkg["name"] for pkg in cambeardog.pkglist[0].packages]
        self.assertEqual(3, len(names))
        self.assertTrue("camel" in names)
        self.assertTrue("bear" in names)
        self.assertTrue("dog" in names)

    def _from_results(self, response, advisory_id):
        # Expect exactly two created resources: the advisory itself and the
        # new repository version; find the version href by substring match.
        self.assertEqual(2, len(response.created_resources))
        vers_href = None
        for rsrc in response.created_resources:
            if "versions" in rsrc:
                vers_href = rsrc
        advisories = self.content_api.list(id=advisory_id, repository_version=vers_href)
        self.assertEqual(1, len(advisories.results))
        return advisories.results[0].pulp_href, vers_href

    def do_test(self, remote_path):
        """Upload wrong type of the file."""
        with NamedTemporaryFile() as file_to_upload:
            # NOTE(review): no flush() before the upload here, unlike
            # do_test_json below — confirm the write reaches disk in time.
            file_to_upload.write(http_get(remote_path))
            upload_attrs = {
                "file": file_to_upload.name,
            }
            return self.content_api.create(**upload_attrs)

    def do_test_json(self, advisory=BASE_TEST_JSON, repository=None):
        """Upload advisory from a json file."""
        with NamedTemporaryFile("w+") as file_to_upload:
            # Round-trip through json to validate the payload before upload.
            json.dump(json.loads(advisory), file_to_upload)
            upload_attrs = {
                "file": file_to_upload.name,
            }
            if repository:
                upload_attrs["repository"] = repository.pulp_href
            file_to_upload.flush()
            return self.content_api.create(**upload_attrs)
def do_test(self, policy):
    """Verify whether content served by pulp can be synced.

    Publishing in Pulp 3 is a two-step process: a publication (a
    repository version plus metadata) is created first, then a
    distribution exposes that publication at one or more URLs such as
    ``http://example.com/content/foo/``.

    Do the following:

    1. Create, populate, publish (with non-default checksum types), and
       distribute a repository from the kickstart fixture.
    2. Sync a second repository using the first distribution's
       ``base_url`` as the remote url, and assert both repositories end
       up with the same content.
    """
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)
    publications = PublicationsRpmApi(client)
    distributions = DistributionsRpmApi(client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote(url=RPM_KICKSTART_FIXTURE_URL, policy=policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync the first repository and re-read it to pick up the new version.
    monitor_task(
        repo_api.sync(repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)).task
    )
    repo = repo_api.read(repo.pulp_href)

    # Publish with non-default checksum types.
    publish_data = RpmRpmPublication(
        repository=repo.pulp_href,
        metadata_checksum_type="sha1",
        package_checksum_type="sha224",
    )
    publication_href = monitor_task(publications.create(publish_data).task)[0]
    self.addCleanup(publications.delete, publication_href)

    # Distribute the publication.
    dist_body = gen_distribution()
    dist_body["publication"] = publication_href
    distribution = distributions.read(
        monitor_task(distributions.create(dist_body).task)[0]
    )
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # The second repository syncs from the first one's distribution.
    repo2 = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo2.pulp_href)
    remote2 = remote_api.create(gen_rpm_remote(url=distribution.base_url, policy=policy))
    self.addCleanup(remote_api.delete, remote2.pulp_href)
    monitor_task(
        repo_api.sync(repo2.pulp_href, RpmRepositorySyncURL(remote=remote2.pulp_href)).task
    )
    repo2 = repo_api.read(repo2.pulp_href)

    # Both repositories must expose identical content.
    self.assertDictEqual(
        get_content_summary(repo.to_dict()), get_content_summary(repo2.to_dict())
    )
    self.assertDictEqual(
        get_added_content_summary(repo.to_dict()),
        get_added_content_summary(repo2.to_dict()),
    )
def rpm_repository_api(rpm_client):
    """Fixture for RPM repositories API."""
    repositories_api = RepositoriesRpmApi(rpm_client)
    return repositories_api
def test_all(self):
    """Verify whether content served by pulp can be downloaded.

    Publishing in Pulp 3 is a two-step process: a publication (a
    repository version plus metadata) is created first, then a
    distribution exposes that publication at one or more URLs such as
    ``http://example.com/content/foo/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
    """
    cfg = config.get_config()
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)
    publications = PublicationsRpmApi(client)
    distributions = DistributionsRpmApi(client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # Sync and re-read so the repo object reflects the new version.
    monitor_task(
        repo_api.sync(repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)).task
    )
    repo = repo_api.read(repo.pulp_href)

    # Publish the latest repository version.
    publish_response = publications.create(RpmRpmPublication(repository=repo.pulp_href))
    publication_href = monitor_task(publish_response.task)[0]
    self.addCleanup(publications.delete, publication_href)

    # Distribute the publication.
    dist_body = gen_distribution()
    dist_body["publication"] = publication_href
    distribution = distributions.read(
        monitor_task(distributions.create(dist_body).task)[0]
    )
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # Pick one package and compare its checksum as served by the fixture
    # server against the same unit downloaded through Pulp.
    unit_path = choice(get_rpm_package_paths(repo.to_dict()))
    fixture_hash = hashlib.sha256(
        utils.http_get(urljoin(RPM_UNSIGNED_FIXTURE_URL, unit_path))
    ).hexdigest()
    pulp_bytes = download_content_unit(
        cfg, distribution.to_dict(), get_package_repo_path(unit_path)
    )
    pulp_hash = hashlib.sha256(pulp_bytes).hexdigest()
    self.assertEqual(fixture_hash, pulp_hash)
def do_test(self, download_policy):
    """Sync repositories with the different ``download_policy``.

    Do the following:

    1. Create a repository and a remote using the given
       ``download_policy``.
    2. Sync the remote and assert that a new repository version exists
       and that the expected content summary was added.
    3. Sync the same remote again and assert that no new repository
       version is created (the content is unchanged) and the content
       summary stays the same.
    4. Publish the repository synced with the lazy ``download_policy``
       and assert the publication records its repository and version.
    """
    repo_api = RepositoriesRpmApi(self.client)
    remote_api = RemotesRpmApi(self.client)
    publications = PublicationsRpmApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote(policy=download_policy))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # A fresh repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = repo_api.read(repo.pulp_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), RPM_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo.to_dict()), RPM_FIXTURE_SUMMARY)

    # Re-syncing identical content must not produce a new version.
    version_before = repo.latest_version_href
    monitor_task(repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = repo_api.read(repo.pulp_href)
    self.assertEqual(version_before, repo.latest_version_href)
    self.assertDictEqual(get_content_summary(repo.to_dict()), RPM_FIXTURE_SUMMARY)

    # Publish
    publish_response = publications.create(RpmRpmPublication(repository=repo.pulp_href))
    publication_href = monitor_task(publish_response.task)[0]
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)
    self.assertIsNotNone(publication.repository)
    self.assertIsNotNone(publication.repository_version)
def setUpClass(cls):
    """Create class-wide variables."""
    rpm = gen_rpm_client()
    cls.client = rpm
    # Repository and remote endpoints built on the shared client.
    cls.repo_api = RepositoriesRpmApi(rpm)
    cls.remote_api = RemotesRpmApi(rpm)
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.

    No cleanups are registered: the created resources are deliberately
    left behind (under the ``pulp_pre_upgrade_test`` base path) for
    post-upgrade verification.
    """
    cfg = config.get_config()
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)
    publications = PublicationsRpmApi(client)
    distributions = DistributionsRpmApi(client)

    remote = remote_api.create(gen_rpm_remote())
    repo = repo_api.create(gen_repo())
    monitor_task(
        repo_api.sync(repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)).task
    )

    # Step 1: removing packages one at a time builds additional versions.
    repo = repo_api.read(repo.pulp_href)
    for unit in get_content(repo.to_dict())[RPM_PACKAGE_CONTENT_NAME][:-1]:
        modify_repo(cfg, repo.to_dict(), remove_units=[unit])
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo.to_dict()))
    non_latest = choice(version_hrefs[1:-1])

    # Step 2: publish without naming a version (latest is implied).
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publication = publications.read(
        monitor_task(publications.create(publish_data).task).created_resources[0]
    )
    # Step 3
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    # Step 4: publish an explicit, non-latest version.
    publish_data.repository_version = non_latest
    publish_data.repository = None
    publication = publications.read(
        monitor_task(publications.create(publish_data).task).created_resources[0]
    )

    # Step 5: distribute that publication under a well-known base path.
    dist_body = gen_distribution()
    dist_body["base_path"] = "pulp_pre_upgrade_test"
    dist_body["publication"] = publication.pulp_href
    distribution = distributions.read(
        monitor_task(distributions.create(dist_body).task).created_resources[0]
    )

    # Step 6
    self.assertEqual(publication.repository_version, non_latest)

    # Step 7: repository and repository_version are mutually exclusive.
    with self.assertRaises(ApiException):
        publications.create(
            {"repository": repo.pulp_href, "repository_version": non_latest}
        )

    # Step 8
    url = cfg.get_content_host_base_url() + "/pulp/content/pulp_pre_upgrade_test/"
    self.assertEqual(url, distribution.base_url, url)
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)
    publications = PublicationsRpmApi(client)

    body = gen_rpm_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # Step 1: modify the repo once per synced package to accumulate
    # additional repository versions.
    repo = repo_api.read(repo.pulp_href)
    for rpm_content in get_content(repo.to_dict())[RPM_PACKAGE_CONTENT_NAME]:
        modify_repo(cfg, repo.to_dict(), add_units=[rpm_content])
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo.to_dict()))
    non_latest = choice(version_hrefs[:-1])

    # Step 2: publish with only ``repository`` set -> latest version is used.
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)

    # Step 3
    self.assertEqual(publication.repository_version, version_hrefs[-1])

    # Step 4: publish an explicit, non-latest repository version.
    publish_data.repository_version = non_latest
    publish_data.repository = None
    publish_response = publications.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    # BUGFIX: the second publication was never registered for cleanup,
    # unlike the first one — it leaked after the test finished.
    self.addCleanup(publications.delete, publication_href)
    publication = publications.read(publication_href)

    # Step 5
    self.assertEqual(publication.repository_version, non_latest)

    # Step 6: supplying both ``repository`` and ``repository_version``
    # at once is rejected by the server.
    with self.assertRaises(ApiException):
        body = {
            "repository": repo.pulp_href,
            "repository_version": non_latest
        }
        publications.create(body)
def do_test(self, policy):
    """Verify whether content served by Pulp can be synced.

    The initial sync to Pulp is one of many different download policies, the
    second sync is immediate in order to exercise downloading all of the files.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Sync other repository using as remote url, the distribution base_url
       from the previous repository.
    """
    bindings = gen_rpm_client()
    repo_api = RepositoriesRpmApi(bindings)
    remote_api = RemotesRpmApi(bindings)
    publications = PublicationsRpmApi(bindings)
    distributions = DistributionsRpmApi(bindings)

    # Upstream repository, synced from the kickstart fixture with the
    # download policy under test.
    upstream_repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, upstream_repo.pulp_href)
    remote_body = gen_rpm_remote(url=RPM_KICKSTART_FIXTURE_URL, policy=policy)
    upstream_remote = remote_api.create(remote_body)
    self.addCleanup(remote_api.delete, upstream_remote.pulp_href)

    sync_data = RpmRepositorySyncURL(remote=upstream_remote.pulp_href)
    monitor_task(repo_api.sync(upstream_repo.pulp_href, sync_data).task)
    upstream_repo = repo_api.read(upstream_repo.pulp_href)

    # Publish the synced content with non-default checksum types.
    publish_data = RpmRpmPublication(
        repository=upstream_repo.pulp_href,
        metadata_checksum_type="sha384",
        package_checksum_type="sha224",
    )
    publish_task = monitor_task(publications.create(publish_data).task)
    publication_href = publish_task.created_resources[0]
    self.addCleanup(publications.delete, publication_href)

    # Distribute the publication so it can serve as a remote for the mirror.
    dist_body = gen_distribution()
    dist_body["publication"] = publication_href
    dist_task = monitor_task(distributions.create(dist_body).task)
    distribution = distributions.read(dist_task.created_resources[0])
    self.addCleanup(distributions.delete, distribution.pulp_href)

    # Mirror repository: immediate sync from the distribution's base_url.
    mirror_repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, mirror_repo.pulp_href)
    mirror_body = gen_rpm_remote(url=distribution.base_url, policy="immediate")
    mirror_remote = remote_api.create(mirror_body)
    self.addCleanup(remote_api.delete, mirror_remote.pulp_href)

    sync_data = RpmRepositorySyncURL(remote=mirror_remote.pulp_href)
    monitor_task(repo_api.sync(mirror_repo.pulp_href, sync_data).task)
    mirror_repo = repo_api.read(mirror_repo.pulp_href)

    # Both repositories must end up holding identical content.
    self.assertDictEqual(
        get_content_summary(upstream_repo.to_dict()),
        get_content_summary(mirror_repo.to_dict()),
    )
    self.assertDictEqual(
        get_added_content_summary(upstream_repo.to_dict()),
        get_added_content_summary(mirror_repo.to_dict()),
    )
def setUp(self):
    """Create a fresh, empty repository for each test."""
    self.repo_api = RepositoriesRpmApi(self.rpm_client)
    self.repo = self.repo_api.create(gen_repo())
    # A brand-new repository must start at version 0.
    initial_version = f"{self.repo.pulp_href}versions/0/"
    self.assertEqual(self.repo.latest_version_href, initial_version)
class AdvisoryContentUnitTestCase(PulpTestCase):
    """Create and upload advisory content units.

    The JSON constants below are complete advisory documents as accepted by
    the advisory-upload endpoint.  Several of them deliberately reuse the id
    ``CEBA-2019--666`` so that uploading them into the same repository
    exercises the advisory merge/conflict handling.  All constants are parsed
    with ``json.loads`` before upload, so their whitespace is insignificant.
    """

    # Default advisory used by do_test_json() when no advisory is supplied.
    BASE_TEST_JSON = """{
        "updated": "2014-09-28 00:00:00",
        "issued": "2014-09-24 00:00:00",
        "id": "RHSA-XXXX:XXXX",
        "pkglist": [
            {"packages": [
                {"arch": "noarch", "epoch": "0",
                 "filename": "bear-4.1-1.noarch.rpm", "name": "bear",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "1",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "4.1"}
            ]}
        ],
        "severity": "",
        "description": "Not available",
        "reboot_suggested": false,
        "solution": "Not available",
        "fromstr": "*****@*****.**"}"""

    # Advisory CEBA-2019--666 with a single "bear" package.
    BEAR_JSON = """{
        "issued": "2020-03-08 20:04:01",
        "id": "CEBA-2019--666",
        "type": "Bug Fix Advisory",
        "release": "1",
        "version": "1",
        "pkglist": [
            {"packages": [
                {"arch": "noarch", "epoch": "0",
                 "filename": "bear-4.1-1.noarch.rpm", "name": "bear",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "1",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "4.1"}
            ]}
        ],
        "severity": "",
        "description": "Not available",
        "reboot_suggested": false,
        "updated": "2020-03-08 20:04:01",
        "solution": "Not available",
        "fromstr": "*****@*****.**"
    }"""

    # Same advisory id as BEAR_JSON, but with a disjoint pkglist ("camel").
    CAMEL_JSON = """{
        "issued": "2020-03-08 20:04:01",
        "id": "CEBA-2019--666",
        "type": "Bug Fix Advisory",
        "release": "1",
        "version": "1",
        "pkglist": [
            {"packages": [
                {"arch": "noarch", "epoch": "0",
                 "filename": "camel-0.1-1.noarch.rpm", "name": "camel",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "1",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "0.1"}
            ]}
        ],
        "severity": "",
        "description": "Not available",
        "reboot_suggested": false,
        "updated": "2020-03-08 20:04:01",
        "solution": "Not available",
        "fromstr": "*****@*****.**"
    }"""

    # Same advisory id; pkglist intersects "camel" but adds "bird" ->
    # neither pkglist is a subset of the other (conflict case).
    CAMEL_BIRD_JSON = """{
        "issued": "2020-03-08 20:04:01",
        "id": "CEBA-2019--666",
        "type": "Bug Fix Advisory",
        "release": "1",
        "version": "1",
        "pkglist": [
            {"packages": [
                {"arch": "noarch", "epoch": "0",
                 "filename": "camel-0.1-1.noarch.rpm", "name": "camel",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "1",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "0.1"},
                {"arch": "noarch", "epoch": "0",
                 "filename": "bird-1.2-3.noarch.rpm", "name": "bird",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "3",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "1.2"}
            ]}
        ],
        "severity": "",
        "description": "Not available",
        "reboot_suggested": false,
        "updated": "2020-03-08 20:04:01",
        "solution": "Not available",
        "fromstr": "*****@*****.**"
    }"""

    # Same advisory id; pkglist is a superset of the previously-uploaded ones.
    CAMEL_BEAR_DOG_JSON = """{
        "issued": "2020-03-08 20:04:01",
        "id": "CEBA-2019--666",
        "type": "Bug Fix Advisory",
        "release": "1",
        "version": "1",
        "pkglist": [
            {"packages": [
                {"arch": "noarch", "epoch": "0",
                 "filename": "camel-0.1-1.noarch.rpm", "name": "camel",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "1",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "0.1"},
                {"arch": "noarch", "epoch": "0",
                 "filename": "bear-4.1-1.noarch.rpm", "name": "bear",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "1",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "4.1"},
                {"arch": "noarch", "epoch": "0",
                 "filename": "dog-6.1-6.noarch.rpm", "name": "dog",
                 "reboot_suggested": false, "relogin_suggested": false,
                 "restart_suggested": false, "release": "6",
                 "src": "http://www.fedoraproject.org",
                 "sum": "", "sum_type": "", "version": "6.1"}
            ]}
        ],
        "severity": "",
        "description": "Not available",
        "reboot_suggested": false,
        "updated": "2020-03-08 20:04:01",
        "solution": "Not available",
        "fromstr": "*****@*****.**"
    }"""

    CESA_2020_5002 = """{
        "title": "Moderate CentOS curl Security Update",
        "type": "security",
        "description": "",
        "release": "el7",
        "version": "1",
        "severity": "Moderate",
        "status": "final",
        "updated": "2020-11-18 17:30:30",
        "issued": "2020-11-18 17:30:30",
        "pkglist": [
            {"packages": [
                {"arch": "x86_64", "epoch": "0", "filename": "curl-7.29.0-59.el7_9.1.x86_64.rpm", "release": "59.el7_9.1", "name": "curl", "sum": "dfc95bdd8057839d4b45153318acb4e09f4da257afee1c57c07781870a68ecef", "sum_type": "sha256"},
                {"arch": "i686", "epoch": "0", "filename": "libcurl-7.29.0-59.el7_9.1.i686.rpm", "release": "59.el7_9.1", "name": "libcurl", "sum": "3054ca1c0cc8eef5f08ce1d3be56c7a39e97d92361e8bd265bea14d06f590219", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "libcurl-7.29.0-59.el7_9.1.x86_64.rpm", "release": "59.el7_9.1", "name": "libcurl", "sum": "4ad0b71e3a6468fba1b43ab82fad024415b5296c7b77d1348fb9afa3f828f98e", "sum_type": "sha256"},
                {"arch": "i686", "epoch": "0", "filename": "libcurl-devel-7.29.0-59.el7_9.1.i686.rpm", "release": "59.el7_9.1", "name": "libcurl-devel", "sum": "7ab4f1b0aa285d3773fdbd8bfc529969ca101a627d3ea88bea1f99a42093e132", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "libcurl-devel-7.29.0-59.el7_9.1.x86_64.rpm", "release": "59.el7_9.1", "name": "libcurl-devel", "sum": "f92fde3f97c0034135796baa7cd55f87c0550a88ac79adbdcc9c7f64c595614b", "sum_type": "sha256"}
            ]}
        ],
        "id": "TEST-CESA-2020:5002",
        "from": "*****@*****.**",
        "references": [
            {"href": "https://access.redhat.com/errata/RHSA-2020:5002", "ref_id": "CESA-2020:5002", "title": "Moderate CentOS curl Security Update", "ref_type": "security"},
            {"href": "https://lists.centos.org/pipermail/centos-announce/2020-November/035840.html", "ref_id": "CESA-2020:5002", "title": "Moderate CentOS curl Security Update", "ref_type": "security"}
        ]
    }"""  # noqa

    CESA_2020_4910 = """{
        "title": "Important CentOS xorg-x11-server Security Update",
        "type": "security",
        "description": "",
        "release": "el7",
        "version": "1",
        "severity": "Important",
        "status": "final",
        "updated": "2020-11-06 22:19:48",
        "issued": "2020-11-06 22:19:48",
        "pkglist": [
            {"packages": [
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-Xdmx-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-Xdmx", "sum": "0435f345b2b188c76dbb4a538bf0f878834a41e723491df1926231020fd88efd", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-Xephyr-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-Xephyr", "sum": "2d21d53b305e30b058ca88d8778bda67000a5d52ab320f04b35e63f6a78f2163", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-Xnest-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-Xnest", "sum": "51fbacc2e26050a7772549f1fe16c46bd8063ea187825ad89b237c34fa9b4250", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-Xorg-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-Xorg", "sum": "eb89964d5fd40ec94ee8db97a5a14cc8dd6329b83d82ab29ee1a595653ce5223", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-Xvfb-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-Xvfb", "sum": "ea32b047fba7fd327bf943da2a18413a1ed3e245cc1b077f34d1c8f6048d9813", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-Xwayland-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-Xwayland", "sum": "4a6ffb39008edd469d4365bb3bf858f5f5f466129eb9e330d978b28866906891", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-common-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-common", "sum": "339bcf68cb37a454eddff7218aff4153a36bafc0d36e2b5b6bde8311c6f3eed8", "sum_type": "sha256"},
                {"arch": "i686", "epoch": "0", "filename": "xorg-x11-server-devel-1.20.4-12.el7_9.i686.rpm", "release": "12.el7_9", "name": "xorg-x11-server-devel", "sum": "55e13fc8624f8a63b785b5194281c38a4670f03113b0ff2b8fc1df1ca473e1e8", "sum_type": "sha256"},
                {"arch": "x86_64", "epoch": "0", "filename": "xorg-x11-server-devel-1.20.4-12.el7_9.x86_64.rpm", "release": "12.el7_9", "name": "xorg-x11-server-devel", "sum": "e2dd0c67f3d88a9506f72fcc21ec0af786a377befabac8e1670d3e012d844b06", "sum_type": "sha256"},
                {"arch": "noarch", "epoch": "0", "filename": "xorg-x11-server-source-1.20.4-12.el7_9.noarch.rpm", "release": "12.el7_9", "name": "xorg-x11-server-source", "sum": "1baa9cb2d4f8d4300ac333fbc7bc130dce9145c67aea3bd6efa4a0354fc92b6d", "sum_type": "sha256"}
            ]}
        ],
        "id": "TEST-CESA-2020:4910",
        "from": "*****@*****.**",
        "references": [
            {"href": "https://access.redhat.com/errata/RHSA-2020:4910", "ref_id": "CESA-2020:4910", "title": "Important CentOS xorg-x11-server Security Update", "ref_type": "security"},
            {"href": "https://lists.centos.org/pipermail/centos-cr-announce/2020-November/012889.html", "ref_id": "CESA-2020:4910", "title": "Important CentOS xorg-x11-server Security Update", "ref_type": "security"}
        ]
    }"""  # noqa

    @classmethod
    def setUpClass(cls):
        """Create class-wide variable."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg)
        delete_orphans()
        cls.rpm_client = gen_rpm_client()
        # NOTE(review): ``core_client`` is not defined anywhere in this class;
        # presumably it is a module-level pulpcore bindings client — confirm.
        cls.tasks_api = TasksApi(core_client)
        cls.content_api = ContentAdvisoriesApi(cls.rpm_client)
        # A plain RPM file, used to verify that non-JSON uploads are rejected.
        cls.bad_file_to_use = os.path.join(RPM_UNSIGNED_FIXTURE_URL, RPM_PACKAGE_FILENAME)

    def setUp(self):
        """Per-test setup: create a fresh repository starting at version 0."""
        self.repo_api = RepositoriesRpmApi(self.rpm_client)
        self.repo = self.repo_api.create(gen_repo())
        self.assertEqual(self.repo.latest_version_href, f"{self.repo.pulp_href}versions/0/")

    def tearDown(self):
        """TearDown: remove the per-test repository and any orphaned content."""
        self.repo_api.delete(self.repo.pulp_href)
        delete_orphans()

    def test_upload_wrong_type(self):
        """Test that a proper error is raised when wrong file content type is uploaded."""
        with self.assertRaises(ApiException) as e:
            self.do_test(self.bad_file_to_use)
        self.assertTrue("JSON" in e.exception.body)

    def test_upload_json(self):
        """Test upload advisory from JSON file."""
        upload = self.do_test_json()
        content = monitor_task(upload.task).created_resources[0]
        advisory = self.content_api.read(content)
        # BUGFIX-adjacent idiom: assertEqual gives a useful failure message,
        # unlike assertTrue(a == b).
        self.assertEqual("RHSA-XXXX:XXXX", advisory.id)

    def test_merging(self):
        """Test the 'same' advisory, diff pkglists, into a repo, expecting a merged package-list."""
        upload = self.do_test_json(advisory=self.BEAR_JSON, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "CEBA-2019--666")
        self.assertEqual(vers_href, f"{self.repo.pulp_href}versions/1/")
        bear = self.content_api.read(advisory_href)
        # BUGFIX: was assertTrue(expected, actual) — two-arg assertTrue treats
        # the second argument as a message and therefore always passed.
        self.assertEqual("CEBA-2019--666", bear.id)
        self.assertEqual(1, len(bear.pkglist))
        self.assertEqual(1, len(bear.pkglist[0].packages))

        # Second upload, no pkg-intersection - add both collections
        # NOTE: also check that unnamed-collections are now named "collection_N", so
        # they can be uniquely identified
        upload = self.do_test_json(advisory=self.CAMEL_JSON, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "CEBA-2019--666")
        self.assertEqual(vers_href, f"{self.repo.pulp_href}versions/2/")
        cambear = self.content_api.read(advisory_href)
        self.assertEqual("CEBA-2019--666", cambear.id)
        self.assertEqual(2, len(cambear.pkglist))
        coll_names = [row.name for row in cambear.pkglist]
        self.assertTrue("collection_0" in coll_names)
        self.assertTrue("collection_1" in coll_names)
        self.assertEqual(1, len(cambear.pkglist[0].packages))
        self.assertEqual(1, len(cambear.pkglist[1].packages))
        names = [plist.packages[0]["name"] for plist in cambear.pkglist]
        self.assertTrue("camel" in names)
        self.assertTrue("bear" in names)

        # Third upload, two pkgs, intersects with existing, expect AdvisoryConflict failure
        upload = self.do_test_json(advisory=self.CAMEL_BIRD_JSON, repository=self.repo)
        with self.assertRaises(PulpTaskError) as ctx:
            task_response = monitor_task(upload.task)
        self.assertTrue(
            "neither package list is a proper subset of the other" in str(ctx.exception)
        )
        self.assertTrue("ALLOW_AUTOMATIC_UNSAFE_ADVISORY_CONFLICT_RESOLUTION" in str(ctx.exception))

        # Fourth upload, intersecting pkglists, expecting three pkgs
        upload = self.do_test_json(advisory=self.CAMEL_BEAR_DOG_JSON, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "CEBA-2019--666")
        self.assertEqual(vers_href, f"{self.repo.pulp_href}versions/3/")
        cambeardog = self.content_api.read(advisory_href)
        self.assertEqual("CEBA-2019--666", cambeardog.id)
        # Expect one collection, not a merge
        self.assertEqual(1, len(cambeardog.pkglist))
        names = [pkg["name"] for pkg in cambeardog.pkglist[0].packages]
        self.assertEqual(3, len(names))
        self.assertTrue("camel" in names)
        self.assertTrue("bear" in names)
        self.assertTrue("dog" in names)

    def test_8683_error_path(self):
        """
        Test that upload-fail doesn't break all future uploads.

        See https://pulp.plan.io/issues/8683 for details.
        """
        # Upload an advisory
        advisory_str = self.CESA_2020_5002
        upload = self.do_test_json(advisory=advisory_str, repository=self.repo)
        task_response = monitor_task(upload.task)
        self._from_results(task_response, "TEST-CESA-2020:5002")

        # Try to upload it 'again' and watch it fail
        with self.assertRaises(PulpTaskError):
            upload = self.do_test_json(advisory=self.CESA_2020_5002, repository=self.repo)
            monitor_task(upload.task)

        # Upload a different advisory and Don't Fail
        advisory_str = self.CESA_2020_4910
        upload = self.do_test_json(advisory=advisory_str, repository=self.repo)
        task_response = monitor_task(upload.task)
        advisory_href, vers_href = self._from_results(task_response, "TEST-CESA-2020:4910")
        advisory = self.content_api.read(advisory_href)
        # Make sure the second advisory was persisted
        self.assertEqual("TEST-CESA-2020:4910", advisory.id)

    def _from_results(self, response, advisory_id):
        """Return (advisory_href, version_href) extracted from a task's created_resources."""
        self.assertEqual(2, len(response.created_resources))
        vers_href = None
        for rsrc in response.created_resources:
            if "versions" in rsrc:
                vers_href = rsrc
        advisories = self.content_api.list(id=advisory_id, repository_version=vers_href)
        self.assertEqual(1, len(advisories.results))
        return advisories.results[0].pulp_href, vers_href

    def do_test(self, remote_path):
        """Upload wrong type of the file."""
        with NamedTemporaryFile() as file_to_upload:
            file_to_upload.write(http_get(remote_path))
            # BUGFIX: flush so the bindings — which re-open the file by name —
            # see the bytes just written instead of an empty/partial file
            # (do_test_json already flushed; this path did not).
            file_to_upload.flush()
            upload_attrs = {
                "file": file_to_upload.name,
            }
            return self.content_api.create(**upload_attrs)

    def do_test_json(self, advisory=BASE_TEST_JSON, repository=None):
        """Upload advisory from a json file."""
        with NamedTemporaryFile("w+") as file_to_upload:
            json.dump(json.loads(advisory), file_to_upload)
            upload_attrs = {
                "file": file_to_upload.name,
            }
            if repository:
                upload_attrs["repository"] = repository.pulp_href
            # Flush before the bindings re-open the file by name.
            file_to_upload.flush()
            return self.content_api.create(**upload_attrs)