def test_all(self):
    """Check mirror-mode sync: a re-sync replaces previously synced content."""
    api_client = gen_rpm_client()
    repositories = RepositoriesRpmApi(api_client)
    remotes = RemotesRpmApi(api_client)

    # Create a repository and sync it against the SRPM fixture.
    repo = repositories.create(gen_repo())
    self.addCleanup(repositories.delete, repo.pulp_href)
    remote = remotes.create(gen_rpm_remote(url=SRPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(remotes.delete, remote.pulp_href)
    sync_url = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repositories.sync(repo.pulp_href, sync_url).task)

    # Re-sync the same repository in mirror mode from a different remote.
    remote = remotes.create(gen_rpm_remote(url=RPM_SIGNED_FIXTURE_URL))
    self.addCleanup(remotes.delete, remote.pulp_href)
    sync_url = RpmRepositorySyncURL(remote=remote.pulp_href, mirror=True)
    monitor_task(repositories.sync(repo.pulp_href, sync_url).task)

    # Only content from the second remote should remain.
    repo = repositories.read(repo.pulp_href)
    self.assertDictEqual(RPM_FIXTURE_SUMMARY, get_content_summary(repo.to_dict()))
def test_sync_packages_with_unsupported_checksum_type(self):
    """Verify that syncing a repo whose packages use MD5 checksums fails.

    Requires the 'md5' checksum type to be absent from the
    ALLOWED_CONTENT_CHECKSUMS setting.
    """
    # Repository plus a remote pointing at the md5-checksummed fixture.
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.remote_api.create(
        gen_rpm_remote(policy="on_demand", url=RPM_MD5_REPO_FIXTURE_URL)
    )
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # The sync task must fail with a forbidden-checksum error.
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, sync_payload)
    with self.assertRaises(PulpTaskError) as ctx:
        monitor_task(sync_response.task)
    self.assertIn(
        "rpm-with-md5/bear-4.1-1.noarch.rpm contains forbidden checksum type",
        ctx.exception.task.error["description"],
    )
def test_01_sync(self):
    """Assert that syncing the repository triggers auto-publish and auto-distribution."""
    self.assertEqual(self.publications_api.list().count, 0)
    # assertIsNone/assertIsNotNone/assertIn give clearer failure messages
    # than assertTrue(x is None) / assertTrue(x in y).
    self.assertIsNone(self.distribution.publication)

    # Sync the repository.
    repository_sync_data = RpmRepositorySyncURL(remote=self.remote.pulp_href)
    sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data)
    task = monitor_task(sync_response.task)
    self.distribution = self.distributions_api.read(self.distribution.pulp_href)

    # Check that all the appropriate resources were created.
    self.assertGreater(len(task.created_resources), 1)
    self.assertEqual(self.publications_api.list().count, 1)
    self.assertIsNotNone(self.distribution.publication)
    self.assertIn(self.distribution.publication, task.created_resources)

    # Check that the publish settings were used.
    publication = self.publications_api.read(self.distribution.publication)
    self.assertTrue(publication.sqlite_metadata)

    # Sync the repository again. Since there should be no new repository version, there
    # should be no new publications or distributions either.
    sync_response = self.repo_api.sync(self.repo.pulp_href, repository_sync_data)
    task = monitor_task(sync_response.task)
    self.assertEqual(len(task.created_resources), 0)
    self.assertEqual(self.publications_api.list().count, 1)
def do_test_remove_unit(self, remote_url):
    """Sync a repository and verify content units can't be DELETEd directly."""
    repo = self.repo_api.create(gen_repo())
    remote = self.remote_api.create(gen_rpm_remote(remote_url, policy="on_demand"))
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = self.repo_api.read(repo.pulp_href)

    # Register cleanups now that both objects exist.
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Try to DELETE one unit of every content type present in the repository.
    repo_content = get_content(repo.to_dict())
    base_addr = self.cfg.get_host_settings()[0]["url"]
    for content_type in repo_content:
        unit_href = repo_content[content_type][0]["pulp_href"]
        response = requests.delete(urljoin(base_addr, unit_href))
        # The API must answer 405 (method not allowed).
        self.assertEqual(response.status_code, 405)
def test_publish_with_unsupported_checksum_type(self):
    """Publishing with an 'md5' package checksum type must be rejected.

    Steps: sync a repo with on_demand policy, then request a publication
    with package_checksum_type='md5' and expect the API to refuse it.
    Requires 'md5' to be absent from ALLOWED_CONTENT_CHECKSUMS.
    """
    # Create and sync a repository.
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.remote_api.create(gen_rpm_remote(policy="on_demand"))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)

    # Publishing with the forbidden checksum type must raise immediately.
    publish_data = RpmRpmPublication(
        repository=repo.pulp_href, package_checksum_type="md5"
    )
    with self.assertRaises(ApiException) as ctx:
        self.publications.create(publish_data)
    self.assertIn(
        "Checksum must be one of the allowed checksum types.", ctx.exception.body
    )
def _setup_repositories(cls, url=None):
    """Create NUM_REPOS (import, export, remote) triples; sync only the export repos.

    Returns (tuple): lists of import repos, export repos, and remotes.
    """
    import_repos = []
    export_repos = []
    remotes = []
    for _ in range(NUM_REPOS):
        import_repo = cls.repo_api.create(gen_repo())
        export_repo = cls.repo_api.create(gen_repo())
        body = gen_rpm_remote(url) if url else gen_rpm_remote()
        remote = cls.remote_api.create(body)
        # Only the export repo gets content synced into it.
        sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
        monitor_task(cls.repo_api.sync(export_repo.pulp_href, sync_payload).task)
        export_repos.append(export_repo)
        import_repos.append(import_repo)
        remotes.append(remote)
    return import_repos, export_repos, remotes
def do_test(self, repository=None, remote=None):
    """Sync a repository.

    Args:
        repository (pulp_rpm.app.models.repository.RpmRepository):
            optional existing repository; a new one is created otherwise
        remote (pulp_rpm.app.models.repository.RpmRemote):
            optional existing remote; a new one is created otherwise

    Returns (tuple):
        freshly-read (RpmRepository, RpmRemote) instances after the sync
    """
    if not repository:
        repo = self.repo_api.create(gen_repo())
        # A brand-new repository must start at version 0.
        self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    else:
        repo = self.repo_api.read(repository.pulp_href)

    if remote:
        remote = self.remote_api.read(remote.pulp_href)
    else:
        remote = self.remote_api.create(gen_rpm_remote())

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    return (
        self.repo_api.read(repo.pulp_href),
        self.remote_api.read(remote.pulp_href),
    )
def _do_test(self, url):
    """End-to-end: create repo + remote for *url*, sync, publish, distribute."""
    # Derive the distribution name from the URL.
    name = self._name_from_url(url)

    repo = self._create_repo_for(url)
    self.assertIsNotNone(repo)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self._create_remote_for(url)
    self.assertIsNotNone(remote)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Sync the repo using the remote.
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)

    # Publish the synced version.
    publication_href = self._create_publication_for(repo)
    self.assertIsNotNone(publication_href)
    self.addCleanup(self.publications.delete, publication_href)

    # Distribute the publication.
    distribution = self._create_distribution_for(name, publication_href)
    self.assertIsNotNone(distribution)
    self.addCleanup(self.distributions.delete, distribution.pulp_href)
def _sync(url=None):
    """Create a repo and a remote for *url*, sync, and return the synced repo."""
    repo = cls.repo_api.create(gen_repo())
    remote = cls.remote_api.create(gen_rpm_remote(url))
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(cls.repo_api.sync(repo.pulp_href, sync_payload).task)
    return cls.repo_api.read(repo.pulp_href)
def test_sync_advisory_proper_subset_pgk_list(self):
    """Test success: sync advisories where pkglist is proper-subset of another.

    If update_dates and update_version are the same, pkglist intersection
    is non-empty and a proper-subset of the 'other' pkglist, sync should
    succeed.
    """
    body = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL)
    remote = self.remote_api.create(body)
    # sync
    repo, remote = self.do_test(remote=remote)
    # add remote to clean up
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # create remote with colliding advisory
    body = gen_rpm_remote(RPM_ADVISORY_INCOMPLETE_PKG_LIST_URL)
    remote = self.remote_api.create(body)
    # add resources to clean up
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    try:
        monitor_task(sync_response.task)
    except Exception as e:
        # BUGFIX: exceptions have no `.message` attribute on Python 3, so
        # the old `e.message` raised AttributeError inside the failure
        # handler; format the exception itself instead.
        self.fail("Unexpected exception {}".format(e))
def test_child_detection(self):
    """Copy an advisory plus its direct package & module deps (non-recursive).

    - Populate a source repository from the modular fixture.
    - Copy a single advisory into an empty repository with recursive=False.
    - Verify the advisory and exactly its direct package children arrived.
    """
    empty_repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, empty_repo.pulp_href)
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(gen_rpm_remote(url=RPM_MODULAR_FIXTURE_URL))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = self.repo_api.read(repo.pulp_href)

    # Locate the href of the advisory under test.
    test_advisory_href = get_all_content_hrefs(
        self.rpm_advisory_content_api,
        repository_version=repo.latest_version_href,
        id=self.test_advisory,
    )[0]

    config = [
        {
            "source_repo_version": repo.latest_version_href,
            "dest_repo": empty_repo.pulp_href,
            "content": [test_advisory_href],
        }
    ]
    rpm_copy(self.cfg, config, recursive=False)

    empty_repo = self.repo_api.read(empty_repo.pulp_href)
    copied = get_content(empty_repo.to_dict())
    copied_package_names = [pkg["name"] for pkg in copied[PULP_TYPE_PACKAGE]]
    copied_advisory_ids = [adv["id"] for adv in copied[PULP_TYPE_ADVISORY]]

    self.assertEqual(len(copied_advisory_ids), 1)
    # Exactly the 3 direct children of the advisory are copied.
    self.assertEqual(len(copied_package_names), 3)
    for dependency in self.test_advisory_dependencies:
        self.assertIn(dependency, copied_package_names)
def do_test(self, policy, sync_policy, url=RPM_UNSIGNED_FIXTURE_URL):
    """Verify whether package manager can consume content from Pulp."""
    # This test mutates the host's dnf configuration via sudo; restyling the
    # command sequence is risky, so only documentation is added here.
    if not self._has_dnf():
        self.skipTest("This test requires dnf")
    # Remote with the requested download policy.
    body = gen_rpm_remote(policy=policy)
    remote = self.remote_api.create(body)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    # mirror_complete syncs produce their own publication, so autopublish
    # is enabled only for the other sync policies.
    repo = self.repo_api.create(gen_repo(autopublish=sync_policy != "mirror_complete"))
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    # Test starts from a clean artifact store.
    before_sync_artifact_count = self.artifacts_api.list().count
    self.assertEqual(before_sync_artifact_count, 0)
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href, sync_policy=sync_policy)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    created_resources = monitor_task(sync_response.task).created_resources
    # Either autopublish or the mirror_complete sync created a publication.
    publication_href = [r for r in created_resources if "publication" in r][0]
    self.addCleanup(self.publications.delete, publication_href)
    # Serve the publication through a distribution.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = self.distributions.create(body)
    created_resources = monitor_task(distribution_response.task).created_resources
    distribution = self.distributions.read(created_resources[0])
    self.addCleanup(self.distributions.delete, distribution.pulp_href)
    # Register the distribution as a dnf repo on the host and disable
    # gpg checking for it; the cleanup disables the repo again.
    cli_client = cli.Client(self.cfg)
    cli_client.run(("sudo", "dnf", "config-manager", "--add-repo", distribution.base_url))
    repo_id = "*{}_".format(distribution.base_path)
    cli_client.run((
        "sudo",
        "dnf",
        "config-manager",
        "--save",
        "--setopt={}.gpgcheck=0".format(repo_id),
        repo_id,
    ))
    self.addCleanup(cli_client.run, ("sudo", "dnf", "config-manager", "--disable", repo_id))
    # Snapshot the artifact count before the package manager pulls content;
    # presumably compared by the caller/teardown — TODO confirm.
    self.before_consumption_artifact_count = self.artifacts_api.list().count
    # Install a fixture package through the system package manager and
    # confirm dnf resolved it from our distribution.
    rpm_name = "walrus"
    self.pkg_mgr.install(rpm_name)
    self.addCleanup(self.pkg_mgr.uninstall, rpm_name)
    rpm = cli_client.run(("rpm", "-q", rpm_name)).stdout.strip().split("-")
    self.assertEqual(rpm_name, rpm[0])
def do_test(self, with_sqlite):
    """Sync, publish and distribute a repo, checking sqlite metadata presence.

    When *with_sqlite* is true the publication must expose three ``*_db``
    sqlite entries in repomd.xml and each must be downloadable; otherwise
    no sqlite entries may appear.
    """
    # Repository + on_demand remote.
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.remote_api.create(gen_rpm_remote(policy="on_demand"))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Sync.
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)

    # Publish with/without sqlite metadata, then distribute.
    publish_data = RpmRpmPublication(repository=repo.pulp_href, sqlite_metadata=with_sqlite)
    publish_task = monitor_task(self.publications.create(publish_data).task)
    publication_href = publish_task.created_resources[0]
    self.addCleanup(self.publications.delete, publication_href)

    body = gen_distribution()
    body["publication"] = publication_href
    distribution_task = monitor_task(self.distributions.create(body).task)
    distribution = self.distributions.read(distribution_task.created_resources[0])
    self.addCleanup(self.distributions.delete, distribution.pulp_href)

    # Inspect repomd.xml for sqlite "*_db" data entries.
    repomd = ElementTree.fromstring(
        http_get(os.path.join(distribution.base_url, "repodata/repomd.xml"))
    )
    data_xpath = "{{{}}}data".format(RPM_NAMESPACES["metadata/repo"])
    sqlite_files = [
        elem for elem in repomd.findall(data_xpath) if elem.get("type").endswith("_db")
    ]

    if with_sqlite:
        self.assertEqual(3, len(sqlite_files))
        location_xpath = "{{{}}}location".format(RPM_NAMESPACES["metadata/repo"])
        for db_elem in sqlite_files:
            db_href = db_elem.find(location_xpath).get("href")
            # Each sqlite file must actually be served.
            http_get(os.path.join(distribution.base_url, db_href))
    else:
        self.assertEqual(0, len(sqlite_files))
def test_all(self):
    """Sync and publish an RPM repository and verify the checksum.

    Ensures the published updateinfo.xml contains no 'sum' tag.
    """
    # 1. create repo and remote
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    body = gen_rpm_remote()
    remote = self.remote_api.create(body)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # 2. Sync it
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # 3. Publish and distribute
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_response = self.publications.create(publish_data)
    # BUGFIX: monitor_task() returns the finished task object, not a list;
    # the created hrefs live on its .created_resources attribute (matches
    # how every sibling test in this file consumes monitor_task()).
    created_resources = monitor_task(publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(self.publications.delete, publication_href)

    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = self.distributions.create(body)
    created_resources = monitor_task(distribution_response.task).created_resources
    distribution = self.distributions.read(created_resources[0])
    self.addCleanup(self.distributions.delete, distribution.pulp_href)

    # 4. check the tag 'sum' is not present in updateinfo.xml
    repomd = ElementTree.fromstring(
        http_get(os.path.join(distribution.base_url, "repodata/repomd.xml"))
    )
    with NamedTemporaryFile() as temp_file:
        update_xml_url = self._get_updateinfo_xml_path(repomd)
        update_xml_content = http_get(os.path.join(distribution.base_url, update_xml_url))
        temp_file.write(update_xml_content)
        temp_file.seek(0)
        # TODO: fix this as in CI update_info.xml has '.gz' but it is not gzipped
        try:
            update_xml = gzip.open(temp_file.name).read()
        except OSError:
            update_xml = temp_file.read()

    update_info_content = ElementTree.fromstring(update_xml)
    tags = {elem.tag for elem in update_info_content.iter()}
    self.assertNotIn("sum", tags, update_info_content)
def test_all(self):
    """Check additive-mode sync: content from both remotes accumulates."""
    api_client = gen_rpm_client()
    repositories = RepositoriesRpmApi(api_client)
    remotes = RemotesRpmApi(api_client)

    # First sync from the RPM fixture.
    repo = repositories.create(gen_repo())
    self.addCleanup(repositories.delete, repo.pulp_href)
    remote = remotes.create(gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(remotes.delete, remote.pulp_href)
    sync_url = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repositories.sync(repo.pulp_href, sync_url).task)

    # Second (default/additive) sync from the SRPM fixture.
    remote = remotes.create(gen_rpm_remote(url=SRPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(remotes.delete, remote.pulp_href)
    sync_url = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repositories.sync(repo.pulp_href, sync_url).task)

    # Counts must be the sum of both fixtures.
    repo = repositories.read(repo.pulp_href)
    content = get_content(repo.to_dict())
    self.assertEqual(
        RPM_PACKAGE_COUNT + SRPM_UNSIGNED_FIXTURE_PACKAGE_COUNT,
        len(content[PULP_TYPE_PACKAGE]),
    )
    self.assertEqual(
        RPM_ADVISORY_COUNT + SRPM_UNSIGNED_FIXTURE_ADVISORY_COUNT,
        len(content[PULP_TYPE_ADVISORY]),
    )
def test_is_modular_flag(
    rpm_repository_api,
    rpm_package_api,
    rpm_rpmremote_api,
    gen_object_with_cleanup,
    delete_orphans_pre,
):
    """Packages synced from a modular repo carry is_modular=True.

    Packages synced from a non-modular repo must carry is_modular=False.
    """
    # Sync the non-modular fixture first.
    remote = gen_object_with_cleanup(
        rpm_rpmremote_api, gen_rpm_remote(RPM_SIGNED_FIXTURE_URL)
    )
    repo = gen_object_with_cleanup(rpm_repository_api, gen_repo(remote=remote.pulp_href))
    monitor_task(
        rpm_repository_api.sync(
            repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)
        ).task
    )

    # No package from the plain repo may be marked modular.
    assert rpm_package_api.list().count == RPM_FIXTURE_SUMMARY[RPM_PACKAGE_CONTENT_NAME]
    for pkg in get_content(repo.to_dict())[RPM_PACKAGE_CONTENT_NAME]:
        assert pkg["is_modular"] is False

    # Now sync the modular fixture.
    remote_modular = gen_object_with_cleanup(
        rpm_rpmremote_api, gen_rpm_remote(RPM_MODULAR_FIXTURE_URL)
    )
    repo_modular = gen_object_with_cleanup(
        rpm_repository_api, gen_repo(remote=remote_modular.pulp_href)
    )
    monitor_task(
        rpm_repository_api.sync(
            repo_modular.pulp_href, RpmRepositorySyncURL(remote=remote_modular.pulp_href)
        ).task
    )

    # Every package from the modular repo must be marked modular.
    for pkg in get_content(repo_modular.to_dict())[RPM_PACKAGE_CONTENT_NAME]:
        assert pkg["is_modular"] is True
def test_strict_copy_package_to_empty_repo(self):
    """Recursively copy one package and its dependencies into an empty repo.

    - Populate a source repository from the unsigned RPM fixture.
    - Copy the test package with recursive=True into an empty repository.
    - Verify the package plus its two dependencies (3 total) were copied.
    """
    empty_repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, empty_repo.pulp_href)
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = self.repo_api.read(repo.pulp_href)

    # Locate the href of the package under test.
    test_package_href = [
        pkg
        for pkg in get_content(repo.to_dict())[PULP_TYPE_PACKAGE]
        if pkg["name"] == self.test_package
    ][0]["pulp_href"]

    config = [
        {
            "source_repo_version": repo.latest_version_href,
            "dest_repo": empty_repo.pulp_href,
            "content": [test_package_href],
        }
    ]
    rpm_copy(self.cfg, config, recursive=True)

    empty_repo = self.repo_api.read(empty_repo.pulp_href)
    copied_names = [
        pkg["name"] for pkg in get_content(empty_repo.to_dict())[PULP_TYPE_PACKAGE]
    ]
    # Original package plus its two dependencies.
    self.assertEqual(len(copied_names), 3)
    for dependency in self.test_package_dependencies:
        self.assertIn(dependency, copied_names)
def do_test(self, url):
    """Sync a repository from ``url`` and return the finished sync task."""
    repo_api = RepositoriesRpmApi(self.client)
    remote_api = RemotesRpmApi(self.client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    remote = remote_api.create(gen_rpm_remote(url=url))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    return monitor_task(repo_api.sync(repo.pulp_href, sync_payload).task)
def do_test(self, url=None):
    """Sync, publish and distribute an RPM repository.

    Args:
        url (str): optional URL of the repository to use as a remote;
            the default fixture is used otherwise.

    Returns (str):
        base_url of the created RPM distribution.
    """
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    body = gen_rpm_remote(url=url) if url else gen_rpm_remote()
    remote = self.remote_api.create(body)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)

    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_task = monitor_task(self.publications.create(publish_data).task)
    publication_href = publish_task.created_resources[0]
    self.addCleanup(self.publications.delete, publication_href)

    body = gen_distribution()
    body["publication"] = publication_href
    distribution_task = monitor_task(self.distributions.create(body).task)
    distribution = self.distributions.read(distribution_task.created_resources[0])
    self.addCleanup(self.distributions.delete, distribution.pulp_href)

    return distribution.to_dict()["base_url"]
def _setup_repositories(cls):
    """Create and sync a single repository to be exported.

    Returns (tuple): the (repository, remote) pair after a completed sync.
    """
    # NOTE: the previous version built `repos`/`remotes` accumulator lists
    # that were appended to once and never returned — dead code removed;
    # the docstring also wrongly claimed "a number of repositories".
    a_repo = cls.repo_api.create(gen_repo())
    # give it a remote and sync it
    body = gen_rpm_remote()
    remote = cls.remote_api.create(body)
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = cls.repo_api.sync(a_repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    return a_repo, remote
def sync(self, repository=None, remote=None, optimize=True, mirror=False):
    """Sync a repository, wait for completion, and return its progress reports.

    Args:
        repository (pulp_rpm.app.models.repository.RpmRepository): object of RPM repository
        remote (pulp_rpm.app.models.repository.RpmRemote): object of RPM Remote
        optimize (bool): whether to request an optimized (skip-unchanged) sync
        mirror (bool): whether to sync in mirror mode

    Returns (list): list of the ProgressReport objects created from this sync

    NOTE(review): despite the ``None`` defaults, both ``repository`` and
    ``remote`` are dereferenced unconditionally below, so callers must
    always pass them — confirm before relying on the defaults.
    """
    repository_sync_data = RpmRepositorySyncURL(
        remote=remote.pulp_href, optimize=optimize, mirror=mirror
    )
    sync_response = self.repo_api.sync(repository.pulp_href, repository_sync_data)
    # Block until the sync task finishes before collecting its reports.
    monitor_task(sync_response.task)
    return progress_reports(sync_response.task)
def do_sync(self, remote_url):
    """Create a repository, sync it from *remote_url*, and return it as a dict."""
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(gen_rpm_remote(url=remote_url))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)
    return self.repo_api.read(repo.pulp_href).to_dict()
def test_all(self):
    """Sync/publish a repository and check updateinfo.xml carries no 'sum' tag."""
    # Repository and default remote.
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.remote_api.create(gen_rpm_remote())
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Sync.
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)

    # Publish and distribute.
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_task = monitor_task(self.publications.create(publish_data).task)
    publication_href = publish_task.created_resources[0]
    self.addCleanup(self.publications.delete, publication_href)

    body = gen_distribution()
    body["publication"] = publication_href
    distribution_task = monitor_task(self.distributions.create(body).task)
    distribution = self.distributions.read(distribution_task.created_resources[0])
    self.addCleanup(self.distributions.delete, distribution.pulp_href)

    # Fetch updateinfo.xml (possibly gzipped) and inspect its tag set.
    repomd = ElementTree.fromstring(
        http_get(os.path.join(distribution.base_url, "repodata/repomd.xml"))
    )
    update_xml_url = self._get_updateinfo_xml_path(repomd)
    update_xml = read_xml_gz(http_get(os.path.join(distribution.base_url, update_xml_url)))
    update_info_content = ElementTree.fromstring(update_xml)
    tags = {elem.tag for elem in update_info_content.iter()}
    self.assertNotIn("sum", tags, update_info_content)
def test_all(self):
    """Sync/publish a repo whose ``updateinfo.xml`` contains references.

    This test targets the following issue:

    `Pulp #3998 <https://pulp.plan.io/issues/3998>`_.
    """
    api_client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(api_client)
    remote_api = RemotesRpmApi(api_client)
    publications = PublicationsRpmApi(api_client)

    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    remote = remote_api.create(gen_rpm_remote(RPM_REFERENCES_UPDATEINFO_URL))
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # A fresh repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")

    # Sync and verify the resulting content summary.
    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(repo_api.sync(repo.pulp_href, sync_payload).task)
    repo = repo_api.read(repo.pulp_href)
    self.assertIsNotNone(repo.latest_version_href)
    content_summary = get_content_summary(repo.to_dict())
    self.assertDictEqual(content_summary, RPM_FIXTURE_SUMMARY, content_summary)

    # Publishing the referenced-advisory content must succeed.
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_task = monitor_task(publications.create(publish_data).task)
    publication_href = publish_task.created_resources[0]
    self.assertIsNotNone(publication_href)
    self.addCleanup(publications.delete, publication_href)
def do_test(self, url):
    """Sync and publish an RPM repository given a feed URL."""
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(gen_rpm_remote(url=url))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(self.repo_api.sync(repo.pulp_href, sync_payload).task)

    # Publishing the synced content must produce a publication href.
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_task = monitor_task(self.publications.create(publish_data).task)
    publication_href = publish_task.created_resources[0]
    self.addCleanup(self.publications.delete, publication_href)
    self.assertIsNotNone(publication_href)
def test_sync_advisory_diff_repo(self):
    """Test failure sync advisories.

    Advisories sharing id and version but with different update_date and
    no package intersection must make the sync fail; the asserted message
    must match the one raised by pulp_rpm.app.advisory.

    NOTE: If ALLOW_AUTOMATIC_UNSAFE_ADVISORY_CONFLICT_RESOLUTION is True,
    this test will fail since the errata-merge will be allowed.
    """
    remote = self.remote_api.create(gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL))
    # Initial sync.
    repo, remote = self.do_test(remote=remote)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Second remote whose advisory collides with the first sync's content.
    remote = self.remote_api.create(gen_rpm_remote(RPM_ADVISORY_DIFFERENT_REPO_URL))
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, sync_payload)
    with self.assertRaises(PulpTaskError) as exc:
        monitor_task(sync_response.task)

    task_result = exc.exception.task.to_dict()
    error_msg = (
        "Incoming and existing advisories have the same id but different "
        "timestamps and non-intersecting package lists. It is likely that they are from "
        "two different incompatible remote repositories. E.g. RHELX-repo and "
        "RHELY-debuginfo repo. Ensure that you are adding content for the compatible "
        "repositories. Advisory id: {}".format(RPM_ADVISORY_TEST_ID)
    )
    self.assertIn(error_msg, task_result["error"]["description"])
def do_test(self, acs_url, paths, remote_url):
    """Sync with ACS test.

    The ACS points at a repository holding every needed package while the
    main remote serves metadata only, so the sync can succeed only after
    the ACS has been refreshed.
    """
    # ACS remote (rpm-unsigned has all packages needed).
    acs_remote = self.remote_api.create(gen_rpm_remote(url=acs_url, policy="on_demand"))
    self.addCleanup(self.remote_api.delete, acs_remote.pulp_href)
    acs = self.acs_api.create(
        {
            "name": "alternatecontentsource",
            "remote": acs_remote.pulp_href,
            "paths": paths,
        }
    )
    self.addCleanup(self.acs_api.delete, acs.pulp_href)

    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.remote_api.create(gen_rpm_remote(url=remote_url))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Before the ACS refresh, the metadata-only sync must fail with a 404.
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    with self.assertRaises(PulpTaskError) as ctx:
        sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
    self.assertIn("404, message='Not Found'", ctx.exception.task.error["description"])

    # Refresh the ACS; afterwards the same sync succeeds.
    acs_refresh = self.acs_api.refresh(acs.pulp_href, acs)
    monitor_task_group(acs_refresh.task_group)

    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    return self.repo_api.read(repo.pulp_href)
def sync(self, repository, remote, optimize=True, mirror=False):
    """Sync a repository and block until the task finishes.

    Args:
        repository (pulp_rpm.app.models.repository.RpmRepository): object of RPM repository
        remote (pulp_rpm.app.models.repository.RpmRemote): object of RPM Remote
        optimize (bool): whether to enable optimized sync
        mirror (bool): whether to use mirror-mode sync

    Returns:
        the completed task object returned by ``monitor_task`` — not a
        list of ProgressReport objects as the previous docstring claimed.
    """
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href,
                                                optimize=optimize,
                                                mirror=mirror)
    sync_response = self.repo_api.sync(repository.pulp_href, repository_sync_data)
    return monitor_task(sync_response.task)
def test_sync_from_invalid_mirror_list_feed(self):
    """Sync RPM content from a mirror list feed which contains an invalid remote URL."""
    repo = self.repo_api.create(gen_repo())
    # A fresh repository starts at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    remote = self.remote_api.create(gen_rpm_remote(RPM_MIRROR_LIST_BAD_FIXTURE_URL))
    remote = self.remote_api.read(remote.pulp_href)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    sync_payload = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, sync_payload)
    try:
        monitor_task(sync_response.task)
    except PulpTaskError as exc:
        # The task must report the invalid-URL error verbatim.
        self.assertEqual(
            exc.task.to_dict()["error"]["description"],
            "A no valid remote URL was provided.",
        )
    else:
        self.fail("A task was completed without a failure.")
def test_sync_advisory_diff_repo(self):
    """Test failure sync advisories.

    If advisory has same id, version but different update_date and no
    packages intersection, sync should fail. Tested error_msg must be
    same as we use in pulp_rpm.app.advisory.
    """
    body = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL)
    remote = self.remote_api.create(body)
    # sync
    repo, remote = self.do_test(remote=remote)
    # add remote to clean up
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # create remote with colliding advisory
    body = gen_rpm_remote(RPM_ADVISORY_DIFFERENT_REPO_URL)
    remote = self.remote_api.create(body)
    # Register cleanups before the sync so they run regardless of outcome.
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    # BUGFIX: the previous try/except let the test pass silently (or hit a
    # NameError on undefined locals) when the sync unexpectedly succeeded;
    # assertRaises fails loudly in that case.
    with self.assertRaises(PulpTaskError) as exc:
        monitor_task(sync_response.task)

    task_result = exc.exception.task.to_dict()
    error_msg = (
        'Incoming and existing advisories have the same id but different '
        'timestamps and intersecting package lists. It is likely that they are from '
        'two different incompatible remote repositories. E.g. RHELX-repo and '
        'RHELY-debuginfo repo. Ensure that you are adding content for the compatible '
        'repositories. Advisory id: {}'.format(RPM_ADVISORY_TEST_ID)
    )
    self.assertIn(error_msg, task_result['error']['description'])