def test_sync_repo_metadata_change(self):
    """Sync RPM modular content.

    This test targets sync issue when only custom metadata changes:

    * `Pulp #7030 <https://pulp.plan.io/issues/7030>`_
    """
    # First sync: repository with the original custom metadata.
    remote = self.remote_api.create(gen_rpm_remote(RPM_CUSTOM_REPO_METADATA_FIXTURE_URL))
    repo, remote = self.do_test(remote=remote)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Second sync: same content, only the custom metadata differs.
    remote_changed = self.remote_api.create(
        gen_rpm_remote(RPM_CUSTOM_REPO_METADATA_CHANGED_FIXTURE_URL)
    )
    self.addCleanup(self.remote_api.delete, remote_changed.pulp_href)
    repo, remote = self.do_test(repository=repo, remote=remote_changed)

    # The metadata-only change must still create a new repository version
    # (".../versions/2/" -> last char "2") containing repo-metadata content.
    self.assertEqual(repo.latest_version_href.rstrip("/")[-1], "2")
    self.assertTrue(PULP_TYPE_REPOMETADATA in get_added_content(repo.to_dict()))
def test_all(self):
    """Test of mirror mode.

    Sync a repo from one remote, then mirror-sync it from a second remote
    and verify that only the second remote's content remains.
    """
    client = gen_rpm_client()
    repo_api = RepositoriesRpmApi(client)
    remote_api = RemotesRpmApi(client)

    # 1. create repo, remote and sync them
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)

    body = gen_rpm_remote(url=SRPM_UNSIGNED_FIXTURE_URL)
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # 2. create another remote and re-sync
    body = gen_rpm_remote(url=RPM_SIGNED_FIXTURE_URL)
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)

    # mirror=True makes the repo an exact copy of the remote, replacing
    # the previously synced content instead of merging with it.
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href, mirror=True)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # 3. Check that only new content is present
    repo = repo_api.read(repo.pulp_href)
    self.assertDictEqual(RPM_FIXTURE_SUMMARY, get_content_summary(repo.to_dict()))
def test_sync_advisory_updated_update_date(self):
    """Test sync advisory with updated update_date."""
    # Initial sync from the baseline fixture.
    remote = self.remote_api.create(gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL))
    repo, remote = self.do_test(remote=remote)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Re-sync from a fixture whose advisory carries a newer updated_date.
    remote = self.remote_api.create(gen_rpm_remote(RPM_UPDATED_UPDATEINFO_FIXTURE_URL))
    repo, remote = self.do_test(repo, remote)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    repo_dict = repo.to_dict()

    def _updated_dates(advisories):
        # Collect updated_date of the advisory under test only.
        return [
            advisory["updated_date"]
            for advisory in advisories
            if RPM_ADVISORY_TEST_ID_NEW in advisory["id"]
        ]

    added_advisory_date = _updated_dates(get_added_content(repo_dict)[PULP_TYPE_ADVISORY])
    removed_advisory_date = _updated_dates(get_removed_content(repo_dict)[PULP_TYPE_ADVISORY])

    # The advisory that replaced the old one must be strictly newer.
    self.assertGreater(
        parse_datetime(added_advisory_date[0]), parse_datetime(removed_advisory_date[0])
    )
def _setup_repositories(cls, url=None): """Create and sync a number of repositories to be exported.""" # create and remember a set of repo import_repos = [] export_repos = [] remotes = [] for r in range(NUM_REPOS): import_repo = cls.repo_api.create(gen_repo()) export_repo = cls.repo_api.create(gen_repo()) if url: body = gen_rpm_remote(url) else: body = gen_rpm_remote() remote = cls.remote_api.create(body) repository_sync_data = RpmRepositorySyncURL( remote=remote.pulp_href) sync_response = cls.repo_api.sync(export_repo.pulp_href, repository_sync_data) monitor_task(sync_response.task) # remember it export_repos.append(export_repo) import_repos.append(import_repo) remotes.append(remote) return import_repos, export_repos, remotes
def test_sync_advisory_proper_subset_pgk_list(self):
    """Test success: sync advisories where pkglist is proper-subset of another.

    If update_dates and update_version are the same, pkglist intersection
    is non-empty and a proper-subset of the 'other' pkglist, sync should
    succeed.
    """
    body = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL)
    remote = self.remote_api.create(body)
    # sync
    repo, remote = self.do_test(remote=remote)
    # add remote to clean up
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # create remote with colliding advisory
    body = gen_rpm_remote(RPM_ADVISORY_INCOMPLETE_PKG_LIST_URL)
    remote = self.remote_api.create(body)
    # add resources to clean up
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    try:
        monitor_task(sync_response.task)
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
        # original `e.message` raised AttributeError and masked the real
        # failure. Format the exception object itself instead.
        self.fail("Unexpected exception {}".format(e))
def test_sync_dist_tree_change_variant_repo(self):
    """Test changed variant repository."""
    variant_test_pkg_name = "test-srpm03"

    # Initial sync of the kickstart fixture; refresh the repo object after.
    remote = self.remote_api.create(gen_rpm_remote(RPM_KICKSTART_FIXTURE_URL))
    repo, remote = self.do_test(remote=remote)
    repo = self.repo_api.read(repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    def _synced_package_names():
        # Names of all packages currently known to the plugin.
        return [pkg["name"] for pkg in self.packages_api.list().to_dict()["results"]]

    # The testing package must not be present before the variant changes.
    self.assertNotIn(variant_test_pkg_name, _synced_package_names())

    # Re-sync from a remote whose variant repository differs.
    changed_remote = self.remote_api.create(
        gen_rpm_remote(RPM_DISTRIBUTION_TREE_CHANGED_VARIANT_URL)
    )
    self.addCleanup(self.remote_api.delete, changed_remote.pulp_href)
    repo, remote = self.do_test(repo, changed_remote)
    repo = self.repo_api.read(repo.pulp_href)

    # The new package from the changed variant is synced to the subrepo.
    self.assertIn(variant_test_pkg_name, _synced_package_names())
def test_sync_dist_tree_change_main_repo(self):
    """Test changed main repository."""
    main_repo_test_pkg_name = "test-srpm01"

    # Initial sync & refresh of the repo object.
    remote = self.remote_api.create(gen_rpm_remote(RPM_KICKSTART_FIXTURE_URL))
    repo, remote = self.do_test(remote=remote)
    repo = self.repo_api.read(repo.pulp_href)
    # Last character of ".../versions/<N>/" is the version number.
    repo_version = repo.latest_version_href.rstrip("/")[-1]
    self.addCleanup(self.remote_api.delete, remote.pulp_href)
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    # Re-sync against a tree whose main repository changed.
    changed_remote = self.remote_api.create(
        gen_rpm_remote(RPM_DISTRIBUTION_TREE_CHANGED_MAIN_URL)
    )
    self.addCleanup(self.remote_api.delete, changed_remote.pulp_href)
    repo, remote = self.do_test(repo, changed_remote)
    repo = self.repo_api.read(repo.pulp_href)
    updated_repo_version = repo.latest_version_href.rstrip("/")[-1]

    # Assert new content was added and repo version was increased.
    self.assertNotEqual(repo_version, updated_repo_version)
    synced_names = [pkg["name"] for pkg in self.packages_api.list().to_dict()["results"]]
    self.assertIn(main_repo_test_pkg_name, synced_names)
def test_all(self):
    """Sync two copies of the same packages, make sure we end up with only one copy.

    Do the following:

    1. Create a repository and a remote.
    2. Sync the remote.
    3. Assert that the content summary matches what is expected.
    4. Create a new remote w/ using fixture containing updated errata
       (packages with the same NEVRA as the existing package content, but
       different pkgId)
    5. Sync the remote again.
    6. Assert that repository version is different from the previous one
       but has the same content summary.
    7. Assert that the packages have changed since the last sync.
    """
    client = api.Client(self.cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])

    # Create a remote with the unsigned RPM fixture url.
    body = gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])

    # Sync the repository.
    self.assertIsNone(repo['_latest_version_href'])
    sync(self.cfg, remote, repo)
    repo = client.get(repo['_href'])
    self.assertDictEqual(get_content_summary(repo), RPM_FIXTURE_CONTENT_SUMMARY)

    # Save a copy of the original packages.
    # NOTE(review): despite the comment, this filters on type 'packages' but
    # keys on 'errata_id' (an advisory field), while the post-sync dict below
    # filters on type 'update'. The two dicts are built from different content
    # types/keys — confirm the intended filter and key before relying on this.
    original_packages = {
        content['errata_id']: content
        for content in get_content(repo)
        if content['type'] == 'packages'
    }

    # Create a remote with a different test fixture with the same NEVRA but
    # different digests.
    body = gen_rpm_remote(url=RPM_SIGNED_FIXTURE_URL)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])

    # Sync the repository again.
    sync(self.cfg, remote, repo)
    repo = client.get(repo['_href'])
    self.assertDictEqual(get_content_summary(repo), RPM_FIXTURE_CONTENT_SUMMARY)
    self.assertEqual(len(get_added_content(repo)), 0)

    # Test that the packages have been modified.
    mutated_packages = {
        content['errata_id']: content
        for content in get_content(repo)
        if content['type'] == 'update'
    }
    self.assertNotEqual(mutated_packages, original_packages)
    # NOTE(review): the dicts are keyed by errata id but indexed here with
    # RPM_PACKAGE_NAME — verify the constant actually matches the key.
    self.assertNotEqual(
        mutated_packages[RPM_PACKAGE_NAME]['pkgId'],
        original_packages[RPM_PACKAGE_NAME]['pkgId'])
def test_all(self): """Sync two fixture content with same NEVRA and different checksum. Make sure we end up with the most recently synced content. Do the following: 1. Create a repository 2. Create two remotes with same content but different checksums. Sync the remotes one after the other. a. Sync remote with packages with SHA256: ``RPM_UNSIGNED_FIXTURE_URL``. b. Sync remote with packages with SHA512: ``RPM_SHA512_FIXTURE_URL``. 3. Make sure the latest content is only kept. This test targets the following issues: * `Pulp #4297 <https://pulp.plan.io/issues/4297>`_ * `Pulp #3954 <https://pulp.plan.io/issues/3954>`_ """ # Step 1 repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) # Step 2. for body in [ gen_rpm_remote(), gen_rpm_remote(url=RPM_SHA512_FIXTURE_URL) ]: remote = self.client.post(RPM_REMOTE_PATH, body) self.addCleanup(self.client.delete, remote['_href']) # Sync the repository. sync(self.cfg, remote, repo) # Step 3 repo = self.client.get(repo['_href']) added_content = get_content(repo)[RPM_PACKAGE_CONTENT_NAME] removed_content = get_removed_content(repo)[RPM_PACKAGE_CONTENT_NAME] # In case of "duplicates" the most recent one is chosen, so the old # package is removed from and the new one is added to a repo version. self.assertEqual( len(added_content), RPM_PACKAGE_COUNT, added_content ) self.assertEqual( len(removed_content), RPM_PACKAGE_COUNT, removed_content ) # Verifying whether the packages with first checksum is removed and second # is added. self.assertEqual(added_content[0]['checksum_type'], 'sha512') self.assertEqual(removed_content[0]['checksum_type'], 'sha256')
def test_all(self): """Sync two fixture content with same NEVRA and different checksum. Make sure we end up with the most recently synced content. Do the following: 1. Create a repository 2. Create two remotes with same content but different checksums. Sync the remotes one after the other. a. Sync remote with packages with SHA256: ``RPM_UNSIGNED_FIXTURE_URL``. b. Sync remote with packages with SHA512: ``RPM_SHA512_FIXTURE_URL``. 3. Make sure the latest content is only kept. This test targets the following issues: * `Pulp #4297 <https://pulp.plan.io/issues/4297>`_ * `Pulp #3954 <https://pulp.plan.io/issues/3954>`_ """ # Step 1 repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) # Step 2. for body in [ gen_rpm_remote(), gen_rpm_remote(url=RPM_SHA512_FIXTURE_URL) ]: remote = self.client.post(RPM_REMOTE_PATH, body) self.addCleanup(self.client.delete, remote['_href']) # Sync the repository. sync(self.cfg, remote, repo) # Step 3 repo = self.client.get(repo['_href']) added_content = get_content(repo)[RPM_PACKAGE_CONTENT_NAME] removed_content = get_removed_content(repo)[RPM_PACKAGE_CONTENT_NAME] # In case of "duplicates" the most recent one is chosen, so the old # package is removed from and the new one is added to a repo version. self.assertEqual( len(added_content), RPM_PACKAGES_COUNT, added_content ) self.assertEqual( len(removed_content), RPM_PACKAGES_COUNT, removed_content ) # Verifying whether the packages with first checksum is removed and second # is added. self.assertEqual(added_content[0]['checksum_type'], 'sha512') self.assertEqual(removed_content[0]['checksum_type'], 'sha256')
def do_test(self, url=None):
    """Sync and publish an RPM repository.

    - create repository
    - create remote
    - sync the remote
    - create publication
    - create distribution

    Args:
        url(string): Optional URL of repository that should be used as a remote

    Returns (string):
        RPM distribution base_url.
    """
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    if url:
        body = gen_rpm_remote(url=url)
    else:
        body = gen_rpm_remote()
    remote = self.remote_api.create(body)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Sync and wait for the task to finish before publishing.
    repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)

    # Publish the latest repository version.
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_response = self.publications.create(publish_data)
    created_resources = monitor_task(
        publish_response.task).created_resources
    publication_href = created_resources[0]
    self.addCleanup(self.publications.delete, publication_href)

    # Serve the publication through a distribution.
    body = gen_distribution()
    body["publication"] = publication_href
    distribution_response = self.distributions.create(body)
    created_resources = monitor_task(
        distribution_response.task).created_resources
    distribution = self.distributions.read(created_resources[0])
    self.addCleanup(self.distributions.delete, distribution.pulp_href)

    return distribution.to_dict()["base_url"]
def do_test_remove_unit(self, remote_url):
    """
    Sync repository and test that content can't be removed directly.
    """
    repo = self.repo_api.create(gen_repo())
    remote = self.remote_api.create(gen_rpm_remote(remote_url, policy="on_demand"))

    sync_response = self.repo_api.sync(
        repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_response.task)
    repo = self.repo_api.read(repo.pulp_href)

    # add resources to clean up
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # Try a direct DELETE on one unit of every content type in the repo;
    # the API must reject each attempt with 405 (method not allowed).
    repo_content = get_content(repo.to_dict())
    base_addr = self.cfg.get_host_settings()[0]["url"]
    for units in repo_content.values():
        response = requests.delete(urljoin(base_addr, units[0]["pulp_href"]))
        self.assertEqual(response.status_code, 405)
def test_file_decriptors(self):
    """Test whether file descriptors are closed properly.

    This test targets the following issue:

    `Pulp #4073 <https://pulp.plan.io/issues/4073>`_

    Do the following:

    1. Check if 'lsof' is installed. If it is not, skip this test.
    2. Create and sync a repo.
    3. Run the 'lsof' command to verify that files in the path
       ``/var/lib/pulp/`` are closed after the sync.
    4. Assert that issued command returns `0` opened files.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    cli_client = cli.Client(cfg, cli.echo_handler)

    # Skip when the 'lsof' utility is unavailable on the host.
    if cli_client.run(('which', 'lsof')).returncode != 0:
        raise unittest.SkipTest('lsof package is not present')

    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    remote = client.post(RPM_REMOTE_PATH, gen_rpm_remote())
    self.addCleanup(client.delete, remote['_href'])
    sync(cfg, remote, repo)

    # 'lsof -t +D <dir>' prints one PID per open file under <dir>;
    # empty output means everything was closed after the sync.
    lsof_output = cli_client.run('lsof -t +D {}'.format(MEDIA_PATH).split()).stdout
    self.assertEqual(len(lsof_output), 0, lsof_output)
def test_all(self):
    """Sync/publish a repo that ``updateinfo.xml`` contains references.

    This test targets the following issue:

    `Pulp #3998 <https://pulp.plan.io/issues/3998>`_.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    body = gen_rpm_remote(url=RPM_REFERENCES_UPDATEINFO_URL)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    sync(cfg, remote, repo)
    repo = client.get(repo['_href'])
    # A successful sync must have produced a repository version.
    self.assertIsNotNone(repo['_latest_version_href'])
    content_summary = get_content_summary(repo)
    self.assertEqual(content_summary,
                     RPM_FIXTURE_CONTENT_SUMMARY,
                     content_summary)
    # Publishing must also succeed with the reference-bearing updateinfo.
    publisher = client.post(RPM_PUBLISHER_PATH, gen_rpm_publisher())
    self.addCleanup(client.delete, publisher['_href'])
    publication = publish(cfg, publisher, repo)
    self.addCleanup(client.delete, publication['_href'])
def test_sync_packages_with_unsupported_checksum_type(self):
    """
    Sync an RPM repository with an unsupported checksum (md5).

    This test require disallowed 'MD5' checksum type from ALLOWED_CONTENT_CHECKSUMS settings.
    """
    # 1. create repo and remote
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)

    remote = self.remote_api.create(
        gen_rpm_remote(policy="on_demand", url=RPM_MD5_REPO_FIXTURE_URL)
    )
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # 2. Sync it — the task must fail on the forbidden md5 checksum.
    sync_response = self.repo_api.sync(
        repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)
    )
    with self.assertRaises(PulpTaskError) as ctx:
        monitor_task(sync_response.task)

    self.assertIn(
        "rpm-with-md5/bear-4.1-1.noarch.rpm contains forbidden checksum type",
        ctx.exception.task.error["description"],
    )
def do_test(self, policy):
    """Verify whether package manager can consume content from Pulp.

    Syncs a repo with the given download ``policy``, publishes and
    distributes it, writes a yum config pointing at the distribution, then
    installs a package from it and checks the installed package name.
    """
    client = api.Client(self.cfg, api.json_handler)
    body = gen_rpm_remote(policy=policy)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    sync(self.cfg, remote, repo)
    publication = publish(self.cfg, repo)
    self.addCleanup(client.delete, publication['_href'])
    body = gen_distribution()
    body['publication'] = publication['_href']
    distribution = client.using_handler(api.task_handler).post(
        RPM_DISTRIBUTION_PATH, body)
    self.addCleanup(client.delete, distribution['_href'])
    # Write a .repo file whose baseurl points at the distribution.
    repo_path = gen_yum_config_file(
        self.cfg,
        baseurl=urljoin(self.cfg.get_content_host_base_url(),
                        '//' + distribution['base_url']),
        name=repo['name'],
        repositoryid=repo['name'])
    cli_client = cli.Client(self.cfg)
    self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
    rpm_name = 'walrus'
    self.pkg_mgr.install(rpm_name)
    self.addCleanup(self.pkg_mgr.uninstall, rpm_name)
    # `rpm -q walrus` prints "walrus-<ver>-<rel>..."; the first dash-separated
    # field is the package name.
    rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
    self.assertEqual(rpm_name, rpm[0])
def test_all(self):
    """Sync/publish a repo that ``updateinfo.xml`` contains references.

    This test targets the following issue:

    `Pulp #3998 <https://pulp.plan.io/issues/3998>`_.
    """
    repo = self.client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['_href'])
    body = gen_rpm_remote(url=RPM_REFERENCES_UPDATEINFO_URL)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['_href'])
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['_href'])
    # Sync must have produced a repository version with the full summary.
    self.assertIsNotNone(repo['_latest_version_href'])
    content_summary = get_content_summary(repo)
    self.assertDictEqual(
        content_summary,
        RPM_FIXTURE_SUMMARY,
        content_summary
    )
    # Publishing must also succeed with the reference-bearing updateinfo.
    publication = publish(self.cfg, repo)
    self.addCleanup(self.client.delete, publication['_href'])
def _setup_repos(self, remote_url=RPM_UNSIGNED_FIXTURE_URL, summary=RPM_FIXTURE_SUMMARY):
    """Prepare for a copy test by creating two repos and syncing.

    Do the following:

    1. Create two repositories and a remote.
    2. Sync the remote.
    3. Assert that repository version is not None.
    4. Assert that the correct number of units were added and are present
       in the repo.

    Args:
        remote_url (str): fixture URL synced into the source repo.
        summary (dict): expected content summary after the sync.

    Returns:
        tuple: (source_repo, dest_repo) dicts; only source_repo is synced.
    """
    source_repo = self.client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, source_repo["pulp_href"])
    dest_repo = self.client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, dest_repo["pulp_href"])
    # Create a remote with the standard test fixture url.
    body = gen_rpm_remote(url=remote_url)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote["pulp_href"])
    # Sync the repository. A fresh repo starts at version 0.
    self.assertEqual(source_repo["latest_version_href"],
                     f"{source_repo['pulp_href']}versions/0/")
    sync(self.cfg, remote, source_repo)
    source_repo = self.client.get(source_repo["pulp_href"])
    # Check that we have the correct content counts.
    self.assertDictEqual(get_content_summary(source_repo), summary)
    self.assertDictEqual(get_added_content_summary(source_repo), summary)
    return source_repo, dest_repo
def do_test(self, policy):
    """Verify whether package manager can consume content from Pulp.

    Syncs a repo with the given download ``policy``, publishes and
    distributes it, registers the repo via ``dnf config-manager``, then
    installs a package from it and checks the installed package name.
    """
    client = api.Client(self.cfg, api.json_handler)
    body = gen_rpm_remote(policy=policy)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['pulp_href'])
    repo = client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['pulp_href'])
    sync(self.cfg, remote, repo)
    publication = publish(self.cfg, repo)
    self.addCleanup(client.delete, publication['pulp_href'])
    body = gen_distribution()
    body['publication'] = publication['pulp_href']
    distribution = client.using_handler(api.task_handler).post(
        RPM_DISTRIBUTION_PATH, body)
    self.addCleanup(client.delete, distribution['pulp_href'])
    cli_client = cli.Client(self.cfg)
    cli_client.run(('sudo', 'dnf', 'config-manager', '--add-repo',
                    distribution['base_url']))
    # dnf derives the repo id from the URL; match it with a glob prefix.
    repo_id = '*{}_'.format(distribution['base_path'])
    # Disable gpgcheck for the test repo so install does not need a key.
    cli_client.run(('sudo', 'dnf', 'config-manager', '--save',
                    '--setopt={}.gpgcheck=0'.format(repo_id), repo_id))
    self.addCleanup(
        cli_client.run,
        ('sudo', 'dnf', 'config-manager', '--disable', repo_id))
    rpm_name = 'walrus'
    self.pkg_mgr.install(rpm_name)
    self.addCleanup(self.pkg_mgr.uninstall, rpm_name)
    # `rpm -q walrus` prints "walrus-<ver>-<rel>..."; the first dash-separated
    # field is the package name.
    rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
    self.assertEqual(rpm_name, rpm[0])
def create_distribution(self, gpgcheck=0, repo_gpgcheck=0, has_signing_service=True):
    """Create a distribution with a repository that contains a signing service.

    Args:
        gpgcheck (int): passed through to publish() as the package gpgcheck flag.
        repo_gpgcheck (int): passed through to publish() as the repo gpgcheck flag.
        has_signing_service (bool): when True, attach the class's metadata
            signing service to the created repository.

    Returns:
        dict: the created distribution resource.
    """
    repo_params = {}
    if has_signing_service:
        repo_params["metadata_signing_service"] = self.metadata_signing_service["pulp_href"]
    repo = self.api_client.post(RPM_REPO_PATH, gen_repo(**repo_params))
    self.addCleanup(self.api_client.delete, repo["pulp_href"])
    remote = self.api_client.post(RPM_REMOTE_PATH, gen_rpm_remote())
    self.addCleanup(self.api_client.delete, remote["pulp_href"])
    sync(self.cfg, remote, repo)
    repo = self.api_client.get(repo["pulp_href"])
    # The sync must have produced a repository version to publish.
    self.assertIsNotNone(repo["latest_version_href"])
    publication = publish(self.cfg, repo, gpgcheck=gpgcheck, repo_gpgcheck=repo_gpgcheck)
    self.addCleanup(self.api_client.delete, publication["pulp_href"])
    body = gen_distribution()
    body["publication"] = publication["pulp_href"]
    distribution = self.api_client.using_handler(api.task_handler).post(
        RPM_DISTRIBUTION_PATH, body
    )
    self.addCleanup(self.api_client.delete, distribution["pulp_href"])
    return distribution
def test_publish_with_unsupported_checksum_type(self): """ Sync and try publish an RPM repository. - Sync repository with on_demand policy - Try to publish with 'md5' checksum type - Publish should fail because 'md5' is not allowed This test require disallowed 'MD5' checksum type from ALLOWED_CONTENT_CHECKSUMS settings. """ # 1. create repo and remote repo = self.repo_api.create(gen_repo()) self.addCleanup(self.repo_api.delete, repo.pulp_href) body = gen_rpm_remote(policy="on_demand") remote = self.remote_api.create(body) self.addCleanup(self.remote_api.delete, remote.pulp_href) # 2. Sync it repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href) sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data) monitor_task(sync_response.task) # 3. Publish and fail publish_data = RpmRpmPublication(repository=repo.pulp_href, package_checksum_type="md5") with self.assertRaises(ApiException) as ctx: self.publications.create(publish_data) self.assertIn("Checksum must be one of the allowed checksum types.", ctx.exception.body)
def create_distribution(self):
    """Create a distribution with a repository that contains a signing service.

    Returns:
        dict: the created distribution resource.
    """
    # The repo is created with the class's metadata signing service attached.
    repo = self.api_client.post(
        RPM_REPO_PATH,
        gen_repo(metadata_signing_service=self.metadata_signing_service['pulp_href']))
    self.addCleanup(self.api_client.delete, repo['pulp_href'])
    remote = self.api_client.post(RPM_REMOTE_PATH, gen_rpm_remote())
    self.addCleanup(self.api_client.delete, remote['pulp_href'])
    sync(self.cfg, remote, repo)
    repo = self.api_client.get(repo['pulp_href'])
    # The sync must have produced a repository version to publish.
    self.assertIsNotNone(repo['latest_version_href'])
    publication = publish(self.cfg, repo)
    self.addCleanup(self.api_client.delete, publication['pulp_href'])
    body = gen_distribution()
    body['publication'] = publication['pulp_href']
    distribution = self.api_client.using_handler(api.task_handler).post(
        RPM_DISTRIBUTION_PATH, body)
    self.addCleanup(self.api_client.delete, distribution['pulp_href'])
    return distribution
def test_checksum_constraint(self):
    """Verify checksum constraint test case.

    Do the following:

    1. Create and sync a repo using the following
       url=RPM_REFERENCES_UPDATEINFO_URL.
    2. Create and sync a secondary repo using the following
       url=RPM_UNSIGNED_FIXTURE_URL.
       Those urls have RPM packages with the same name.
    3. Assert that the task succeed.

    This test targets the following issue:

    * `Pulp #4170 <https://pulp.plan.io/issues/4170>`_
    * `Pulp #4255 <https://pulp.plan.io/issues/4255>`_
    """
    # Both fixtures ship same-named packages; syncing each into its own
    # repo must succeed and yield the full expected summary.
    for fixture_url in (RPM_REFERENCES_UPDATEINFO_URL, RPM_UNSIGNED_FIXTURE_URL):
        remote = self.remote_api.create(gen_rpm_remote(fixture_url))
        repo, remote = self.do_test(remote=remote)
        self.addCleanup(self.repo_api.delete, repo.pulp_href)
        self.addCleanup(self.remote_api.delete, remote.pulp_href)

        repo_dict = repo.to_dict()
        self.assertDictEqual(get_content_summary(repo_dict), RPM_FIXTURE_SUMMARY)
        self.assertDictEqual(get_added_content_summary(repo_dict), RPM_FIXTURE_SUMMARY)
def do_test(self, repository=None, remote=None):
    """Sync a repository.

    Args:
        repository (pulp_rpm.app.models.repository.RpmRepository):
            object of RPM repository
        remote (pulp_rpm.app.models.repository.RpmRemote):
            object of RPM Remote
    Returns (tuple):
        tuple of instances of
        pulp_rpm.app.models.repository.RpmRepository, pulp_rpm.app.models.repository.RpmRemote
    """
    if repository:
        repo = self.repo_api.read(repository.pulp_href)
    else:
        # A freshly created repository always starts at version 0.
        repo = self.repo_api.create(gen_repo())
        self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")

    remote = (
        self.remote_api.read(remote.pulp_href)
        if remote
        else self.remote_api.create(gen_rpm_remote())
    )

    sync_response = self.repo_api.sync(
        repo.pulp_href, RpmRepositorySyncURL(remote=remote.pulp_href)
    )
    monitor_task(sync_response.task)

    # Return fresh reads so callers observe the post-sync state.
    return self.repo_api.read(repo.pulp_href), self.remote_api.read(remote.pulp_href)
def test_all(self): """Sync and publish an RPM repository and verify the checksum.""" # Step 1 repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) remote = self.client.post(RPM_REMOTE_PATH, gen_rpm_remote()) self.addCleanup(self.client.delete, remote['_href']) # Step 2 sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) self.assertIsNotNone(repo['_latest_version_href']) # Step 3 publication = publish(self.cfg, repo) self.addCleanup(self.client.delete, publication['_href']) body = gen_distribution() body['publication'] = publication['_href'] distribution = self.client.using_handler(api.task_handler).post( RPM_DISTRIBUTION_PATH, body) self.addCleanup(self.client.delete, distribution['_href']) # Step 4 repo_md = ElementTree.fromstring( download_content_unit(self.cfg, distribution, 'repodata/repomd.xml')) update_info_content = ElementTree.fromstring( download_content_unit(self.cfg, distribution, self._get_updateinfo_xml_path(repo_md))) tags = {elem.tag for elem in update_info_content.iter()} self.assertNotIn('sum', tags, update_info_content)
def test_all(self):
    """Sync/publish a repo that ``updateinfo.xml`` contains references.

    This test targets the following issue:

    `Pulp #3998 <https://pulp.plan.io/issues/3998>`_.
    """
    repo = self.client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['_href'])
    body = gen_rpm_remote(url=RPM_REFERENCES_UPDATEINFO_URL)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['_href'])
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['_href'])
    # Sync must have produced a repository version with the full summary.
    self.assertIsNotNone(repo['_latest_version_href'])
    content_summary = get_content_summary(repo)
    self.assertDictEqual(content_summary, RPM_FIXTURE_SUMMARY, content_summary)
    # Publishing must also succeed with the reference-bearing updateinfo.
    publication = publish(self.cfg, repo)
    self.addCleanup(self.client.delete, publication['_href'])
def test_all(self):
    """Test whether content unit used by a repo version can be deleted.

    Do the following:

    1. Sync content to a repository.
    2. Attempt to delete a content unit present in a repository version.
       Assert that a HTTP exception was raised.
    3. Assert that number of content units present on the repository
       does not change after the attempt to delete one content unit.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)

    remote = client.post(RPM_REMOTE_PATH, gen_rpm_remote())
    self.addCleanup(client.delete, remote['_href'])
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])

    sync(cfg, remote, repo)
    repo = client.get(repo['_href'])
    content = get_content(repo)

    # Deleting a unit that belongs to a repo version must be rejected.
    with self.assertRaises(HTTPError):
        client.delete(choice(content)['_href'])

    # The repository's content set is unchanged by the failed delete.
    self.assertEqual(len(content), len(get_content(repo)))
def _sync(url=None):
    # Create a repo + remote for `url`, run a sync, and return the
    # refreshed repo object.
    repo = cls.repo_api.create(gen_repo())
    remote = cls.remote_api.create(gen_rpm_remote(url))
    sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
    monitor_task(cls.repo_api.sync(repo.pulp_href, sync_data).task)
    return cls.repo_api.read(repo.pulp_href)
def test_rpm_kickstart(self):
    """Sync repositories with the rpm plugin.

    This test targets the following issue:

    `Pulp #5202 <https://pulp.plan.io/issues/5202>`_

    In order to sync a repository a remote has to be associated within
    this repository. When a repository is created this version field is
    set as None. After a sync the repository version is updated.

    Do the following:

    1. Create a repository and a remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Assert that the correct number of units were added and are present
       in the repo.
    6. Sync the remote one more time.
    7. Assert that repository version is the same the previous one.
    8. Assert that the same number of packages are present.
    """
    repo = self.client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['pulp_href'])
    # Create a remote with the standard test fixture url.
    body = gen_rpm_remote(url=RPM_KICKSTART_FIXTURE_URL)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['pulp_href'])
    # Sync the repository. A fresh repo starts at version 0.
    self.assertEqual(repo["latest_version_href"], f"{repo['pulp_href']}versions/0/")
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['pulp_href'])
    for kickstart_content in get_content(repo)[RPM_KICKSTART_CONTENT_NAME]:
        self.addCleanup(self.client.delete, kickstart_content['pulp_href'])
    # Check that we have the correct content counts.
    self.assertIsNotNone(repo['latest_version_href'])
    self.assertDictEqual(get_content_summary(repo), RPM_KICKSTART_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo),
                         RPM_KICKSTART_FIXTURE_SUMMARY)
    # Sync the repository again.
    latest_version_href = repo['latest_version_href']
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['pulp_href'])
    artifacts = self.client.get(ARTIFACTS_PATH)
    # NOTE(review): 4 is presumably the artifact count produced by the
    # kickstart fixture — confirm against the fixture contents.
    self.assertEqual(artifacts['count'], 4, artifacts)
    # Check that nothing has changed since the last sync.
    self.assertEqual(latest_version_href, repo['latest_version_href'])
    self.assertDictEqual(get_content_summary(repo), RPM_KICKSTART_FIXTURE_SUMMARY)
def test_file_decriptors(self): """Test whether file descriptors are closed properly. This test targets the following issue: `Pulp #4073 <https://pulp.plan.io/issues/4073>`_ Do the following: 1. Check if 'lsof' is installed. If it is not, skip this test. 2. Create and sync a repo. 3. Run the 'lsof' command to verify that files in the path ``/var/lib/pulp/`` are closed after the sync. 4. Assert that issued command returns `0` opened files. """ cfg = config.get_config() client = api.Client(cfg, api.json_handler) cli_client = cli.Client(cfg, cli.echo_handler) # check if 'lsof' is available if cli_client.run(('which', 'lsof')).returncode != 0: raise unittest.SkipTest('lsof package is not present') repo = client.post(RPM_REPO_PATH, gen_repo()) self.addCleanup(client.delete, repo['pulp_href']) remote = client.post(RPM_REMOTE_PATH, gen_rpm_remote()) self.addCleanup(client.delete, remote['pulp_href']) sync(cfg, remote, repo) cmd = 'lsof -t +D {}'.format(MEDIA_PATH).split() response = cli_client.run(cmd).stdout self.assertEqual(len(response), 0, response)
def test_sync_diff_checksum_packages(self): """Sync two fixture content with same NEVRA and different checksum. Make sure we end up with the most recently synced content. Do the following: 1. Create two remotes with same content but different checksums. Sync the remotes one after the other. a. Sync remote with packages with SHA256: ``RPM_UNSIGNED_FIXTURE_URL``. b. Sync remote with packages with SHA512: ``RPM_SHA512_FIXTURE_URL``. 2. Make sure the latest content is only kept. This test targets the following issues: * `Pulp #4297 <https://pulp.plan.io/issues/4297>`_ * `Pulp #3954 <https://pulp.plan.io/issues/3954>`_ """ body = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL) remote = self.remote_api.create(body) # sync with SHA256 repo, remote = self.do_test(remote=remote) body = gen_rpm_remote(RPM_SHA512_FIXTURE_URL) remote = self.remote_api.create(body) # re-sync with SHA512 repo, remote = self.do_test(repo, remote) # add resources to clean up self.addCleanup(self.repo_api.delete, repo.pulp_href) self.addCleanup(self.remote_api.delete, remote.pulp_href) added_content = get_content(repo.to_dict())[RPM_PACKAGE_CONTENT_NAME] removed_content = get_removed_content( repo.to_dict())[RPM_PACKAGE_CONTENT_NAME] # In case of "duplicates" the most recent one is chosen, so the old # package is removed from and the new one is added to a repo version. self.assertEqual(len(added_content), RPM_PACKAGE_COUNT) self.assertEqual(len(removed_content), RPM_PACKAGE_COUNT) # Verifying whether the packages with first checksum is removed and second # is added. self.assertEqual(added_content[0]['checksum_type'], 'sha512') self.assertEqual(removed_content[0]['checksum_type'], 'sha256')
def test_sync_with_retention_and_modules(self):
    """Verify retention-policy behavior with modular content during sync.

    Do the following:

    1. Create a repository, and a remote.
    2. Sync the remote.
    3. Assert that the correct number of units were added and are
       present in the repo.
    4. Change "retain_package_versions" on the repository to 1 (retain
       only the latest version of each package).
    5. Sync the remote one more time.
    6. Assert that nothing was removed: the older package versions are
       part of modules, and modular packages should be ignored by the
       retention policy.
    """
    delete_orphans()
    repo = self.repo_api.create(gen_repo())
    self.addCleanup(self.repo_api.delete, repo.pulp_href)
    remote = self.remote_api.create(
        gen_rpm_remote(
            url=RPM_MODULES_STATIC_CONTEXT_FIXTURE_URL,
            policy="on_demand",
        ))
    self.addCleanup(self.remote_api.delete, remote.pulp_href)

    # optimize=False forces a full (non-shortcut) sync pass.
    task = self.sync(repository=repo, remote=remote, optimize=False)
    repo = self.repo_api.read(repo.pulp_href)
    self.addCleanup(delete_orphans)  # TODO: #2587

    # With the default retention policy, everything is retained and
    # nothing is tossed out.
    self.assertDictEqual(get_content_summary(repo.to_dict()), RPM_MODULAR_STATIC_FIXTURE_SUMMARY)
    self.assertDictEqual(get_added_content_summary(repo.to_dict()), RPM_MODULAR_STATIC_FIXTURE_SUMMARY)

    # Verify the number of packages parsed/skipped during the first sync.
    reports = self.get_progress_reports_by_code(task)
    self.assertEqual(reports["sync.parsing.packages"].total, RPM_MODULAR_PACKAGE_COUNT)
    self.assertEqual(reports["sync.skipped.packages"].total, 0)

    # Tighten the retention policy to a single version per package.
    repo_data = repo.to_dict()
    repo_data.update({"retain_package_versions": 1})
    self.repo_api.update(repo.pulp_href, repo_data)
    repo = self.repo_api.read(repo.pulp_href)

    task = self.sync(repository=repo, remote=remote, optimize=False)
    repo = self.repo_api.read(repo.pulp_href)

    # No RPMs (and no advisories etc.) were removed: the older package
    # versions are covered by modules, which the retention policy ignores.
    self.assertDictEqual(get_removed_content_summary(repo.to_dict()), {})

    # The second sync parses the same number of packages, skipping none.
    reports = self.get_progress_reports_by_code(task)
    self.assertEqual(reports["sync.parsing.packages"].total, RPM_MODULAR_PACKAGE_COUNT)
    self.assertEqual(reports["sync.skipped.packages"].total, 0)
def test_all(self): """Test whether a particular repository version can be published. 1. Create a repository with at least 2 repository versions. 2. Create a publication by supplying the latest ``repository_version``. 3. Assert that the publication ``repository_version`` attribute points to the latest repository version. 4. Create a publication by supplying the non-latest ``repository_version``. 5. Assert that the publication ``repository_version`` attribute points to the supplied repository version. 6. Assert that an exception is raised when providing two different repository versions to be published at same time. """ cfg = config.get_config() client = api.Client(cfg, api.json_handler) body = gen_rpm_remote() remote = client.post(RPM_REMOTE_PATH, body) self.addCleanup(client.delete, remote['_href']) repo = client.post(REPO_PATH, gen_repo()) self.addCleanup(client.delete, repo['_href']) sync(cfg, remote, repo) publisher = client.post(RPM_PUBLISHER_PATH, gen_rpm_publisher()) self.addCleanup(client.delete, publisher['_href']) # Step 1 repo = client.post(REPO_PATH, gen_repo()) self.addCleanup(client.delete, repo['_href']) for rpm_content in client.get(RPM_CONTENT_PATH)['results']: client.post( repo['_versions_href'], {'add_content_units': [rpm_content['_href']]} ) version_hrefs = tuple(ver['_href'] for ver in get_versions(repo)) non_latest = choice(version_hrefs[:-1]) # Step 2 publication = publish(cfg, publisher, repo) # Step 3 self.assertEqual(publication['repository_version'], version_hrefs[-1]) # Step 4 publication = publish(cfg, publisher, repo, non_latest) # Step 5 self.assertEqual(publication['repository_version'], non_latest) # Step 6 with self.assertRaises(HTTPError): body = { 'repository': repo['_href'], 'repository_version': non_latest } client.post(urljoin(publisher['_href'], 'publish/'), body)
def test_rpm(self): """Sync repositories with the rpm plugin. In order to sync a repository a remote has to be associated within this repository. When a repository is created this version field is set as None. After a sync the repository version is updated. Do the following: 1. Create a repository and a remote. 2. Assert that repository version is None. 3. Sync the remote. 4. Assert that repository version is not None. 5. Assert that the correct number of units were added and are present in the repo. 6. Sync the remote one more time. 7. Assert that repository version is different from the previous one. 8. Assert that the same number of are present and that no units were added. """ repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) # Create a remote with the standard test fixture url. body = gen_rpm_remote() remote = self.client.post(RPM_REMOTE_PATH, body) self.addCleanup(self.client.delete, remote['_href']) # Sync the repository. self.assertIsNone(repo['_latest_version_href']) sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) # Check that we have the correct content counts. self.assertIsNotNone(repo['_latest_version_href']) self.assertDictEqual( get_content_summary(repo), RPM_FIXTURE_SUMMARY ) self.assertDictEqual( get_added_content_summary(repo), RPM_FIXTURE_SUMMARY ) # Sync the repository again. latest_version_href = repo['_latest_version_href'] sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) # Check that nothing has changed since the last sync. self.assertNotEqual(latest_version_href, repo['_latest_version_href']) self.assertDictEqual( get_content_summary(repo), RPM_FIXTURE_SUMMARY ) self.assertDictEqual(get_added_content_summary(repo), {})
def test_02_create_same_name(self): """Try to create a second remote with an identical name. See: `Pulp Smash #1055 <https://github.com/PulpQE/pulp-smash/issues/1055>`_. """ body = gen_rpm_remote() body['name'] = self.remote['name'] with self.assertRaises(HTTPError): self.client.post(RPM_REMOTE_PATH, body)
def test_all(self): """Verify whether is possible to create a remote without a URL. This test targets the following issues: * `Pulp #3395 <https://pulp.plan.io/issues/3395>`_ * `Pulp Smash #984 <https://github.com/PulpQE/pulp-smash/issues/984>`_ """ body = gen_rpm_remote() del body['url'] with self.assertRaises(HTTPError): api.Client(config.get_config()).post(RPM_REMOTE_PATH, body)
def test_all(self): """Test whether a particular repository version can be published. 1. Create a repository with at least 2 repository versions. 2. Create a publication by supplying the latest ``repository_version``. 3. Assert that the publication ``repository_version`` attribute points to the latest repository version. 4. Create a publication by supplying the non-latest ``repository_version``. 5. Assert that the publication ``repository_version`` attribute points to the supplied repository version. 6. Assert that an exception is raised when providing two different repository versions to be published at same time. """ body = gen_rpm_remote() remote = self.client.post(RPM_REMOTE_PATH, body) self.addCleanup(self.client.delete, remote['_href']) repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) sync(self.cfg, remote, repo) # Step 1 repo = self.client.get(repo['_href']) for rpm_content in get_content(repo)[RPM_PACKAGE_CONTENT_NAME]: self.client.post( repo['_versions_href'], {'add_content_units': [rpm_content['_href']]} ) version_hrefs = tuple(ver['_href'] for ver in get_versions(repo)) non_latest = choice(version_hrefs[:-1]) # Step 2 publication = publish(self.cfg, repo) # Step 3 self.assertEqual(publication['repository_version'], version_hrefs[-1]) # Step 4 publication = publish(self.cfg, repo, non_latest) # Step 5 self.assertEqual(publication['repository_version'], non_latest) # Step 6 with self.assertRaises(HTTPError): body = { 'repository': repo['_href'], 'repository_version': non_latest } self.client.post(RPM_PUBLICATION_PATH, body)
def test_all(self): """Verify whether package manager can consume content from Pulp. This test targets the following issue: `Pulp #3204 <https://pulp.plan.io/issues/3204>`_ """ cfg = config.get_config() try: cli.PackageManager._get_package_manager(cfg) # pylint:disable=protected-access except NoKnownPackageManagerError: raise unittest.SkipTest('This test requires dnf or yum.') client = api.Client(cfg, api.json_handler) body = gen_rpm_remote() remote = client.post(RPM_REMOTE_PATH, body) self.addCleanup(client.delete, remote['_href']) repo = client.post(REPO_PATH, gen_repo()) self.addCleanup(client.delete, repo['_href']) sync(cfg, remote, repo) publisher = client.post(RPM_PUBLISHER_PATH, gen_rpm_publisher()) self.addCleanup(client.delete, publisher['_href']) publication = publish(cfg, publisher, repo) self.addCleanup(client.delete, publication['_href']) body = gen_distribution() body['publication'] = publication['_href'] distribution = client.post(DISTRIBUTION_PATH, body) self.addCleanup(client.delete, distribution['_href']) repo_path = gen_yum_config_file( cfg, baseurl=urljoin(cfg.get_base_url(), urljoin( 'pulp/content/', distribution['base_path'] )), name=repo['name'], repositoryid=repo['name'] ) cli_client = cli.Client(cfg) self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True) rpm_name = 'walrus' pkg_mgr = cli.PackageManager(cfg) pkg_mgr.install(rpm_name) self.addCleanup(pkg_mgr.uninstall, rpm_name) rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-') self.assertEqual(rpm_name, rpm[0])
def do_test(self, url): """Sync and publish an RPM repository given a feed URL.""" repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) remote = self.client.post(RPM_REMOTE_PATH, gen_rpm_remote(url=url)) self.addCleanup(self.client.delete, remote['_href']) sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) self.assertIsNotNone(repo['_latest_version_href']) publication = publish(self.cfg, repo) self.addCleanup(self.client.delete, publication['_href'])
def do_publish(self, download_policy): """Publish repository synced with lazy ``download_policy``.""" repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) remote = self.client.post( RPM_REMOTE_PATH, gen_rpm_remote(policy=download_policy) ) self.addCleanup(self.client.delete, remote['_href']) sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) publication = publish(self.cfg, repo) self.assertIsNotNone(publication['repository_version'], publication)
def do_publish(self, download_policy): """Publish repository synced with lazy ``download_policy``.""" repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) remote = self.client.post( RPM_REMOTE_PATH, gen_rpm_remote(policy=download_policy) ) self.addCleanup(self.client.delete, remote['_href']) sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) publisher = self.client.post(RPM_PUBLISHER_PATH, gen_rpm_publisher()) self.addCleanup(self.client.delete, publisher['_href'])
def do_sync(self, download_policy): """Sync repositories with the different ``download_policy``. Do the following: 1. Create a repository, and a remote. 2. Assert that repository version is None. 3. Sync the remote. 4. Assert that repository version is not None. 5. Assert that the correct number of possible units to be downloaded were shown. 6. Sync the remote one more time in order to create another repository version. 7. Assert that repository version is different from the previous one. 8. Assert that the same number of units are shown, and after the second sync no extra units should be shown, since the same remote was synced again. """ repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) remote = self.client.post( RPM_REMOTE_PATH, gen_rpm_remote(policy=download_policy) ) self.addCleanup(self.client.delete, remote['_href']) # Sync the repository. self.assertIsNone(repo['_latest_version_href']) sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) self.assertIsNotNone(repo['_latest_version_href']) self.assertDictEqual(get_content_summary(repo), RPM_FIXTURE_SUMMARY) self.assertDictEqual( get_added_content_summary(repo), RPM_FIXTURE_SUMMARY ) # Sync the repository again. latest_version_href = repo['_latest_version_href'] sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) self.assertNotEqual(latest_version_href, repo['_latest_version_href']) self.assertDictEqual(get_content_summary(repo), RPM_FIXTURE_SUMMARY) self.assertDictEqual(get_added_content_summary(repo), {})
def _gen_verbose_remote(): """Return a semi-random dict for use in defining a remote. For most tests, it's desirable to create remotes with as few attributes as possible, so that the tests can specifically target and attempt to break specific features. This module specifically targets remotes, so it makes sense to provide as many attributes as possible. Note that 'username' and 'password' are write-only attributes. """ attrs = gen_rpm_remote() attrs.update({ 'password': utils.uuid4(), 'username': utils.uuid4(), 'validate': choice((False, True)), }) return attrs
def test_all(self): """Sync and publish an RPM repository and verify the checksum.""" # Step 1 repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) remote = self.client.post(RPM_REMOTE_PATH, gen_rpm_remote()) self.addCleanup(self.client.delete, remote['_href']) # Step 2 sync(self.cfg, remote, repo) repo = self.client.get(repo['_href']) self.assertIsNotNone(repo['_latest_version_href']) # Step 3 publication = publish(self.cfg, repo) self.addCleanup(self.client.delete, publication['_href']) body = gen_distribution() body['publication'] = publication['_href'] distribution = self.client.using_handler(api.task_handler).post( DISTRIBUTION_PATH, body ) self.addCleanup(self.client.delete, distribution['_href']) # Step 4 repo_md = ElementTree.fromstring( download_content_unit(self.cfg, distribution, 'repodata/repomd.xml') ) update_info_content = ElementTree.fromstring( download_content_unit( self.cfg, distribution, self._get_updateinfo_xml_path(repo_md) ) ) tags = {elem.tag for elem in update_info_content.iter()} self.assertNotIn('sum', tags, update_info_content)
def test_sync_large_repo(self): """Sync large EPEL repository.""" cfg = config.get_config() client = api.Client(cfg, api.page_handler) repo = client.post(REPO_PATH, gen_repo()) self.addCleanup(client.delete, repo['_href']) remote = client.post( RPM_REMOTE_PATH, gen_rpm_remote(url=RPM_EPEL_URL) ) self.addCleanup(client.delete, remote['_href']) # Sync the repository. self.assertIsNone(repo['_latest_version_href']) sync(cfg, remote, repo) repo = client.get(repo['_href']) content_summary = get_content_summary(repo) self.assertGreater( content_summary[RPM_PACKAGE_CONTENT_NAME], 0, content_summary )
def test_all(self):
    """Sync two copies of the same UpdateRecords.

    Make sure we end up with only one copy.

    Do the following:

    1. Create a repository and a remote.
    2. Sync the remote.
    3. Assert that the content summary matches what is expected.
    4. Create a new remote w/ using fixture containing updated errata
       (updaterecords with the ID as the existing updaterecord content,
       but different metadata).
    5. Sync the remote again.
    6. Assert that repository version is different from the previous one
       but has the same content summary.
    7. Assert that the updaterecords have changed since the last sync.
    """
    client = api.Client(self.cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])

    # Create a remote with the unsigned RPM fixture url.
    # We need to use the unsigned fixture because the one used down below
    # has unsigned RPMs. Signed and unsigned units have different hashes,
    # so they're seen as different units.
    body = gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])

    # Sync the repository: before the first sync there is no version.
    self.assertIsNone(repo['_latest_version_href'])
    sync(self.cfg, remote, repo)
    repo = client.get(repo['_href'])
    self.assertDictEqual(
        get_content_summary(repo),
        RPM_FIXTURE_SUMMARY
    )

    # Save a copy of the original updateinfo, keyed by advisory id.
    original_updaterecords = {
        content['id']: content
        for content in get_content(repo)[RPM_UPDATE_CONTENT_NAME]
    }

    # Create a remote with a different test fixture, one containing mutated
    # updateinfo.
    body = gen_rpm_remote(url=RPM_UPDATED_UPDATEINFO_FIXTURE_URL)
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])

    # Sync the repository again: overall counts stay the same, but the
    # mutated updaterecords replace their predecessors.
    sync(self.cfg, remote, repo)
    repo = client.get(repo['_href'])
    self.assertDictEqual(
        get_content_summary(repo),
        RPM_FIXTURE_SUMMARY
    )
    # NOTE(review): 4 is presumably the number of updaterecords that differ
    # between the two fixtures — confirm against the fixture data.
    self.assertEqual(
        len(get_added_content(repo)[RPM_UPDATE_CONTENT_NAME]), 4
    )
    self.assertEqual(
        len(get_removed_content(repo)[RPM_UPDATE_CONTENT_NAME]), 4
    )

    # Test that the updateinfo have been modified.
    mutated_updaterecords = {
        content['id']: content
        for content in get_content(repo)[RPM_UPDATE_CONTENT_NAME]
    }
    self.assertNotEqual(mutated_updaterecords, original_updaterecords)
    self.assertEqual(
        mutated_updaterecords[RPM_UPDATERECORD_ID]['description'],
        'Updated Gorilla_Erratum and the updated date contains timezone',
        mutated_updaterecords[RPM_UPDATERECORD_ID]
    )
def test_all(self):
    """Verify whether content served by pulp can be downloaded.

    The process of publishing content is more involved in Pulp 3 than it
    was under Pulp 2. Given a repository, the process is as follows:

    1. Create a publication from the repository. (The latest repository
       version is selected if no version is specified.) A publication is a
       repository version plus metadata.
    2. Create a distribution from the publication. The distribution
       defines at which URLs a publication is available, e.g.
       ``http://example.com/content/foo/`` and
       ``http://example.com/content/bar/``.

    Do the following:

    1. Create, populate, publish, and distribute a repository.
    2. Select a random content unit in the distribution. Download that
       content unit from Pulp, and verify that the content unit has the
       same checksum when fetched directly from Pulp-Fixtures.

    This test targets the following issues:

    * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
    * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)

    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])

    body = gen_rpm_remote()
    remote = client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])

    sync(cfg, remote, repo)
    repo = client.get(repo['_href'])

    # Create a publication.
    publication = publish(cfg, repo)
    self.addCleanup(client.delete, publication['_href'])

    # Create a distribution.
    body = gen_distribution()
    body['publication'] = publication['_href']
    distribution = client.using_handler(api.task_handler).post(
        DISTRIBUTION_PATH, body
    )
    self.addCleanup(client.delete, distribution['_href'])

    # Pick a content unit, and download it from both Pulp Fixtures…
    unit_path = choice(get_rpm_package_paths(repo))
    fixtures_hash = hashlib.sha256(
        utils.http_get(urljoin(RPM_UNSIGNED_FIXTURE_URL, unit_path))
    ).hexdigest()

    # …and Pulp, then compare the two SHA256 digests.
    content = download_content_unit(cfg, distribution, unit_path)
    pulp_hash = hashlib.sha256(content).hexdigest()
    self.assertEqual(fixtures_hash, pulp_hash)
def test_all(self):
    """Sync two copies of the same packages.

    Make sure we end up with only one copy.

    Do the following:

    1. Create a repository and a remote.
    2. Sync the remote.
    3. Assert that the content summary matches what is expected.
    4. Create a new remote w/ using fixture containing updated errata
       (packages with the same NEVRA as the existing package content, but
       different pkgId).
    5. Sync the remote again.
    6. Assert that repository version is different from the previous one
       but has the same content summary.
    7. Assert that the packages have changed since the last sync.
    """
    repo = self.client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['_href'])

    # Create a remote with the unsigned RPM fixture url.
    body = gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['_href'])

    # Sync the repository: before the first sync there is no version.
    self.assertIsNone(repo['_latest_version_href'])
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['_href'])
    self.assertDictEqual(
        get_content_summary(repo),
        RPM_FIXTURE_SUMMARY
    )

    # Save a copy of the original packages, keyed by NEVRA tuple.
    original_packages = {
        (content['name'], content['epoch'], content['version'],
         content['release'], content['arch']): content
        for content in get_content(repo)[RPM_PACKAGE_CONTENT_NAME]
    }

    # Create a remote with a different test fixture with the same NEVRA but
    # different digests.
    body = gen_rpm_remote(url=RPM_SIGNED_FIXTURE_URL)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['_href'])

    # Sync the repository again: counts stay the same even though every
    # package was replaced.
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['_href'])
    self.assertDictEqual(
        get_content_summary(repo),
        RPM_FIXTURE_SUMMARY
    )

    # In case of "duplicates" the most recent one is chosen, so the old
    # package is removed from and the new one is added to a repo version.
    self.assertEqual(
        len(get_added_content(repo)[RPM_PACKAGE_CONTENT_NAME]),
        RPM_PACKAGES_COUNT,
        get_added_content(repo)[RPM_PACKAGE_CONTENT_NAME]
    )
    self.assertEqual(
        len(get_removed_content(repo)[RPM_PACKAGE_CONTENT_NAME]),
        RPM_PACKAGES_COUNT,
        get_removed_content(repo)[RPM_PACKAGE_CONTENT_NAME]
    )

    # Test that the packages have been modified: every NEVRA now maps to a
    # unit with a different pkgId (digest).
    mutated_packages = {
        (content['name'], content['epoch'], content['version'],
         content['release'], content['arch']): content
        for content in get_content(repo)[RPM_PACKAGE_CONTENT_NAME]
    }
    for nevra in original_packages:
        with self.subTest(pkg=nevra):
            self.assertNotEqual(
                original_packages[nevra]['pkgId'],
                mutated_packages[nevra]['pkgId'],
                original_packages[nevra]['pkgId']
            )