def test_all(self):
    """Test whether sync/publish for content already in Pulp.

    Sync and publish the same repository twice; the second iteration
    exercises the code path where all content already exists in Pulp.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.page_handler)
    # step 1. delete orphans to assure that no content is present on disk,
    # or database.
    delete_orphans(cfg)
    remote = client.post(
        FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)
    )
    self.addCleanup(client.delete, remote['_href'])
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(client.delete, publisher['_href'])
    # Two passes: the second sync/publish works entirely with content that
    # is already present in Pulp.
    for _ in range(2):
        sync(cfg, remote, repo)
        # Re-read the repo so publish() sees the new repository version.
        repo = client.get(repo['_href'])
        publish(cfg, publisher, repo)
def setUpClass(cls):
    """Set up class-wide fixtures: config, a clean Pulp, and file content."""
    cfg = config.get_config()
    cls.cfg = cfg
    # Start from a clean slate so the fixture content is the only content.
    delete_orphans(cfg)
    populate_pulp(cfg, url=FILE_LARGE_FIXTURE_MANIFEST_URL)
    client = api.Client(cfg, api.page_handler)
    cls.client = client
    cls.content = client.get(FILE_CONTENT_PATH)
def test_single_request_upload(self):
    """Test single request upload."""
    cfg = config.get_config()
    # Pulp does not support single request upload for a RPM already present
    # in Pulp.
    delete_orphans(cfg)
    file = {'file': utils.http_get(RPM_UNSIGNED_URL)}
    client = api.Client(cfg, api.page_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    # Upload the RPM and add it to the repository in a single request.
    client.post(
        urljoin(BASE_PATH, 'rpm/upload/'),
        files=file,
        data={'repository': repo['_href']}
    )
    repo = client.get(repo['_href'])
    # Assertion about repo version.
    self.assertIsNotNone(repo['_latest_version_href'], repo)
    # Assertions about artifacts: exactly one, with the expected checksum.
    artifact = client.get(ARTIFACTS_PATH)
    self.assertEqual(len(artifact), 1, artifact)
    self.assertEqual(
        artifact[0]['sha256'],
        hashlib.sha256(file['file']).hexdigest(),
        artifact
    )
    # Assertion about content unit.
    content = client.get(RPM_CONTENT_PATH)
    self.assertEqual(len(content), 1, content)
def setUpClass(cls):
    """Delete orphans, then cache the fixture file plus its digest and size."""
    cfg = config.get_config()
    delete_orphans(cfg)
    cls.client = api.Client(cfg, api.json_handler)
    # Download once; derive every class attribute from the same bytes.
    payload = utils.http_get(FILE_URL)
    cls.file = {'file': payload}
    cls.file_sha256 = hashlib.sha256(payload).hexdigest()
    cls.file_size = len(payload)
def setUpClass(cls):
    """Create class-wide fixtures: config, client, and one uploaded artifact."""
    cfg = config.get_config()
    cls.cfg = cfg
    delete_orphans(cfg)
    cls.content_unit = {}
    cls.client = api.Client(cfg, api.json_handler)
    signed_rpm = utils.http_get(RPM_SIGNED_URL)
    cls.artifact = cls.client.post(ARTIFACTS_PATH, files={'file': signed_rpm})
def setUpClass(cls):
    """Create class-wide variables and seed Pulp with content."""
    cfg = config.get_config()
    cls.cfg = cfg
    delete_orphans(cfg)
    cls.client = api.Client(cfg, api.json_handler)
    populate_pulp(cfg)
    # Only the first two content units are needed by the tests.
    cls.contents = cls.client.get(FILE_CONTENT_PATH)['results'][:2]
def test_clean_orphan_artifact(self):
    """Test whether orphan artifacts units can be clean up."""
    repo = self.api_client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.api_client.delete, repo['_href'])
    # Upload an artifact without attaching it to any content unit, so it is
    # an orphan from the start.
    files = {'file': utils.http_get(FILE2_URL)}
    artifact = self.api_client.post(ARTIFACTS_PATH, files=files)
    # Verify that the artifact file exists on disk before the cleanup.
    cmd = self.sudo + ('ls', artifact['file'])
    self.cli_client.run(cmd)
    delete_orphans()
    # After the orphan cleanup the file must be gone, so `ls` fails.
    with self.assertRaises(CalledProcessError):
        self.cli_client.run(cmd)
def test_clean_orphan_content_unit(self):
    """Test whether orphan content units can be clean up.

    Do the following:

    1. Create, and sync a repo.
    2. Remove a content unit from the repo. This will create a second
       repository version, and create an orphan content unit.
    3. Assert that content unit that was removed from the repo and its
       artifact are present on disk.
    4. Delete orphans.
    5. Assert that the orphan content unit was cleaned up, and its artifact
       is not present on disk.
    """
    repo = self.api_client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.api_client.delete, repo['_href'])
    body = gen_file_remote()
    remote = self.api_client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(self.api_client.delete, remote['_href'])
    sync(self.cfg, remote, repo)
    repo = self.api_client.get(repo['_href'])
    # Pick an arbitrary content unit to remove.
    content = choice(get_content(repo)[FILE_CONTENT_NAME])
    # Create an orphan content unit.
    self.api_client.post(
        repo['_versions_href'],
        {'remove_content_units': [content['_href']]}
    )
    # Verify that the artifact is present on disk.
    artifact_path = self.api_client.get(content['_artifact'])['file']
    cmd = ('ls', artifact_path)
    self.cli_client.run(cmd, sudo=True)
    # Delete first repo version. The previous removed content unit will be
    # an orphan.
    delete_version(repo, get_versions(repo)[0]['_href'])
    content_units = self.api_client.get(FILE_CONTENT_PATH)['results']
    self.assertIn(content, content_units)
    delete_orphans()
    content_units = self.api_client.get(FILE_CONTENT_PATH)['results']
    self.assertNotIn(content, content_units)
    # Verify that the artifact was removed from disk.
    # NOTE(review): the earlier `ls` ran with sudo=True but this one does
    # not -- confirm the missing sudo is intentional.
    with self.assertRaises(CalledProcessError):
        self.cli_client.run(cmd)
def test_raise_error(self):
    """Create a duplicate content unit using same artifact and filename."""
    delete_orphans(self.cfg)
    files = {'file': utils.http_get(RPM_UNSIGNED_URL)}
    artifact = self.client.post(ARTIFACTS_PATH, files=files)
    attrs = {
        '_artifact': artifact['_href'],
        'filename': RPM_PACKAGE_FILENAME
    }
    # create first content unit.
    self.client.post(RPM_CONTENT_PATH, attrs)
    # using the same attrs used to create the first content unit.
    # echo_handler returns the raw response instead of raising, so the
    # error payload can be inspected below.
    response = api.Client(self.cfg, api.echo_handler).post(
        RPM_CONTENT_PATH, attrs
    )
    with self.assertRaises(HTTPError):
        response.raise_for_status()
    # The duplicate-unit error message should mention each of these fields.
    keywords = (
        'name',
        'epoch',
        'version',
        'release',
        'arch',
        'checksum_type',
        'pkgId',
    )
    for key in keywords:
        self.assertIn(
            key.lower(),
            response.json()['non_field_errors'][0].lower(),
            response.json()
        )
def setUpClass(cls):
    """Prepare shared fixtures: a clean Pulp, a content API, one artifact."""
    delete_orphans()
    cls.content_unit = {}
    client = gen_python_client()
    cls.python_content_api = ContentPackagesApi(client)
    cls.artifact = gen_artifact()
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication without supplying a repository_version (i.e.
       take the latest ``repository_version``).
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest
       ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    delete_orphans(cfg)
    client = api.Client(cfg, api.json_handler)
    # Sync a source repository so there is cookbook content to work with.
    body = gen_remote(fixture_u1.url, cookbooks={fixture_u1.example1_name: ""})
    remote = client.post(COOKBOOK_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote["_href"])
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["_href"])
    sync(cfg, remote, repo, mirror=True)
    repo = client.get(repo["_href"])
    repo_content = get_cookbook_content(repo)
    self.assertTrue(repo_content)
    # Step 1: add the units one at a time, creating one version per unit.
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["_href"])
    for cookbook in repo_content:
        client.post(repo["_versions_href"], {"add_content_units": [cookbook["_href"]]})
    version_hrefs = tuple(ver["_href"] for ver in get_versions(repo))
    non_latest = choice(version_hrefs[:-1])
    # Step 2
    publication = create_publication(cfg, repo)
    # Step 3
    self.assertEqual(publication["repository_version"], version_hrefs[-1])
    # Step 4
    publication = create_publication(cfg, repo, version_href=non_latest)
    # Step 5
    self.assertEqual(publication["repository_version"], non_latest)
    # Step 6: supplying both a repository and a repository_version is an
    # error.
    with self.assertRaises(HTTPError):
        body = {
            "repository": repo["_href"],
            "repository_version": non_latest
        }
        client.post(COOKBOOK_PUBLICATION_PATH, body)
def tearDownClass(cls):
    """Remove any content left orphaned by this test class."""
    delete_orphans()
def tearDownClass(cls):
    """Remove resources created by this test class via orphan cleanup."""
    delete_orphans(cls.cfg)
def tearDownClass(cls):
    """Delete resources made in setUpClass.

    The addCleanup feature is per-test, so class-level resources must be
    released here explicitly (addCleanup does not work with setUpClass).
    """
    for resource in (cls.from_repo, cls.remote):
        cls.client.delete(resource['pulp_href'])
    delete_orphans(cls.cfg)
def setUpClass(cls):
    """Create class-wide variables, and clean orphan units."""
    cfg = config.get_config()
    cls.cfg = cfg
    delete_orphans(cfg)
    cls.client = api.Client(cfg, api.page_handler)
def setUpClass(cls):
    """Create class-wide variables and delete orphans.

    1. Create a repository.
    2. Create a remote pointing to external registry with policy=on_demand.
    3. Sync the repository using the remote and re-read the repo data.
    4. Create a docker distribution to serve the repository
    5. Create another docker distribution to the serve the repository
       version

    This tests targets the following issue:

    * `Pulp #4460 <https://pulp.plan.io/issues/4460>`_
    """
    cls.cfg = config.get_config()
    cls.client = api.Client(cls.cfg, api.page_handler)
    cls.teardown_cleanups = []
    delete_orphans(cls.cfg)
    with contextlib.ExitStack() as stack:
        # ensure tearDownClass runs if an error occurs here
        stack.callback(cls.tearDownClass)
        # Step 1
        _repo = cls.client.post(REPO_PATH, gen_repo())
        cls.teardown_cleanups.append((cls.client.delete, _repo['_href']))
        # Step 2
        cls.remote = cls.client.post(
            DOCKER_REMOTE_PATH, gen_docker_remote(policy='on_demand'))
        cls.teardown_cleanups.append(
            (cls.client.delete, cls.remote['_href']))
        # Step 3
        sync(cls.cfg, cls.remote, _repo)
        cls.repo = cls.client.get(_repo['_href'])
        # Record the artifact count after the on_demand sync so tests can
        # compare against it later.
        cls.artifact_count = len(cls.client.get(ARTIFACTS_PATH))
        # Step 4.
        response_dict = cls.client.using_handler(api.task_handler).post(
            DOCKER_DISTRIBUTION_PATH,
            gen_distribution(repository=cls.repo['_href']))
        distribution_href = response_dict['_href']
        cls.distribution_with_repo = cls.client.get(distribution_href)
        cls.teardown_cleanups.append(
            (cls.client.delete, cls.distribution_with_repo['_href']))
        # Step 5.
        response_dict = cls.client.using_handler(api.task_handler).post(
            DOCKER_DISTRIBUTION_PATH,
            gen_distribution(
                repository_version=cls.repo['_latest_version_href']))
        distribution_href = response_dict['_href']
        cls.distribution_with_repo_version = cls.client.get(
            distribution_href)
        cls.teardown_cleanups.append(
            (cls.client.delete,
             cls.distribution_with_repo_version['_href']))
        # remove callback if everything goes well
        stack.pop_all()
def tearDownClass(cls):
    """Release every resource created for this test class."""
    delete_response = cls.file_repositories_api.delete(cls.repo.pulp_href)
    # Repository deletion runs as a task; block until it completes.
    monitor_task(delete_response.task)
    cls.cert_guards_api.delete(cls.content_guard.pulp_href)
    delete_orphans()
def setUpClass(cls):
    """Prepare shared fixtures: a clean Pulp and one uploaded artifact."""
    super().setUpClass()
    # Start with no orphaned content lying around.
    delete_orphans()
    cls.content_unit = {}
    cls.artifact = gen_artifact()
def tearDown(self):
    """Remove orphaned content after each test."""
    delete_orphans()
def setUpClass(cls):
    """Prepare shared fixtures: a clean Pulp and the class's artifact."""
    delete_orphans()
    cls.content_unit = {}
    # Upload the artifact from the URL configured on the subclass.
    cls.artifact = gen_artifact(cls.CONTENT_URL)
def setUpClass(cls):
    """Download the fixture file and prepare the upload attributes."""
    delete_orphans()
    cls.content_unit = {}
    # Both the raw file and its attrs come from subclass-provided hooks.
    cls.file = utils.http_get(cls.CONTENT_URL)
    cls.attrs = cls.gen_content_upload_attrs()
def tearDownClass(cls):
    """Clean generated resources."""
    for api_obj, href in (
        (cls.repositories_api, cls.repository.pulp_href),
        (cls.remotes_api, cls.remote.pulp_href),
    ):
        api_obj.delete(href)
    delete_orphans()
def rpm_publish(self, url=RPM_KICKSTART_FIXTURE_URL, policy="on_demand"):
    """Publish repositories with the rpm plugin.

    This test targets the following issue:

    `Pulp #5630 <https://pulp.plan.io/issues/5630>`_.

    In order to sync a repository a remote has to be associated within
    this repository. When a repository is created this version field is
    set as None. After a sync the repository version is updated.

    Do the following:

    1. Create a repository and a remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Assert that distribution_tree units were added and are present in
       the repo.
    6. Publish

    Returns the href of the created publication (first created resource of
    the publish task).
    """
    delete_orphans(self.cfg)
    repo = self.client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo["pulp_href"])
    # Create a remote with the standard test fixture url.
    body = gen_rpm_remote(url=url, policy=policy)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote["pulp_href"])
    # Sync the repository. A fresh repo sits at version 0.
    self.assertEqual(repo["latest_version_href"], f"{repo['pulp_href']}versions/0/")
    data = {"remote": remote["pulp_href"]}
    response = self.client.using_handler(api.json_handler).post(
        urljoin(repo["pulp_href"], "sync/"), data
    )
    sync_task = self.client.get(response["task"])
    # Report the task's queue wait vs. execution time for benchmarking.
    created_at = self.parse_date_from_string(sync_task["pulp_created"])
    started_at = self.parse_date_from_string(sync_task["started_at"])
    finished_at = self.parse_date_from_string(sync_task["finished_at"])
    task_duration = finished_at - started_at
    waiting_time = started_at - created_at
    print(
        "\n-> Sync => Waiting time (s): {wait} | Service time (s): {service}".format(
            wait=waiting_time.total_seconds(), service=task_duration.total_seconds()
        )
    )
    repo = self.client.get(repo["pulp_href"])
    # Check that we have the correct content counts.
    self.assertIsNotNone(repo["latest_version_href"])
    self.assertIn(
        RPM_PACKAGE_CONTENT_NAME,
        get_content_summary(repo).keys(),
    )
    self.assertIn(
        RPM_PACKAGE_CONTENT_NAME,
        get_added_content_summary(repo).keys(),
    )
    repo = self.client.get(repo["pulp_href"])
    # Publishing
    body = {"repository": repo["pulp_href"]}
    response = self.client.using_handler(api.json_handler).post(RPM_PUBLICATION_PATH, body)
    publish_task = self.client.get(response["task"])
    # Same wait/service timing report for the publish task.
    created_at = self.parse_date_from_string(publish_task["pulp_created"])
    started_at = self.parse_date_from_string(publish_task["started_at"])
    finished_at = self.parse_date_from_string(publish_task["finished_at"])
    task_duration = finished_at - started_at
    waiting_time = started_at - created_at
    print(
        "\n-> Publish => Waiting time (s): {wait} | Service time (s): {service}".format(
            wait=waiting_time.total_seconds(), service=task_duration.total_seconds()
        )
    )
    return publish_task["created_resources"][0]
def setUpClass(cls):
    """Create class-wide variables."""
    cfg = config.get_config()
    cls.cfg = cfg
    delete_orphans(cfg)
    cls.client = api.Client(cfg, api.page_handler)
    # Download the unsigned RPM once for all tests.
    cls.file = {'file': utils.http_get(RPM_UNSIGNED_URL)}
def setUpClass(cls):
    """Prepare shared fixtures: a clean Pulp, a collections API, one artifact."""
    delete_orphans()
    cls.content_unit = {}
    client = gen_galaxy_client()
    cls.galaxy_content_api = ApiGalaxyCollectionsApi(client)
    cls.artifact = gen_artifact()
def setUp(self):
    """Ensure each test starts with no orphaned content."""
    delete_orphans()
def tearDownClass(cls):
    """Delete the created namespace, then clean up orphans."""
    cls.namespaces_api.delete(cls.namespace.pulp_href)
    delete_orphans()
def rpm_sync(self, url=RPM_KICKSTART_FIXTURE_URL, policy='on_demand', check_dist_tree=True):
    """Sync repositories with the rpm plugin.

    This test targets the following issue:

    `Pulp #5506 <https://pulp.plan.io/issues/5506>`_.

    In order to sync a repository a remote has to be associated within
    this repository. When a repository is created this version field is
    set as None. After a sync the repository version is updated.

    Do the following:

    1. Create a repository and a remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Assert that distribution_tree units were added and are present in
       the repo.
    """
    delete_orphans(self.cfg)
    repo = self.client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['pulp_href'])
    # Create a remote with the standard test fixture url.
    body = gen_rpm_remote(url=url, policy=policy)
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['pulp_href'])
    # Sync the repository. A fresh repo sits at version 0.
    self.assertEqual(repo["latest_version_href"], f"{repo['pulp_href']}versions/0/")
    data = {"remote": remote["pulp_href"]}
    response = self.client.using_handler(api.json_handler).post(
        urljoin(repo["pulp_href"], "sync/"), data)
    sync_task = self.client.get(response["task"])
    # Report the task's queue wait vs. execution time for benchmarking.
    created_at = self.parse_date_from_string(sync_task["pulp_created"])
    started_at = self.parse_date_from_string(sync_task["started_at"])
    finished_at = self.parse_date_from_string(sync_task["finished_at"])
    task_duration = finished_at - started_at
    waiting_time = started_at - created_at
    print(
        "\n-> Sync => Waiting time (s): {wait} | Service time (s): {service}"
        .format(wait=waiting_time.total_seconds(),
                service=task_duration.total_seconds()))
    repo = self.client.get(repo['pulp_href'])
    # Kickstart content units are not orphan-cleaned here; remove them
    # explicitly when the test ends.
    for kickstart_content in get_content(repo)[RPM_KICKSTART_CONTENT_NAME]:
        self.addCleanup(self.client.delete, kickstart_content['pulp_href'])
    # Check that we have the correct content counts.
    self.assertIsNotNone(repo['latest_version_href'])
    if check_dist_tree:
        self.assertIn(
            list(RPM_KICKSTART_FIXTURE_SUMMARY.items())[0],
            get_content_summary(repo).items(),
        )
        self.assertIn(
            list(RPM_KICKSTART_FIXTURE_SUMMARY.items())[0],
            get_added_content_summary(repo).items(),
        )
    # Sync the repository again.
    latest_version_href = repo['latest_version_href']
    response = self.client.using_handler(api.json_handler).post(
        urljoin(repo["pulp_href"], "sync/"), data)
    sync_task = self.client.get(response["task"])
    created_at = self.parse_date_from_string(sync_task["pulp_created"])
    started_at = self.parse_date_from_string(sync_task["started_at"])
    finished_at = self.parse_date_from_string(sync_task["finished_at"])
    task_duration = finished_at - started_at
    waiting_time = started_at - created_at
    print(
        "\n-> Re-sync => Waiting time (s): {wait} | Service time (s): {service}"
        .format(wait=waiting_time.total_seconds(),
                service=task_duration.total_seconds()))
    repo = self.client.get(repo['pulp_href'])
    # Check that nothing has changed since the last sync.
    self.assertEqual(latest_version_href, repo['latest_version_href'])
def setUpClass(cls):
    """Create class-wide variables."""
    configuration = config.get_config()
    cls.cfg = configuration
    delete_orphans(configuration)
    cls.client = api.Client(configuration, api.page_handler)
    # Fetch the unsigned RPM once and share it across tests.
    cls.file = {'file': utils.http_get(RPM_UNSIGNED_URL)}
def tearDownClass(cls):
    """Delete resources made in setUpClass.

    The addCleanup feature is per-test, so class-level resources must be
    removed here explicitly.
    """
    cls.repository_api.delete(cls.from_repo.pulp_href)
    cls.remote_api.delete(cls.remote.pulp_href)
    delete_orphans()
def tearDown(self):
    """Delete the per-test repository and clean orphans."""
    self.repo_api.delete(self.repo.pulp_href)
    delete_orphans()
def setUpClass(cls):
    """Create class-wide variables."""
    configuration = config.get_config()
    cls.cfg = configuration
    cls.client = api.Client(configuration, api.json_handler)
    delete_orphans(configuration)
def test_repair_repository_version(self):
    """Test whether corrupted files can be redownloaded.

    Do the following:

    1. Create, and sync a repo.
    2. Select a content unit from the repo and change its appearance on
       disk.
    3. Repair the RepositoryVersion.
    4. Assert that the repair task reported one corrupted and one repaired
       unit.
    5. Repair the RepositoryVersion.
    6. Assert that the repair task reported none corrupted and none
       repaired unit.
    """
    if settings.DEFAULT_FILE_STORAGE not in self.SUPPORTED_STORAGE_FRAMEWORKS:
        self.skipTest(
            "Cannot simulate bit-rot on this storage platform ({}).".
            format(settings.DEFAULT_FILE_STORAGE),
        )
    # STEP 1
    delete_orphans()
    repo = self.api_client.post(FILE_REPO_PATH, gen_repo())
    self.addCleanup(self.api_client.delete, repo["pulp_href"])
    body = gen_file_remote()
    remote = self.api_client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(self.api_client.delete, remote["pulp_href"])
    sync(self.cfg, remote, repo)
    repo = self.api_client.get(repo["pulp_href"])
    # STEP 2: corrupt two units in different ways -- one modified in
    # place, one deleted.
    content1, content2 = sample(get_content(repo)[FILE_CONTENT_NAME], 2)
    if settings.DEFAULT_FILE_STORAGE in self.SUPPORTED_STORAGE_FRAMEWORKS:
        # Muddify one artifact on disk.
        artifact1_path = os.path.join(
            MEDIA_PATH, self.api_client.get(content1["artifact"])["file"])
        cmd1 = ("sed", "-i", "-e", r"$a bit rot", artifact1_path)
        self.cli_client.run(cmd1, sudo=True)
        # Delete another one from disk.
        artifact2_path = os.path.join(
            MEDIA_PATH, self.api_client.get(content2["artifact"])["file"])
        cmd2 = ("rm", artifact2_path)
        self.cli_client.run(cmd2, sudo=True)
    else:
        self.fail(
            "Corrupting files on this storage platform is not supported.")
    # STEP 3
    latest_version = get_versions(repo)[-1]["pulp_href"]
    result = self.api_client.post(latest_version + "repair/")
    # STEP 4: both corrupted units are detected and repaired.
    corrupted_units_report = next(
        (report for report in result["progress_reports"]
         if report["code"] == "repair.corrupted"),
        None,
    )
    self.assertEqual(corrupted_units_report["done"], 2, corrupted_units_report)
    repaired_units_report = next(
        (report for report in result["progress_reports"]
         if report["code"] == "repair.repaired"),
        None,
    )
    self.assertEqual(repaired_units_report["done"], 2, repaired_units_report)
    # STEP 5
    result = self.api_client.post(latest_version + "repair/")
    # STEP 6: a second repair finds nothing left to fix.
    corrupted_units_report = next(
        (report for report in result["progress_reports"]
         if report["code"] == "repair.corrupted"),
        None,
    )
    self.assertEqual(corrupted_units_report["done"], 0, corrupted_units_report)
    repaired_units_report = next(
        (report for report in result["progress_reports"]
         if report["code"] == "repair.repaired"),
        None,
    )
    self.assertEqual(repaired_units_report["done"], 0, repaired_units_report)
def test_clean_orphan_content_unit(self):
    """Test whether orphan content units can be clean up.

    Do the following:

    1. Create, and sync a repo.
    2. Remove a content unit from the repo. This will create a second
       repository version, and create an orphan content unit.
    3. Assert that content unit that was removed from the repo and its
       artifact are present on disk.
    4. Delete orphans.
    5. Assert that the orphan content unit was cleaned up, and its
       artifact is not present on disk.
    """
    repo_api = RepositoriesFileApi(self.api_client)
    remote_api = RemotesFileApi(self.api_client)
    repo = repo_api.create(gen_repo())
    self.addCleanup(repo_api.delete, repo.pulp_href)
    body = gen_file_remote()
    remote = remote_api.create(body)
    self.addCleanup(remote_api.delete, remote.pulp_href)
    # Sync the repository. A fresh repo sits at version 0.
    self.assertEqual(repo.latest_version_href, f"{repo.pulp_href}versions/0/")
    repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
    sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
    monitor_task(sync_response.task)
    repo = repo_api.read(repo.pulp_href)
    # Pick an arbitrary content unit to remove.
    content = choice(get_content(repo.to_dict())[FILE_CONTENT_NAME])
    # Create an orphan content unit.
    repo_api.modify(repo.pulp_href, dict(remove_content_units=[content["pulp_href"]]))
    artifacts_api = ArtifactsApi(core_client)
    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact is present on disk.
        relative_path = artifacts_api.read(content["artifact"]).file
        artifact_path = os.path.join(self.media_root, relative_path)
        cmd = ("ls", artifact_path)
        self.cli_client.run(cmd, sudo=True)
    file_contents_api = ContentFilesApi(self.api_client)
    # Delete first repo version. The previous removed content unit will be
    # an orphan.
    # NOTE(review): index [1] (not [0]) is used here; presumably version 0
    # is the empty initial version -- confirm against get_versions().
    delete_version(repo, get_versions(repo.to_dict())[1]["pulp_href"])
    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertIn(content["pulp_href"], content_units_href)
    delete_orphans()
    content_units = file_contents_api.list().to_dict()["results"]
    content_units_href = [c["pulp_href"] for c in content_units]
    self.assertNotIn(content["pulp_href"], content_units_href)
    if self.storage == "pulpcore.app.models.storage.FileSystem":
        # Verify that the artifact was removed from disk.
        with self.assertRaises(CalledProcessError):
            self.cli_client.run(cmd)
def setUpClass(cls):
    """Create class-wide variables, and clean orphan units."""
    configuration = config.get_config()
    cls.cfg = configuration
    delete_orphans(configuration)
    cls.client = api.Client(configuration, api.page_handler)
def setUpClass(cls):
    """Prepare shared fixtures: a clean Pulp, a chart API, one artifact."""
    delete_orphans()
    cls.content_unit = {}
    client = gen_chart_client()
    cls.chart_content_api = ContentChartApi(client)
    cls.artifact = gen_artifact()
def test_rpm_kickstart_on_demand(self):
    """Sync repositories with the rpm plugin.

    This test targets the following issue:

    `Pulp #5202 <https://pulp.plan.io/issues/5202>`_

    In order to sync a repository a remote has to be associated within
    this repository. When a repository is created this version field is
    set as None. After a sync the repository version is updated.

    Do the following:

    1. Create a repository and a remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Assert that the correct number of units were added and are present
       in the repo.
    6. Sync the remote one more time.
    7. Assert that repository version is the same as the previous one.
    8. Assert that the same number of packages are present
    """
    delete_orphans(self.cfg)
    repo = self.client.post(RPM_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['pulp_href'])
    # Create a remote with the standard test fixture url.
    body = gen_rpm_remote(
        url=RPM_KICKSTART_FIXTURE_URL,
        policy='on_demand'
    )
    remote = self.client.post(RPM_REMOTE_PATH, body)
    self.addCleanup(self.client.delete, remote['pulp_href'])
    # Sync the repository. A fresh repo sits at version 0.
    self.assertEqual(repo["latest_version_href"], f"{repo['pulp_href']}versions/0/")
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['pulp_href'])
    # Kickstart content units are not orphan-cleaned here; remove them
    # explicitly when the test ends.
    for kickstart_content in get_content(repo)[RPM_KICKSTART_CONTENT_NAME]:
        self.addCleanup(self.client.delete, kickstart_content['pulp_href'])
    # Check that we have the correct content counts.
    self.assertIsNotNone(repo['latest_version_href'])
    self.assertDictEqual(
        get_content_summary(repo),
        RPM_KICKSTART_FIXTURE_SUMMARY
    )
    self.assertDictEqual(
        get_added_content_summary(repo),
        RPM_KICKSTART_FIXTURE_SUMMARY
    )
    # Sync the repository again.
    latest_version_href = repo['latest_version_href']
    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['pulp_href'])
    # With policy=on_demand no artifacts should have been downloaded.
    artifacts = self.client.get(ARTIFACTS_PATH)
    self.assertEqual(artifacts['count'], 0, artifacts)
    # Check that nothing has changed since the last sync.
    self.assertEqual(latest_version_href, repo['latest_version_href'])
    self.assertDictEqual(
        get_content_summary(repo),
        RPM_KICKSTART_FIXTURE_SUMMARY
    )
def setUpClass(cls):
    """Create class-wide variables and delete orphans.

    1. Create a repository.
    2. Create a remote pointing to external registry with policy=on_demand.
    3. Sync the repository using the remote and re-read the repo data.
    4. Create a container distribution to serve the repository
    5. Create another container distribution to the serve the repository
       version

    This tests targets the following issue:

    * `Pulp #4460 <https://pulp.plan.io/issues/4460>`_
    """
    cls.cfg = config.get_config()
    cls.registry_name = urlparse(cls.cfg.get_base_url()).netloc
    client_api = gen_container_client()
    cls.repositories_api = RepositoriesContainerApi(client_api)
    cls.remotes_api = RemotesContainerApi(client_api)
    cls.distributions_api = DistributionsContainerApi(client_api)
    cls.teardown_cleanups = []
    delete_orphans()
    with contextlib.ExitStack() as stack:
        # ensure tearDownClass runs if an error occurs here
        stack.callback(cls.tearDownClass)
        # Step 1
        _repo = cls.repositories_api.create(
            ContainerContainerRepository(**gen_repo()))
        cls.teardown_cleanups.append(
            (cls.repositories_api.delete, _repo.pulp_href))
        # Step 2
        cls.remote = cls.remotes_api.create(
            gen_container_remote(policy="on_demand"))
        cls.teardown_cleanups.append(
            (cls.remotes_api.delete, cls.remote.pulp_href))
        # Step 3
        sync_data = RepositorySyncURL(remote=cls.remote.pulp_href)
        sync_response = cls.repositories_api.sync(_repo.pulp_href, sync_data)
        monitor_task(sync_response.task)
        cls.repo = cls.repositories_api.read(_repo.pulp_href)
        cls.artifacts_api = ArtifactsApi(core_client)
        # Record the artifact count after the on_demand sync so tests can
        # compare against it later.
        cls.artifact_count = cls.artifacts_api.list().count
        # Step 4.
        distribution_response = cls.distributions_api.create(
            ContainerContainerDistribution(**gen_distribution(
                repository=cls.repo.pulp_href)))
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = cls.distributions_api.read(created_resources[0])
        cls.distribution_with_repo = cls.distributions_api.read(
            distribution.pulp_href)
        cls.teardown_cleanups.append(
            (cls.distributions_api.delete,
             cls.distribution_with_repo.pulp_href))
        # Step 5.
        distribution_response = cls.distributions_api.create(
            ContainerContainerDistribution(**gen_distribution(
                repository_version=cls.repo.latest_version_href)))
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = cls.distributions_api.read(created_resources[0])
        cls.distribution_with_repo_version = cls.distributions_api.read(
            distribution.pulp_href)
        cls.teardown_cleanups.append(
            (cls.distributions_api.delete,
             cls.distribution_with_repo_version.pulp_href))
        # remove callback if everything goes well
        stack.pop_all()
def tearDownClass(cls):
    """Clean up after ourselves."""
    # Remote first, then repo, then whatever is left as orphans.
    cls.remote_api.delete(cls.remote.pulp_href)
    cls.repo_api.delete(cls.repo.pulp_href)
    delete_orphans(cls.cfg)
def tearDownClass(cls):
    """Remove content left orphaned by this test class."""
    delete_orphans(cls.cfg)
def tearDownClass(cls):
    """Clean up orphaned content created by this test class."""
    delete_orphans(cls.cfg)
def setUpClass(cls):
    """Prepare shared fixtures: a clean Pulp, an RPM content API, one artifact."""
    delete_orphans()
    cls.content_unit = {}
    client = gen_rpm_client()
    cls.rpm_content_api = ContentPackagesApi(client)
    cls.artifact = gen_artifact(RPM_SIGNED_URL)
def tearDownClass(cls):
    """Remove resources created by these tests via orphan cleanup."""
    delete_orphans(cls.cfg)