def test_content_remote_delete(self):
    """Assert that an HTTP error is raised when remote is deleted.

    Also verify that the content can be downloaded from Pulp once the
    remote is recreated and another sync is triggered.
    """
    cfg = config.get_config()
    # Start clean so on-demand units really have to come from the remote.
    delete_orphans(cfg)
    client = api.Client(cfg, api.page_handler)
    repo = client.post(FILE_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['pulp_href'])
    body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES))
    remote = client.post(FILE_REMOTE_PATH, body)
    # Sync the repository using a lazy download policy.
    sync(cfg, remote, repo)
    repo = client.get(repo['pulp_href'])
    publication = create_file_publication(cfg, repo)
    self.addCleanup(client.delete, publication['pulp_href'])
    # Delete the remote. Lazily-synced units can no longer be fetched.
    client.delete(remote['pulp_href'])
    body = gen_distribution()
    body['publication'] = publication['pulp_href']
    distribution = client.using_handler(api.task_handler).post(
        FILE_DISTRIBUTION_PATH, body)
    self.addCleanup(client.delete, distribution['pulp_href'])
    unit_path = choice([
        content_unit['relative_path']
        for content_unit in get_content(repo)[FILE_CONTENT_NAME]
    ])
    # Assert that an HTTP error is raised when one tries to fetch content
    # from the distribution once the remote was removed.
    with self.assertRaises(HTTPError) as ctx:
        download_content_unit(cfg, distribution, unit_path)
    for key in ('not', 'found'):
        self.assertIn(key, ctx.exception.response.reason.lower())
    # Recreating a remote and re-triggering a sync will cause these broken
    # units to recover again.
    body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['pulp_href'])
    sync(cfg, remote, repo)
    repo = client.get(repo['pulp_href'])
    # The unit is downloadable again and matches the fixture's checksum.
    content = download_content_unit(cfg, distribution, unit_path)
    pulp_hash = hashlib.sha256(content).hexdigest()
    fixtures_hash = hashlib.sha256(
        utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path))).hexdigest()
    self.assertEqual(pulp_hash, fixtures_hash)
def test_all(self):
    """Verify content promotion across two distributions.

    This test targets the following issue:

    * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_

    Do the following:

    1. Create a repository that has at least one repository version.
    2. Create a publication.
    3. Create two distributions backed by that same publication; each
       gets a different ``base_path``.
    4. Assert both distributions reference the same publication.
    5. Download one content unit through each distribution and assert
       the checksums are identical.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)

    repo = client.post(FILE_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["pulp_href"])

    remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL))
    self.addCleanup(client.delete, remote["pulp_href"])

    sync(cfg, remote, repo)
    repo = client.get(repo["pulp_href"])

    publication = create_file_publication(cfg, repo)
    self.addCleanup(client.delete, publication["pulp_href"])

    # Two distributions, both serving the very same publication.
    distributions = []
    for _ in range(2):
        distro_body = gen_distribution()
        distro_body["publication"] = publication["pulp_href"]
        distro = client.using_handler(api.task_handler).post(
            FILE_DISTRIBUTION_PATH, distro_body
        )
        self.addCleanup(client.delete, distro["pulp_href"])
        distributions.append(distro)

    first, second = distributions
    self.assertEqual(first["publication"], second["publication"], distributions)

    unit_path = get_added_content(repo)[FILE_CONTENT_NAME][0]["relative_path"]
    unit_urls = [urljoin(d["base_url"], unit_path) for d in distributions]

    client.response_handler = api.safe_handler
    digests = [
        hashlib.sha256(client.get(url).content).hexdigest() for url in unit_urls
    ]
    self.assertEqual(digests[0], digests[1], unit_urls)
def test_content_served(self):
    """Verify that content is served over publication distribution."""
    repo = self.client.post(FILE_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo["pulp_href"])
    remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote())
    self.addCleanup(self.client.delete, remote["pulp_href"])

    sync(self.cfg, remote, repo)
    repo = self.client.get(repo["pulp_href"])

    publication = create_file_publication(self.cfg, repo)
    self.addCleanup(self.client.delete, publication["pulp_href"])

    distro_body = gen_distribution(publication=publication["pulp_href"])
    distribution = self.client.post(FILE_DISTRIBUTION_PATH, distro_body)
    self.addCleanup(self.client.delete, distribution["pulp_href"])

    # The served PULP_MANIFEST must list every fixture file.
    manifest = parse_pulp_manifest(self.download_pulp_manifest(distribution))
    self.assertEqual(len(manifest), FILE_FIXTURE_COUNT, manifest)

    # One unit, fetched via the distribution, must match the fixture bytes.
    rel_path = get_added_content(repo)[FILE_CONTENT_NAME][0]["relative_path"]
    served = self.client.get(urljoin(distribution["base_url"], rel_path)).content
    expected = utils.http_get(urljoin(FILE_URL, rel_path))
    self.assertEqual(
        hashlib.sha256(expected).hexdigest(),
        hashlib.sha256(served).hexdigest(),
    )
def serve_content_workflow(self, publish=True, bodies=None, cleanup=None):
    """Create a repo/remote/(publication)/distribution serving workflow.

    Returns ``(repo, remote, pub, distro)`` when ``publish`` is true,
    otherwise ``(repo, remote, distro)``. If ``cleanup`` is given it is
    called with a delete callable and href for each created resource.
    """
    overrides = {} if bodies is None else bodies
    repo = self.repo_api.create(gen_repo(**overrides.get("repository", {})))
    remote = self.remote_api.create(gen_file_remote(**overrides.get("remote", {})))

    sync(self.cfg, remote.to_dict(), repo.to_dict())
    repo = self.repo_api.read(repo.pulp_href)

    pub = None
    if publish:
        pub = create_file_publication(self.cfg, repo.to_dict())
        pub = self.pub_api.read(pub["pulp_href"])
        dis_body = {"publication": pub.pulp_href}
    else:
        # No publication: the distribution serves the repository directly.
        dis_body = {"repository": repo.pulp_href}

    distro_task = self.dis_api.create(gen_distribution(**dis_body)).task
    distro = self.dis_api.read(monitor_task(distro_task).created_resources[0])

    if cleanup:
        cleanup(self.repo_api.delete, repo.pulp_href)
        cleanup(self.remote_api.delete, remote.pulp_href)
        cleanup(self.dis_api.delete, distro.pulp_href)
        if publish:
            cleanup(self.pub_api.delete, pub.pulp_href)

    if publish:
        return repo, remote, pub, distro
    return repo, remote, distro
def test_create_only_using_repoversion(self):
    """Create a publication only using repository version."""
    repo = self.create_sync_repo(3)
    versions = self.client.get(repo['_versions_href'])
    target_version = versions[1]['_href']

    publication = create_file_publication(self.cfg, repo, target_version)
    self.addCleanup(self.client.delete, publication['_href'])

    # The publication must record exactly the version it was built from.
    self.assertEqual(publication['repository_version'], target_version, publication)
def test_create_only_using_repoversion(self):
    """Create a publication only using repository version."""
    repo = self.create_sync_repo()

    # Removing units one at a time creates additional repository versions.
    for unit in get_content(repo)[FILE_CONTENT_NAME]:
        modify_repo(self.cfg, repo, remove_units=[unit])

    chosen_version = self.client.get(repo["versions_href"])[1]["pulp_href"]
    publication = create_file_publication(self.cfg, repo, chosen_version)
    self.addCleanup(self.client.delete, publication["pulp_href"])

    # The publication must point at the explicitly chosen version.
    self.assertEqual(publication["repository_version"], chosen_version, publication)
def test_01_create(self):
    """Create a publication distribution.

    Do the following:

    1. Create a repository and 3 repository versions with at least 1 file
       content in it. Create a publication using the second repository
       version.
    2. Create a distribution with 'publication' field set to
       the publication from step (1).
    3. Assert the distribution got created correctly with the correct
       base_path, name, and publication. Assert that content guard is
       unset.
    4. Assert that publication has a 'distributions' reference to the
       distribution (it's backref).
    """
    self.repo.update(self.client.post(FILE_REPO_PATH, gen_repo()))
    self.remote.update(self.client.post(FILE_REMOTE_PATH, gen_file_remote()))
    # create 3 repository versions
    sync(self.cfg, self.remote, self.repo)
    self.repo = self.client.get(self.repo["pulp_href"])
    # Each unit removal below adds one more repository version.
    for file_content in get_content(self.repo)[FILE_CONTENT_NAME]:
        modify_repo(self.cfg, self.repo, remove_units=[file_content])
    self.repo = self.client.get(self.repo["pulp_href"])
    versions = get_versions(self.repo)
    # Publish specifically the second repository version.
    self.publication.update(
        create_file_publication(self.cfg, self.repo, versions[1]["pulp_href"])
    )
    self.distribution.update(
        self.client.post(
            FILE_DISTRIBUTION_PATH,
            gen_distribution(publication=self.publication["pulp_href"])
        )
    )
    # Re-read the publication so its 'distributions' backref is populated.
    self.publication = self.client.get(self.publication["pulp_href"])
    # content_guard and repository parameters unset.
    for key, val in self.distribution.items():
        if key in ["content_guard", "repository"]:
            self.assertIsNone(val, self.distribution)
        else:
            self.assertIsNotNone(val, self.distribution)
    self.assertEqual(
        self.distribution["publication"],
        self.publication["pulp_href"],
        self.distribution
    )
    self.assertEqual(
        self.publication["distributions"][0],
        self.distribution["pulp_href"],
        self.publication
    )
def test_delete_publication(self):
    """Delete a publication.

    Delete a repository version, and verify the associated publication
    is also deleted.
    """
    pub = create_file_publication(self.cfg, self.repo)
    pub_href = pub["pulp_href"]

    delete_version(self.repo)

    # The publication must no longer be retrievable.
    with self.assertRaises(HTTPError):
        self.client.get(pub_href)
def test_all(self):
    """Test whether sync/publish for content already in Pulp."""
    cfg = config.get_config()
    client = api.Client(cfg, api.page_handler)

    # step 1: start clean — no content on disk or in the database.
    delete_orphans(cfg)

    remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL))
    self.addCleanup(client.delete, remote['pulp_href'])
    repo = client.post(FILE_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['pulp_href'])

    # The second pass syncs/publishes content Pulp already holds.
    for _ in range(2):
        sync(cfg, remote, repo)
        repo = client.get(repo['pulp_href'])
        create_file_publication(cfg, repo)
def test_06_publication_create_order(self):
    """Assert that publications are ordered by created time.

    This test targets the following issues:

    * `Pulp Smash #954 <https://github.com/pulp/pulp-smash/issues/954>`_
    * `Pulp #3576 <https://pulp.plan.io/issues/3576>`_
    """
    # Add two more publications for the same repo (three in total).
    for _ in range(2):
        create_file_publication(self.cfg, self.repo)

    # Read publications
    publications = self.client.get(FILE_PUBLICATION_PATH)
    self.assertEqual(len(publications), 3)

    # The listing is sorted by pulp_created, newest first: every entry
    # must be newer than the one that follows it.
    for newer, older in zip(publications, publications[1:]):
        self.assertGreater(
            parse_date_from_string(newer["pulp_created"]),
            parse_date_from_string(older["pulp_created"]),
        )
def test_01_create(self):
    """Create a publication distribution.

    Do the following:

    1. Create a repository and 3 repository versions with at least 1 file
       content in it. Create a publication using the second repository
       version.
    2. Create a distribution with 'publication' field set to
       the publication from step (1).
    3. Assert the distribution got created correctly with the correct
       base_path, name, and publication. Assert that content guard is
       unset.
    4. Assert that publication has a 'distributions' reference to the
       distribution (it's backref).
    """
    self.repo.update(self.client.post(REPO_PATH, gen_repo()))
    self.remote.update(
        self.client.post(FILE_REMOTE_PATH, gen_file_remote()))
    # create 3 repository versions
    for _ in range(3):
        sync(self.cfg, self.remote, self.repo)
    self.repo = self.client.get(self.repo['pulp_href'])
    versions = get_versions(self.repo)
    # Publish specifically the second repository version.
    self.publication.update(
        create_file_publication(self.cfg, self.repo, versions[1]['pulp_href']))
    self.distribution.update(
        self.client.post(
            FILE_DISTRIBUTION_PATH,
            gen_distribution(publication=self.publication['pulp_href'])))
    # Re-read the publication so its 'distributions' backref is populated.
    self.publication = self.client.get(self.publication['pulp_href'])
    # content_guard is the only parameter unset.
    for key, val in self.distribution.items():
        if key == 'content_guard':
            self.assertIsNone(val, self.distribution)
        else:
            self.assertIsNotNone(val, self.distribution)
    self.assertEqual(self.distribution['publication'],
                     self.publication['pulp_href'], self.distribution)
    self.assertEqual(self.publication['distributions'][0],
                     self.distribution['pulp_href'], self.publication)
def test_delete_distribution(self):
    """Distribution is not removed once repository version is removed."""
    repo = self.create_sync_repo(2)
    first_version = self.client.get(repo["versions_href"])[0]["pulp_href"]
    publication = create_file_publication(self.cfg, repo, first_version)

    distro_body = gen_distribution(publication=publication["pulp_href"])
    distribution = self.client.post(FILE_DISTRIBUTION_PATH, distro_body)
    self.addCleanup(self.client.delete, distribution["pulp_href"])

    # delete repo version used to create publication
    self.client.delete(first_version)

    # The distribution survives, but its publication reference is cleared.
    refreshed = self.client.get(distribution["pulp_href"])
    self.assertIsNone(refreshed["publication"], refreshed)
def test_delete_publication(self):
    """Publication is removed once the repository version is removed."""
    repo = self.create_sync_repo(2)
    target_version = self.client.get(repo["versions_href"])[0]["pulp_href"]
    pub_href = create_file_publication(self.cfg, repo, target_version)["pulp_href"]

    # delete repo version used to create publication
    self.client.delete(target_version)

    # The publication is gone; the server responds "not found".
    with self.assertRaises(HTTPError) as exc:
        self.client.get(pub_href)
    detail = exc.exception.response.json()["detail"].lower()
    for fragment in ("not", "found"):
        self.assertIn(fragment, detail, exc.exception.response)
def test_delete_publication(self):
    """Publication is removed once the repository version is removed."""
    repo = self.create_sync_repo(2)
    # Pick any repository version at random.
    victim = choice(self.client.get(repo['versions_href']))['pulp_href']
    publication = create_file_publication(self.cfg, repo, victim)

    # delete repo version used to create publication
    self.client.delete(victim)

    # The publication is gone; the server responds "not found".
    with self.assertRaises(HTTPError) as exc:
        self.client.get(publication['pulp_href'])
    message = exc.exception.response.json()['detail'].lower()
    for word in ('not', 'found'):
        self.assertIn(word, message, exc.exception.response)
def test_content_served(self):
    """Verify that content is served over publication distribution."""
    repo = self.client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['_href'])
    remote = self.client.post(FILE_REMOTE_PATH, gen_file_remote())
    self.addCleanup(self.client.delete, remote['_href'])

    sync(self.cfg, remote, repo)
    repo = self.client.get(repo['_href'])

    publication = create_file_publication(self.cfg, repo)
    self.addCleanup(self.client.delete, publication['_href'])

    distribution = self.client.post(
        FILE_DISTRIBUTION_PATH,
        gen_distribution(publication=publication['_href']),
    )
    self.addCleanup(self.client.delete, distribution['_href'])

    # The served PULP_MANIFEST must list every fixture file.
    manifest = parse_pulp_manifest(self.download_pulp_manifest(distribution))
    self.assertEqual(len(manifest), FILE_FIXTURE_COUNT, manifest)

    rel_path = get_added_content(repo)[FILE_CONTENT_NAME][0]['relative_path']
    # base_url carries no scheme here, so prepend the API host's scheme.
    scheme = self.cfg.get_hosts('api')[0].roles['api']['scheme']
    unit_url = urljoin(scheme + '://' + distribution['base_url'] + '/', rel_path)

    served = self.client.using_handler(api.safe_handler).get(unit_url).content
    expected = utils.http_get(urljoin(FILE_URL, rel_path))
    self.assertEqual(
        hashlib.sha256(expected).hexdigest(),
        hashlib.sha256(served).hexdigest(),
    )
def test_01_create_file_publication(self):
    """Create a publication."""
    # Store it on the shared state so follow-up ordered tests can use it.
    publication = create_file_publication(self.cfg, self.repo)
    self.publication.update(publication)