def test_gen_remote(self):
    """Asserts the generation of remote dict."""
    # A remote built with only a URL gets a generated name.
    remote = gen_remote('http://foo.com')
    self.assertIn('url', remote)
    self.assertIn('name', remote)
    self.assertEqual('http://foo.com', remote['url'])
    # An explicitly supplied name overrides the generated one.
    named = gen_remote('http://fooremote', name='fooremote')
    self.assertIn('url', named)
    self.assertEqual('http://fooremote', named['url'])
    self.assertEqual('fooremote', named['name'])
def test_gen_remote(self):
    """Tests the generation of a remote dict."""
    # A remote built with only a URL gets a generated name.
    remote = gen_remote("http://foo.com")
    self.assertIn("url", remote)
    self.assertIn("name", remote)
    self.assertEqual("http://foo.com", remote["url"])
    # An explicitly supplied name overrides the generated one.
    named = gen_remote("http://fooremote", name="fooremote")
    self.assertIn("url", named)
    self.assertEqual("http://fooremote", named["url"])
    self.assertEqual("fooremote", named["name"])
def test_sync_multiple_plugins(self):
    """Sync a repo using remotes from different plugins.

    This test targets the following issue:

    `Pulp #4274 <https://pulp.plan.io/issues/4274>`_
    """
    repo = self.client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['pulp_href'])
    # One remote per plugin: RPM, file, and docker.
    rpm_remote = self.client.post(
        RPM_REMOTE_PATH, gen_remote(url=RPM_UNSIGNED_FIXTURE_URL))
    self.addCleanup(self.client.delete, rpm_remote['pulp_href'])
    file_remote = self.client.post(
        FILE_REMOTE_PATH, gen_remote(url=FILE_FIXTURE_MANIFEST_URL))
    self.addCleanup(self.client.delete, file_remote['pulp_href'])
    docker_remote = self.client.post(
        DOCKER_REMOTE_PATH,
        gen_remote(url=DOCKER_V2_FEED_URL, upstream_name=DOCKER_UPSTREAM_NAME))
    self.addCleanup(self.client.delete, docker_remote['pulp_href'])
    # Sync order should not matter, so randomize it.
    remotes = [file_remote, docker_remote, rpm_remote]
    shuffle(remotes)
    for remote in remotes:
        sync(self.cfg, remote, repo)
    repo = self.client.get(repo['pulp_href'])
    content_keys = sorted([
        DOCKER_CONTENT_BLOB_NAME,
        DOCKER_CONTENT_MANIFEST_NAME,
        DOCKER_CONTENT_TAG_NAME,
        FILE_CONTENT_NAME,
        RPM_PACKAGE_CONTENT_NAME,
        RPM_ADVISORY_CONTENT_NAME,
    ])
    content = get_content_summary(repo)
    self.assertEqual(len(content), len(content_keys), content)
    # Assert that all expected keys for different plugins are present.
    self.assertEqual(content_keys, sorted([key for key in content.keys()]), content)
    # Assert that sync the content was synced properly.
    for value in content.values():
        with self.subTest(value=value):
            self.assertGreater(value, 0, content)
def test_mirror_sync(self): """Sync multiple plugin into the same repo with mirror as `True`. This test targets the following issue: 4448 * `<https://pulp.plan.io/issues/4448>`_ This test does the following: 1. Create a repo. 2. Create two remotes a. RPM remote b. File remote 3. Sync the repo with RPM remote. 4. Sync the repo with File remote with ``Mirror=True``. 5. Verify whether the content in the latest version of the repo has only File content and RPM content is deleted. """ # Step 1 repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) # Step 2 rpm_remote = self.client.post(RPM_REMOTE_PATH, gen_remote(url=RPM_UNSIGNED_FIXTURE_URL)) self.addCleanup(self.client.delete, rpm_remote['_href']) file_remote = self.client.post( FILE_REMOTE_PATH, gen_remote(url=FILE_FIXTURE_MANIFEST_URL)) self.addCleanup(self.client.delete, file_remote['_href']) # Step 3 sync(self.cfg, rpm_remote, repo) repo = self.client.get(repo['_href']) self.assertIsNotNone(repo['_latest_version_href']) self.assertDictEqual(get_added_content_summary(repo), RPM_FIXTURE_SUMMARY) # Step 4 sync(self.cfg, file_remote, repo, mirror=True) repo = self.client.get(repo['_href']) self.assertIsNotNone(repo['_latest_version_href']) self.assertDictEqual(get_added_content_summary(repo), FILE_FIXTURE_SUMMARY) # Step 5 self.assertDictEqual(get_content_summary(repo), FILE_FIXTURE_SUMMARY) self.assertDictEqual(get_removed_content_summary(repo), RPM_FIXTURE_SUMMARY)
def populate_pulp(cfg, url=DOCKER_V2_FEED_URL):
    """Add docker contents to Pulp.

    :param cfg: Information about a Pulp application
        (``pulp_smash.config.PulpSmashConfig``).
    :param url: The docker repository URL. Defaults to
        :data:`pulp_smash.constants.DOCKER_FIXTURE_URL`
    :returns: A list of dicts, where each dict describes one file content in
        Pulp.
    """
    client = api.Client(cfg, api.json_handler)
    remote = {}
    repo = {}
    try:
        remote.update(
            client.post(
                DOCKER_REMOTE_PATH,
                gen_remote(url, upstream_name=DOCKER_UPSTREAM_NAME)
            )
        )
        repo.update(client.post(REPO_PATH, gen_repo()))
        sync(cfg, remote, repo)
    finally:
        # Delete the remote and repo even if creation or sync failed part-way;
        # the dicts are only non-empty once the POST succeeded.
        if remote:
            client.delete(remote['pulp_href'])
        if repo:
            client.delete(repo['pulp_href'])
    return client.get(DOCKER_CONTENT_PATH)['results']
def gen_docker_remote(**kwargs):
    """Generate dict with common remote properties."""
    # Callers may override the fixture URL and upstream name via kwargs.
    url = kwargs.pop('url', DOCKER_V2_FEED_URL)
    upstream_name = kwargs.pop('upstream_name', DOCKER_UPSTREAM_NAME)
    return gen_remote(url, upstream_name=upstream_name, **kwargs)
def test_all(self):
    """Test whether sync/publish for content already in Pulp."""
    cfg = config.get_config()
    client = api.Client(cfg, api.page_handler)

    # step 1. delete orphans to assure that no content is present on disk,
    # or database.
    delete_orphans(cfg)

    remote = client.post(
        FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)
    )
    self.addCleanup(client.delete, remote['_href'])
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(client.delete, publisher['_href'])
    # Sync and publish twice: the second iteration exercises syncing and
    # publishing content that is already present in Pulp.
    for _ in range(2):
        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])
        publish(cfg, publisher, repo)
def test_all(self):
    """Verify whether the sync of a repository updates its version.

    This test explores the design choice stated in the `Pulp #3308`_ that a
    new repository version is created even if the sync does not add or remove
    any content units. Even without any changes to the remote if a new sync
    occurs, a new repository version is created.

    .. _Pulp #3308: https://pulp.plan.io/issues/3308

    Do the following:

    1. Create a repository, and an remote.
    2. Sync the repository an arbitrary number of times.
    3. Verify that the repository version is equal to the previous number of
       syncs.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    body = gen_remote(urljoin(FILE_FEED_URL, 'PULP_MANIFEST'))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    number_of_syncs = randint(1, 10)
    for _ in range(number_of_syncs):
        sync(cfg, remote, repo)
    repo = client.get(repo['_href'])
    path = urlsplit(repo['_latest_version_href']).path
    # Version hrefs end in '.../versions/<number>/', so the second-to-last
    # path segment is the version number.
    latest_repo_version = int(path.split('/')[-2])
    self.assertEqual(latest_repo_version, number_of_syncs)
def populate_pulp(cfg, url=None):
    """Add file contents to Pulp.

    :param cfg: Information about a Pulp application
        (``pulp_smash.config.PulpSmashConfig``).
    :param url: The URL to a file repository's ``PULP_MANIFEST`` file.
        Defaults to :data:`pulp_smash.constants.FILE_FIXTURE_URL` +
        ``PULP_MANIFEST``.
    :returns: A list of dicts, where each dict describes one file content in
        Pulp.
    """
    if url is None:
        url = urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST')
    client = api.Client(cfg, api.json_handler)
    remote = {}
    repo = {}
    try:
        remote.update(client.post(FILE_REMOTE_PATH, gen_remote(url)))
        repo.update(client.post(REPO_PATH, gen_repo()))
        sync(cfg, remote, repo)
    finally:
        # Delete whatever was actually created, even if sync failed.
        if remote:
            client.delete(remote['_href'])
        if repo:
            client.delete(repo['_href'])
    return client.get(FILE_CONTENT_PATH)['results']
def populate_pulp(cfg, url=None):
    """Add cookbook contents to Pulp.

    :param cfg: Information about a Pulp application
        (``pulp_smash.config.PulpSmashConfig``).
    :param url: The URL to a cookbook repository. Defaults to
        :data:`pulp_smash.constants.fixture_u1.url`.
    :returns: A list of dicts, where each dict describes one cookbook content
        in Pulp.
    """
    if url is None:
        url = fixture_u1.url
    client = api.Client(cfg, api.json_handler)
    remote = {}
    repo = {}
    try:
        remote.update(client.post(COOKBOOK_REMOTE_PATH, gen_remote(url)))
        repo.update(client.post(REPO_PATH, gen_repo()))
        sync(cfg, remote, repo, mirror=True)
    finally:
        # Delete whatever was actually created, even if sync failed.
        if remote:
            client.delete(remote["_href"])
        if repo:
            client.delete(repo["_href"])
    return client.get(COOKBOOK_CONTENT_PATH)["results"]
def populate_pulp(cfg, url=DOCKER_V2_FEED_URL):
    """Add docker contents to Pulp.

    :param cfg: Information about a Pulp application
        (``pulp_smash.config.PulpSmashConfig``).
    :param url: The docker repository URL. Defaults to
        :data:`pulp_smash.constants.DOCKER_FIXTURE_URL`
    :returns: A list of dicts, where each dict describes one file content in
        Pulp.
    """
    client = api.Client(cfg, api.json_handler)
    remote = {}
    repo = {}
    try:
        remote.update(
            client.post(
                DOCKER_REMOTE_PATH,
                gen_remote(url, upstream_name=DOCKER_UPSTREAM_NAME)
            )
        )
        repo.update(client.post(REPO_PATH, gen_repo()))
        sync(cfg, remote, repo)
    finally:
        # Delete whatever was actually created, even if sync failed.
        if remote:
            client.delete(remote['_href'])
        if repo:
            client.delete(repo['_href'])
    return client.get(DOCKER_CONTENT_PATH)['results']
def populate_pulp(cfg, url=None):
    """Add file contents to Pulp.

    :param cfg: Information about a Pulp application
        (``pulp_smash.config.PulpSmashConfig``).
    :param url: The URL to a file repository's ``PULP_MANIFEST`` file.
        Defaults to :data:`pulp_smash.constants.FILE_FIXTURE_URL` +
        ``PULP_MANIFEST``.
    :returns: A list of dicts, where each dict describes one file content in
        Pulp.
    """
    if url is None:
        url = FILE_FIXTURE_MANIFEST_URL
    client = api.Client(cfg, api.json_handler)
    remote = {}
    repo = {}
    try:
        remote.update(client.post(FILE_REMOTE_PATH, gen_remote(url)))
        repo.update(client.post(REPO_PATH, gen_repo()))
        sync(cfg, remote, repo)
    finally:
        # Delete whatever was actually created, even if sync failed.
        if remote:
            client.delete(remote['_href'])
        if repo:
            client.delete(repo['_href'])
    return client.get(FILE_CONTENT_PATH)['results']
def test_02_sync_content(self):
    """Sync content into the repository.

    Assert that:

    * The ``_versions_href`` API call is correct.
    * The ``_latest_version_href`` API call is correct.
    * The ``_latest_version_href + content/`` API call is correct.
    * The ``_latest_version_href + added_content/`` API call is correct.
    * The ``_latest_version_href + removed_content/`` API call is correct.
    * The ``content_summary`` attribute is correct.
    """
    body = gen_remote(urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST'))
    self.remote.update(self.client.post(FILE_REMOTE_PATH, body))
    sync(self.cfg, self.remote, self.repo)
    repo = self.client.get(self.repo['_href'])

    # The first sync of an empty repo creates exactly one version.
    repo_versions = get_versions(repo)
    self.assertEqual(len(repo_versions), 1, repo_versions)

    self.assertIsNotNone(repo['_latest_version_href'])

    content = get_content(repo)
    self.assertEqual(len(content), FILE_FIXTURE_COUNT)

    added_content = get_added_content(repo)
    self.assertEqual(len(added_content), FILE_FIXTURE_COUNT, added_content)

    # Nothing existed before the sync, so nothing can have been removed.
    removed_content = get_removed_content(repo)
    self.assertEqual(len(removed_content), 0, removed_content)

    content_summary = self.get_content_summary(repo)
    self.assertEqual(content_summary, {'file': FILE_FIXTURE_COUNT})
def test_sync(self):
    """Sync repositories with the file plugin.

    In order to sync a repository an remote has to be associated within
    this repository. When a repository is created this version field is set
    as None. After a sync the repository version is updated.

    Do the following:

    1. Create a repository, and an remote.
    2. Assert that repository version is None.
    3. Sync the remote.
    4. Assert that repository version is not None.
    5. Sync the remote one more time.
    6. Assert that repository version is different from the previous one.
    """
    client = api.Client(self.cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    body = gen_remote(urljoin(FILE_FEED_URL, 'PULP_MANIFEST'))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])

    # Sync the repository.
    self.assertIsNone(repo['_latest_version_href'])
    sync(self.cfg, remote, repo)
    repo = client.get(repo['_href'])
    self.assertIsNotNone(repo['_latest_version_href'])

    # Sync the repository again. Even with no content changes, the version
    # href must differ from the previous one.
    latest_version_href = repo['_latest_version_href']
    sync(self.cfg, remote, repo)
    repo = client.get(repo['_href'])
    self.assertNotEqual(latest_version_href, repo['_latest_version_href'])
def test_all(self):
    """Test content promotion for a distribution.

    This test targets the following issue:

    * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_

    Do the following:

    1. Create a repository that has at least one repository version.
    2. Create a publication.
    3. Create 2 distributions - using the same publication. Those
       distributions will have different ``base_path``.
    4. Assert that distributions have the same publication.
    5. Select a content unit. Download that content unit from Pulp using the
       two different distributions. Assert that content unit has the same
       checksum when fetched from different distributions.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    repo = client.post(FILE_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["pulp_href"])
    remote = client.post(FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL))
    self.addCleanup(client.delete, remote["pulp_href"])
    sync(cfg, remote, repo)
    repo = client.get(repo["pulp_href"])
    publication = create_file_publication(cfg, repo)
    self.addCleanup(client.delete, publication["pulp_href"])
    distributions = []
    # Two distributions backed by the same publication, differing only in
    # their (generated) base_path.
    for _ in range(2):
        body = gen_distribution()
        body["publication"] = publication["pulp_href"]
        distribution = client.using_handler(api.task_handler).post(
            FILE_DISTRIBUTION_PATH, body)
        distributions.append(distribution)
        self.addCleanup(client.delete, distribution["pulp_href"])
    self.assertEqual(
        distributions[0]["publication"], distributions[1]["publication"],
        distributions)
    unit_urls = []
    unit_path = get_added_content(repo)[FILE_CONTENT_NAME][0]["relative_path"]
    for distribution in distributions:
        unit_url = distribution["base_url"]
        unit_urls.append(urljoin(unit_url, unit_path))
    # safe_handler returns the raw response, so .content is available.
    client.response_handler = api.safe_handler
    # The same unit fetched through either distribution must be identical.
    self.assertEqual(
        hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(),
        hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(),
        unit_urls,
    )
def test_02_sync_content(self):
    """Sync content into the repository.

    Assert that:

    * The ``_versions_href`` API call is correct.
    * The ``_latest_version_href`` API call is correct.
    * The ``_latest_version_href + content/`` API call is correct.
    * The ``_latest_version_href + added_content/`` API call is correct.
    * The ``_latest_version_href + removed_content/`` API call is correct.
    * The ``content_summary`` attribute is correct.
    """
    body = gen_remote(urljoin(FILE_FEED_URL, 'PULP_MANIFEST'))
    self.remote.update(self.client.post(FILE_REMOTE_PATH, body))
    sync(self.cfg, self.remote, self.repo)
    repo = self.client.get(self.repo['_href'])

    # The first sync of an empty repo creates exactly one version.
    repo_versions = get_versions(repo)
    self.assertEqual(len(repo_versions), 1, repo_versions)

    self.assertIsNotNone(repo['_latest_version_href'])

    content = get_content(repo)
    self.assertEqual(len(content), FILE_FEED_COUNT)

    added_content = get_added_content(repo)
    self.assertEqual(len(added_content), FILE_FEED_COUNT, added_content)

    # Nothing existed before the sync, so nothing can have been removed.
    removed_content = get_removed_content(repo)
    self.assertEqual(len(removed_content), 0, removed_content)

    content_summary = self.get_content_summary(repo)
    self.assertEqual(content_summary, {'file': FILE_FEED_COUNT})
def gen_npm_remote(url=NPM_FIXTURE_URL, **kwargs):
    """Return a semi-random dict for use in creating a npm Remote.

    :param url: The URL of an external content source.
    """
    # FIXME: Add any fields specific to a npm remote here
    remote = gen_remote(url, **kwargs)
    return remote
def do_create_repo_and_sync(self, client, policy):
    """
    Create a repo and remote (fixture_u1) using `policy`. Sync the repo.

    Verify that a new version was created, the number of downloads and the
    number of content units.

    :param client: API client used for all REST calls.
    :param policy: The download policy to configure on the remote.
    :returns: The (refreshed) repo dict and the remote dict.
    """
    repo = client.post(COOKBOOK_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["pulp_href"])
    body = gen_remote(fixture_u1.url, policy=policy)
    remote = client.post(COOKBOOK_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote["pulp_href"])
    # Sync the full repository:
    self.assert_initial_repo(repo)
    all_cookbook_count = fixture_u1.cookbook_count()
    task = self.sync_and_inspect_task_report(
        remote, repo, all_cookbook_count, policy=policy)
    repo = client.get(repo["pulp_href"])
    latest_version_href = repo["latest_version_href"]
    self.assertIsNotNone(latest_version_href)
    # The sync task must report the new version as its created resource.
    self.assertEqual(latest_version_href, task["created_resources"][0])
    self.verify_counts(repo, all_cookbook_count, all_cookbook_count, 0)
    return repo, remote
def test_all(self):
    """Verify multi-resourcing locking.

    Do the following:

    1. Create a repository, and a remote.
    2. Update the remote to point to a different url.
    3. Immediately run a sync. The sync should fire after the update and
       sync from the second url.
    4. Assert that remote url was updated.
    5. Assert that the number of units present in the repository is
       according to the updated url.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    # Start with the large feed, then immediately switch to the small one.
    body = gen_remote(urljoin(FILE_LARGE_FEED_URL, 'PULP_MANIFEST'))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    url = {'url': urljoin(FILE_FEED_URL, 'PULP_MANIFEST')}
    client.patch(remote['_href'], url)
    # Resource locking must serialize the update before this sync.
    sync(cfg, remote, repo)
    repo = client.get(repo['_href'])
    remote = client.get(remote['_href'])
    self.assertEqual(remote['url'], url['url'])
    # Unit count matches the small (updated) feed, not the large one.
    self.assertEqual(len(get_content(repo)), FILE_FEED_COUNT)
def test_all(self):
    """Verify publisher and remote can be used with different repos.

    This test explores the design choice stated in `Pulp #3341`_ that
    remove the FK from publishers and remotes to repository.
    Allowing remotes and publishers to be used with different
    repositories.

    .. _Pulp #3341: https://pulp.plan.io/issues/3341

    Do the following:

    1. Create an remote, and a publisher.
    2. Create 2 repositories.
    3. Sync both repositories using the same remote.
    4. Assert that the two repositories have the same contents.
    5. Publish both repositories using the same publisher.
    6. Assert that each generated publication has the same publisher, but
       are associated with different repositories.
    """
    cfg = config.get_config()

    # Create an remote and publisher.
    client = api.Client(cfg, api.json_handler)
    body = gen_remote(urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST'))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(client.delete, publisher['_href'])

    # Create and sync repos.
    repos = []
    for _ in range(2):
        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])
        sync(cfg, remote, repo)
        repos.append(client.get(repo['_href']))

    # Compare contents of repositories.
    contents = []
    for repo in repos:
        contents.append(get_content(repo))
    self.assertEqual(
        {content['_href'] for content in contents[0]},
        {content['_href'] for content in contents[1]},
    )

    # Publish repositories.
    publications = []
    for repo in repos:
        publications.append(publish(cfg, publisher, repo))
    # Same publisher, but each publication points at its own repo version.
    self.assertEqual(
        publications[0]['publisher'],
        publications[1]['publisher']
    )
    self.assertNotEqual(
        publications[0]['repository_version'],
        publications[1]['repository_version']
    )
def gen_rpm_remote(url=None, **kwargs):
    """Return a semi-random dict for use in creating a RPM remote.

    :param url: The URL of an external content source.
    """
    # Fall back to the unsigned fixture when no URL is supplied.
    remote_url = RPM_UNSIGNED_FIXTURE_URL if url is None else url
    return gen_remote(remote_url, **kwargs)
def test_all(self):
    """Verify publisher and remote can be used with different repos.

    This test explores the design choice stated in `Pulp #3341`_ that
    remove the FK from publishers and remotes to repository.
    Allowing remotes and publishers to be used with different
    repositories.

    .. _Pulp #3341: https://pulp.plan.io/issues/3341

    Do the following:

    1. Create an remote, and a publisher.
    2. Create 2 repositories.
    3. Sync both repositories using the same remote.
    4. Assert that the two repositories have the same contents.
    5. Publish both repositories using the same publisher.
    6. Assert that each generated publication has the same publisher, but
       are associated with different repositories.
    """
    cfg = config.get_config()

    # Create an remote and publisher.
    client = api.Client(cfg, api.json_handler)
    body = gen_remote(urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST'))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(client.delete, publisher['_href'])

    # Create and sync repos.
    repos = []
    for _ in range(2):
        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])
        sync(cfg, remote, repo)
        repos.append(client.get(repo['_href']))

    # Compare contents of repositories.
    contents = []
    for repo in repos:
        contents.append(get_content(repo))
    self.assertEqual(
        {content['_href'] for content in contents[0]},
        {content['_href'] for content in contents[1]},
    )

    # Publish repositories.
    publications = []
    for repo in repos:
        publications.append(publish(cfg, publisher, repo))
    # Same publisher, but each publication points at its own repo version.
    self.assertEqual(publications[0]['publisher'], publications[1]['publisher'])
    self.assertNotEqual(
        publications[0]['repository_version'],
        publications[1]['repository_version'])
def gen_docker_remote(**kwargs):
    """Return a semi-random dict for use in creating a docker Remote.

    :param url: The URL of an external content source.
    """
    # Docker remotes additionally require an upstream repository name;
    # caller-supplied kwargs win over the default.
    remote = gen_remote(DOCKER_FIXTURE_URL)
    remote.update({'upstream_name': 'busybox', **kwargs})
    return remote
def gen_maven_remote(**kwargs):
    """Return a semi-random dict for use in creating a maven Remote.

    :param url: The URL of an external content source.
    """
    # Merge any caller-supplied overrides on top of the base remote dict.
    remote = gen_remote(MAVEN_FIXTURE_URL)
    remote.update(kwargs)
    return remote
def gen_container_remote(url=REGISTRY_V2_FEED_URL, **kwargs):
    """Return a semi-random dict for use in creating a container Remote.

    :param url: The URL of an external content source.
    """
    # Pull the upstream name out of kwargs so it is not passed twice.
    upstream_name = kwargs.pop("upstream_name", PULP_HELLO_WORLD_REPO)
    return gen_remote(url, upstream_name=upstream_name, **kwargs)
def gen_gem_remote(**kwargs):
    """Return a semi-random dict for use in creating a gem Remote.

    :param url: The URL of an external content source.
    """
    # Merge any caller-supplied overrides on top of the base remote dict.
    remote = gen_remote(GEM_FIXTURE_URL)
    remote.update(kwargs)
    return remote
def test_all(self):
    """Verify the set up of parameters related to auto distribution.

    This test targets the following issues:

    * `Pulp #3295 <https://pulp.plan.io/issues/3295>`_
    * `Pulp #3392 <https://pulp.plan.io/issues/3392>`_
    * `Pulp #3394 <https://pulp.plan.io/issues/3394>`_
    * `Pulp #3671 <https://pulp.plan.io/issues/3671>`_
    * `Pulp Smash #883 <https://github.com/PulpQE/pulp-smash/issues/883>`_
    * `Pulp Smash #917 <https://github.com/PulpQE/pulp-smash/issues/917>`_
    """
    # Create a repository and a publisher.
    repo = self.client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo['_href'])
    publisher = self.client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(self.client.delete, publisher['_href'])

    # Create a distribution.
    self.try_create_distribution(publisher=publisher['_href'])
    self.try_create_distribution(repository=repo['_href'])
    body = gen_distribution()
    body['publisher'] = publisher['_href']
    body['repository'] = repo['_href']
    response_dict = self.client.post(DISTRIBUTION_PATH, body)
    # Distribution creation is asynchronous: resolve the spawned task to
    # find the created distribution's href.
    dist_task = self.client.get(response_dict['task'])
    distribution_href = dist_task['created_resources'][0]
    distribution = self.client.get(distribution_href)
    self.addCleanup(self.client.delete, distribution['_href'])

    # Update the distribution.
    self.try_update_distribution(distribution, publisher=None)
    self.try_update_distribution(distribution, repository=None)
    self.client.patch(distribution['_href'], {
        'publisher': None,
        'repository': None,
    })
    distribution = self.client.get(distribution['_href'])
    self.assertIsNone(distribution['publisher'], distribution)
    self.assertIsNone(distribution['repository'], distribution)

    # Publish the repository. Assert that distribution does not point to
    # the new publication (because publisher and repository are unset).
    remote = self.client.post(
        FILE_REMOTE_PATH,
        gen_remote(FILE_FIXTURE_MANIFEST_URL),
    )
    self.addCleanup(self.client.delete, remote['_href'])
    sync(self.cfg, remote, repo)
    publication = publish(self.cfg, publisher, repo)
    self.addCleanup(self.client.delete, publication['_href'])
    distribution = self.client.get(distribution['_href'])
    self.assertNotEqual(distribution['publication'], publication['_href'])
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication without supplying a repository_version (i.e.
       take the latest ``repository_version``).
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest
       ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    delete_orphans()
    client = api.Client(cfg, api.json_handler)
    body = gen_remote(fixture_u1.url, cookbooks={fixture_u1.example1_name: ""})
    remote = client.post(COOKBOOK_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote["pulp_href"])
    repo = client.post(COOKBOOK_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["pulp_href"])
    sync(cfg, remote, repo, mirror=True)
    repo = client.get(repo["pulp_href"])
    repo_content = get_cookbook_content(repo)
    self.assertTrue(repo_content)

    # Step 1: add units one at a time so each addition creates a new
    # repository version.
    repo = client.post(COOKBOOK_REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["pulp_href"])
    for cookbook in repo_content:
        modify_repo(cfg, repo, add_units=[cookbook])
    version_hrefs = tuple(ver["pulp_href"] for ver in get_versions(repo))
    non_latest = choice(version_hrefs[:-1])
    # Step 2
    publication = create_publication(cfg, repo)
    # Step 3
    self.assertEqual(publication["repository_version"], version_hrefs[-1])
    # Step 4
    publication = create_publication(cfg, repo, version_href=non_latest)
    # Step 5
    self.assertEqual(publication["repository_version"], non_latest)
    # Step 6: repository and repository_version are mutually exclusive.
    with self.assertRaises(HTTPError):
        body = {"repository": repo["pulp_href"], "repository_version": non_latest}
        client.post(COOKBOOK_PUBLICATION_PATH, body)
def gen_file_remote(url=None, **kwargs):
    """Return a semi-random dict for use in creating a file Remote.

    :param url: The URL of an external content source.
    """
    # Fall back to the fixture manifest when no URL is supplied.
    remote_url = FILE_FIXTURE_MANIFEST_URL if url is None else url
    return gen_remote(remote_url, **kwargs)
def gen_ansible_remote(url=ANSIBLE_FIXTURE_URL, **kwargs):
    """Return a semi-random dict for use in creating a ansible Remote.

    :param url: The URL of an external content source.
    """
    # Default to a modest rate limit (unless the caller overrides it) so
    # tests do not hammer the fixture server.
    kwargs.setdefault("rate_limit", 10)
    return gen_remote(url, **kwargs)
def test_all(self):
    """Test whether a particular repository version can be published.

    1. Create a repository with at least 2 repository versions.
    2. Create a publication by supplying the latest ``repository_version``.
    3. Assert that the publication ``repository_version`` attribute points
       to the latest repository version.
    4. Create a publication by supplying the non-latest
       ``repository_version``.
    5. Assert that the publication ``repository_version`` attribute points
       to the supplied repository version.
    6. Assert that an exception is raised when providing two different
       repository versions to be published at same time.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    body = gen_remote(urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST'))
    remote = client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(client.delete, remote['_href'])
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    sync(cfg, remote, repo)
    publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(client.delete, publisher['_href'])

    # Step 1: add units one at a time so each addition creates a new
    # repository version.
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    for file_content in client.get(FILE_CONTENT_PATH)['results']:
        client.post(
            repo['_versions_href'],
            {'add_content_units': [file_content['_href']]})
    version_hrefs = tuple(ver['_href'] for ver in get_versions(repo))
    non_latest = choice(version_hrefs[:-1])
    # Step 2
    publication = publish(cfg, publisher, repo)
    # Step 3
    self.assertEqual(publication['repository_version'], version_hrefs[-1])
    # Step 4
    publication = publish(cfg, publisher, repo, non_latest)
    # Step 5
    self.assertEqual(publication['repository_version'], non_latest)
    # Step 6: repository and repository_version are mutually exclusive.
    with self.assertRaises(HTTPError):
        body = {
            'repository': repo['_href'],
            'repository_version': non_latest
        }
        client.post(urljoin(publisher['_href'], 'publish/'), body)
def test_02_create_same_name(self):
    """Try to create a second remote with an identical name.

    See: `Pulp Smash #1055
    <https://github.com/PulpQE/pulp-smash/issues/1055>`_.
    """
    # Reuse the existing remote's name; the server must reject the duplicate.
    body = dict(gen_remote(RPM_SIGNED_FIXTURE_URL), name=self.remote['name'])
    with self.assertRaises(HTTPError):
        self.client.post(RPM_REMOTE_PATH, body)
def gen_plugin_template_remote(**kwargs):
    """Return a semi-random dict for use in creating a plugin_template Remote.

    :param url: The URL of an external content source.
    """
    # FIXME: Add any fields specific to a plugin_template remote here
    remote = gen_remote(PLUGIN_TEMPLATE_FIXTURE_URL)
    remote.update(kwargs)
    return remote
def create_and_sync_repo(self, cfg, client, policy):
    """Create a repo plus a fixture_u1 remote and mirror-sync the repo.

    :param cfg: Information about the Pulp application under test.
    :param client: API client used for all REST calls.
    :param policy: The download policy to configure on the remote.
    :returns: The refreshed repo dict, after the sync completed.
    """
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo["_href"])
    remote = client.post(
        COOKBOOK_REMOTE_PATH, gen_remote(fixture_u1.url, policy=policy))
    self.addCleanup(client.delete, remote["_href"])
    sync(cfg, remote, repo, mirror=True)
    return client.get(repo["_href"])
def do_test(self, url, **remote_kwargs):
    """Sync a repository given ``url`` on the remote."""
    repo = self.client.post(COOKBOOK_REPO_PATH, gen_repo())
    self.addCleanup(self.client.delete, repo["pulp_href"])
    remote = self.client.post(
        COOKBOOK_REMOTE_PATH, gen_remote(url=url, **remote_kwargs))
    self.addCleanup(self.client.delete, remote["pulp_href"])
    # The sync is expected to fail; hand the captured report to the caller.
    with self.assertRaises(TaskReportError) as context:
        sync(self.cfg, remote, repo, mirror=True)
    return context
def test_02_create_same_name(self):
    """Try to create a second remote with an identical name.

    See: `Pulp Smash #1055
    <https://github.com/PulpQE/pulp-smash/issues/1055>`_.
    """
    # Reuse the existing remote's name; the server must reject the duplicate.
    body = dict(gen_remote(fixture_u1.url), name=self.remote["name"])
    with self.assertRaises(HTTPError):
        self.client.post(COOKBOOK_REMOTE_PATH, body)
def gen_rpm_remote(**kwargs):
    """Return a semi-random dict for use in creating a rpm Remote.

    :param url: The URL of an external content source.
    """
    # Merge any caller-supplied overrides on top of the base remote dict.
    remote = gen_remote(RPM_UNSIGNED_FIXTURE_URL)
    remote.update(kwargs)
    return remote
def test_all(self):
    """Verify whether is possible to create a remote without a URL.

    This test targets the following issues:

    * `Pulp #3395 <https://pulp.plan.io/issues/3395>`_
    * `Pulp Smash #984 <https://github.com/PulpQE/pulp-smash/issues/984>`_
    """
    # Build a valid body, then strip the mandatory 'url' field.
    body = gen_remote(utils.uuid4())
    body.pop('url')
    with self.assertRaises(HTTPError):
        api.Client(config.get_config()).post(RPM_REMOTE_PATH, body)
def gen_deb_remote(**kwargs):
    """Return a semi-random dict for use in creating a deb Remote.

    :param url: The URL of an external content source.
    """
    # Deb remotes additionally need the distributions to sync; caller
    # supplied kwargs win over the default.
    remote = gen_remote(DEB_FIXTURE_URL)
    remote.update({'distributions': DEB_FIXTURE_RELEASE, **kwargs})
    return remote
def test_all(self): """Verify the set up of parameters related to auto distribution. This test targets the following issues: * `Pulp #3295 <https://pulp.plan.io/issues/3295>`_ * `Pulp #3392 <https://pulp.plan.io/issues/3392>`_ * `Pulp #3394 <https://pulp.plan.io/issues/3394>`_ * `Pulp #3671 <https://pulp.plan.io/issues/3671>`_ * `Pulp Smash #883 <https://github.com/PulpQE/pulp-smash/issues/883>`_ * `Pulp Smash #917 <https://github.com/PulpQE/pulp-smash/issues/917>`_ """ # Create a repository and a publisher. repo = self.client.post(REPO_PATH, gen_repo()) self.addCleanup(self.client.delete, repo['_href']) publisher = self.client.post(FILE_PUBLISHER_PATH, gen_publisher()) self.addCleanup(self.client.delete, publisher['_href']) # Create a distribution. self.try_create_distribution(publisher=publisher['_href']) self.try_create_distribution(repository=repo['_href']) body = gen_distribution() body['publisher'] = publisher['_href'] body['repository'] = repo['_href'] distribution = self.client.post(DISTRIBUTION_PATH, body) self.addCleanup(self.client.delete, distribution['_href']) # Update the distribution. self.try_update_distribution(distribution, publisher=None) self.try_update_distribution(distribution, repository=None) distribution = self.client.patch(distribution['_href'], { 'publisher': None, 'repository': None, }) self.assertIsNone(distribution['publisher'], distribution) self.assertIsNone(distribution['repository'], distribution) # Publish the repository. Assert that distribution does not point to # the new publication (because publisher and repository are unset). remote = self.client.post( FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL), ) self.addCleanup(self.client.delete, remote['_href']) sync(self.cfg, remote, repo) publication = publish(self.cfg, publisher, repo) self.addCleanup(self.client.delete, publication['_href']) distribution = self.client.get(distribution['_href']) self.assertNotEqual(distribution['publication'], publication['_href'])
def _gen_verbose_remote():
    """Return a semi-random dict for use in defining a remote.

    Most tests create remotes with as few attributes as possible, so each
    test can target and attempt to break one specific feature. This module
    targets remotes themselves, so the opposite applies: populate as many
    attributes as possible.

    Note that 'username' and 'password' are write-only attributes.
    """
    remote = gen_remote(RPM_SIGNED_FIXTURE_URL)
    remote['password'] = utils.uuid4()
    remote['username'] = utils.uuid4()
    remote['validate'] = choice((False, True))
    return remote
def test_clean_orphan_content_unit(self):
    """Test whether orphan content units can be clean up.

    Do the following:

    1. Create, and sync a repo.
    2. Remove a content unit from the repo. This will create a second
       repository version, and create an orphan content unit.
    3. Assert that content unit that was removed from the repo and its
       artifact are present on disk.
    4. Delete orphans.
    5. Assert that the orphan content unit was cleaned up, and its artifact
       is not present on disk.
    """
    repo = self.api_client.post(REPO_PATH, gen_repo())
    self.addCleanup(self.api_client.delete, repo['_href'])
    body = gen_remote(urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST'))
    remote = self.api_client.post(FILE_REMOTE_PATH, body)
    self.addCleanup(self.api_client.delete, remote['_href'])
    sync(self.cfg, remote, repo)
    repo = self.api_client.get(repo['_href'])
    # Pick any synced unit; which one does not matter for orphan cleanup.
    content = choice(get_content(repo))
    # Create an orphan content unit.
    self.api_client.post(
        repo['_versions_href'],
        {'remove_content_units': [content['_href']]}
    )
    # Verify that the artifact is present on disk.
    # `ls` exits non-zero if the path is missing, which run() would raise.
    artifact_path = self.api_client.get(content['artifact'])['file']
    cmd = self.sudo + ('ls', artifact_path)
    self.cli_client.run(cmd)
    # Delete first repo version. The previous removed content unit will be
    # an orphan.
    delete_version(repo, get_versions(repo)[0]['_href'])
    content_units = self.api_client.get(FILE_CONTENT_PATH)['results']
    self.assertIn(content, content_units)
    delete_orphans()
    content_units = self.api_client.get(FILE_CONTENT_PATH)['results']
    self.assertNotIn(content, content_units)
    # Verify that the artifact was removed from disk: the same `ls` must
    # now fail.
    with self.assertRaises(CalledProcessError):
        self.cli_client.run(cmd)
def gen_python_remote(url=PYTHON_FIXTURES_URL, includes=None, **kwargs):
    """
    Return a semi-random dict for use in creating a python Remote.

    Kwargs:
        url (str): The URL of a Python remote repository.
        includes (iterable): Project specifier dicts; defaults to
            ``PYTHON_XS_PROJECT_SPECIFIER`` when omitted.
        **kwargs: Extra fields to set on the Remote.

    """
    if includes is None:
        includes = PYTHON_XS_PROJECT_SPECIFIER
    remote = gen_remote(url)
    # Remote also supports "excludes" and "prereleases".
    remote.update({'includes': includes, **kwargs})
    return remote
def test_all(self):
    """Verify that the content of a repository version is immutable.

    Steps:

    1. Create a repository that has at least one repository version.
    2. Attempt to update the content of that repository version.
    3. Assert that an HTTP exception is raised.
    4. Assert that the repository version was not updated.
    """
    cfg = config.get_config()
    api_client = api.Client(cfg, api.json_handler)

    repo = api_client.post(REPO_PATH, gen_repo())
    self.addCleanup(api_client.delete, repo['_href'])

    remote = api_client.post(
        FILE_REMOTE_PATH,
        gen_remote(urljoin(FILE_FIXTURE_URL, 'PULP_MANIFEST')),
    )
    self.addCleanup(api_client.delete, remote['_href'])

    sync(cfg, remote, repo)
    version_href = api_client.get(repo['_href'])['_latest_version_href']

    # POSTing to an existing version endpoint would mutate it; the API
    # must reject the request.
    with self.assertRaises(HTTPError):
        api_client.post(version_href)
    repo = api_client.get(repo['_href'])
    self.assertEqual(version_href, repo['_latest_version_href'])
def test_all(self):
    """Test content promotion for a distribution.

    This test targets the following issue:

    * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_

    Do the following:

    1. Create a repository that has at least one repository version.
    2. Create a publisher, and publication.
    3. Create 2 distributions - using the same publication. Those
       distributions will have different ``base_path``.
    4. Assert that distributions have the same publication.
    5. Select a content unit. Download that content unit from Pulp using
       the two different distributions. Assert that content unit has the
       same checksum when fetched from different distributions.
    """
    cfg = config.get_config()
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPO_PATH, gen_repo())
    self.addCleanup(client.delete, repo['_href'])
    remote = client.post(
        FILE_REMOTE_PATH, gen_remote(FILE_FIXTURE_MANIFEST_URL)
    )
    self.addCleanup(client.delete, remote['_href'])
    sync(cfg, remote, repo)
    repo = client.get(repo['_href'])
    publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
    self.addCleanup(client.delete, publisher['_href'])
    publication = publish(cfg, publisher, repo)
    self.addCleanup(client.delete, publication['_href'])
    # Two distributions share one publication; gen_distribution()
    # presumably randomizes base_path, giving each a distinct URL.
    distributions = []
    for _ in range(2):
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.post(DISTRIBUTION_PATH, body)
        distributions.append(distribution)
        self.addCleanup(client.delete, distribution['_href'])
    self.assertEqual(
        distributions[0]['publication'],
        distributions[1]['publication'],
        distributions
    )
    # Build an absolute download URL for the same unit under each
    # distribution's base_url.
    unit_urls = []
    unit_path = get_added_content(repo)[FILE_CONTENT_NAME][0]['relative_path']
    for distribution in distributions:
        unit_url = cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '/'
        unit_urls.append(urljoin(unit_url, unit_path))
    # Switch to safe_handler so .content (raw bytes) is available.
    client.response_handler = api.safe_handler
    self.assertEqual(
        hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(),
        hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(),
        unit_urls,
    )