Example no. 1
    def test_positive_remove_contentguard(self):
        """Assert that content can be download without guard if it is removed.

        1. Create a protected distribution using the self.contentguard
        2. Assert content cannot be downloaded without keys
        3. Remove the contentguard
        4. Assert content can be downloaded without keys
        """
        # Create a protected distribution
        distribution = self.client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH,
            gen_distribution(publication=self.publication['_href'],
                             content_guard=self.certguard['_href']))
        self.addCleanup(self.client.delete, distribution['_href'])

        # Pick a filename
        unit_path = choice(get_file_content_paths(self.repo))

        # Try to download it without the SSL-CLIENT-CERTIFICATE
        with self.assertRaises(HTTPError):
            download_content_unit(self.cfg, distribution, unit_path)

        # Update distribution removing the guard
        distribution = self.client.using_handler(api.task_handler).patch(
            distribution['_href'], {'content_guard': None})

        # Now content can be downloaded
        download_content_unit(self.cfg, distribution, unit_path)
    def test_reclaim_on_demand_content(self):
        """
        Test whether on_demand repository content can be reclaimed
        and then re-populated back after client request.
        """
        repo, distribution = self._repo_sync_distribute(policy="on_demand")

        artifacts_before_download = self.artifacts_api.list().count
        content = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]
        download_content_unit(self.cfg, distribution.to_dict(),
                              content["relative_path"])

        artifacts = self.artifacts_api.list().count
        self.assertGreater(artifacts, artifacts_before_download)

        # reclaim disk space
        reclaim_response = self.reclaim_api.reclaim(
            {"repo_hrefs": [repo.pulp_href]})
        monitor_task(reclaim_response.task)

        artifacts_after_reclaim = self.artifacts_api.list().count
        content = get_content(repo.to_dict())[FILE_CONTENT_NAME]
        download_content_unit(self.cfg, distribution.to_dict(),
                              content[0]["relative_path"])

        artifacts = self.artifacts_api.list().count
        self.assertGreater(artifacts, artifacts_after_reclaim)
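
The reclaim test above (and test_immediate_reclaim_becomes_on_demand further below) relies on a _repo_sync_distribute helper that is not shown. A minimal sketch of what such a helper might look like, reusing the file-plugin bindings and generator utilities from the other examples; the attribute names (self.remote_api, self.repo_api, self.distributions_api) are assumptions:

    def _repo_sync_distribute(self, policy="immediate"):
        """Hypothetical helper: create a remote, sync a repository, distribute it.

        This is a sketch only; it assumes file-plugin API client attributes
        named remote_api, repo_api and distributions_api, as in the examples
        above and below.
        """
        # Create a remote with the requested download policy.
        remote = self.remote_api.create(gen_file_remote(policy=policy))
        self.addCleanup(self.remote_api.delete, remote.pulp_href)

        # Create a repository that auto-publishes each new version.
        repo = self.repo_api.create(
            gen_repo(remote=remote.pulp_href, autopublish=True))
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        # Sync the repository and wait for the task to finish.
        sync_response = self.repo_api.sync(
            repo.pulp_href, RepositorySyncURL(remote=remote.pulp_href))
        monitor_task(sync_response.task)
        repo = self.repo_api.read(repo.pulp_href)

        # Distribute the repository so its content can be downloaded.
        distribution_response = self.distributions_api.create(
            gen_distribution(repository=repo.pulp_href))
        created = monitor_task(distribution_response.task).created_resources
        distribution = self.distributions_api.read(created[0])
        self.addCleanup(self.distributions_api.delete, distribution.pulp_href)

        return repo, distribution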
Example no. 3
    def test_content_remote_delete(self):
        """Assert that an HTTP error is raised when remote is deleted.

        Also verify that the content can be downloaded from Pulp once the
        remote is recreated and another sync is triggered.
        """
        cfg = config.get_config()
        delete_orphans(cfg)
        client = api.Client(cfg, api.page_handler)

        repo = client.post(FILE_REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['pulp_href'])

        body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES))
        remote = client.post(FILE_REMOTE_PATH, body)

        # Sync the repository using a lazy download policy.
        sync(cfg, remote, repo)
        repo = client.get(repo['pulp_href'])

        publication = create_file_publication(cfg, repo)
        self.addCleanup(client.delete, publication['pulp_href'])

        # Delete the remote.
        client.delete(remote['pulp_href'])

        body = gen_distribution()
        body['publication'] = publication['pulp_href']
        distribution = client.using_handler(api.task_handler).post(
            FILE_DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['pulp_href'])

        unit_path = choice([
            content_unit['relative_path']
            for content_unit in get_content(repo)[FILE_CONTENT_NAME]
        ])

        # Assert that an HTTP error is raised when trying to fetch content from
        # the distribution after the remote has been removed.
        with self.assertRaises(HTTPError) as ctx:
            download_content_unit(cfg, distribution, unit_path)
        for key in ('not', 'found'):
            self.assertIn(key, ctx.exception.response.reason.lower())

        # Recreating a remote and re-triggering a sync will cause these broken
        # units to recover again.
        body = gen_file_remote(policy=choice(ON_DEMAND_DOWNLOAD_POLICIES))
        remote = client.post(FILE_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['pulp_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['pulp_href'])

        content = download_content_unit(cfg, distribution, unit_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path))).hexdigest()

        self.assertEqual(pulp_hash, fixtures_hash)
Example no. 4
    def test_all(self):
        """Sync and publish an RPM repository and verify the checksum."""
        # Step 1
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])

        remote = self.client.post(RPM_REMOTE_PATH, gen_rpm_remote())
        self.addCleanup(self.client.delete, remote['_href'])

        # Step 2
        sync(self.cfg, remote, repo)
        repo = self.client.get(repo['_href'])

        self.assertIsNotNone(repo['_latest_version_href'])

        # Step 3
        publication = publish(self.cfg, repo)
        self.addCleanup(self.client.delete, publication['_href'])
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = self.client.using_handler(api.task_handler).post(
            RPM_DISTRIBUTION_PATH, body)
        self.addCleanup(self.client.delete, distribution['_href'])
        # Step 4
        repo_md = ElementTree.fromstring(
            download_content_unit(self.cfg, distribution,
                                  'repodata/repomd.xml'))
        update_info_content = ElementTree.fromstring(
            download_content_unit(self.cfg, distribution,
                                  self._get_updateinfo_xml_path(repo_md)))
        tags = {elem.tag for elem in update_info_content.iter()}
        self.assertNotIn('sum', tags, update_info_content)
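
The _get_updateinfo_xml_path helper used in Step 4 is not shown. A plausible sketch, assuming the standard repodata/repomd.xml layout and the xml.etree.ElementTree element produced in Step 4; the method name matches the call above, everything else is an assumption:

    @staticmethod
    def _get_updateinfo_xml_path(repo_md):
        """Hypothetical helper: return the location href of the updateinfo entry.

        Walks the parsed repomd.xml element looking for <data type="updateinfo">
        and returns the value of its <location href="..."> child.
        """
        repo_ns = "{http://linux.duke.edu/metadata/repo}"
        for data in repo_md.findall(repo_ns + "data"):
            if data.get("type") == "updateinfo":
                return data.find(repo_ns + "location").get("href")
        raise RuntimeError("No updateinfo entry found in repomd.xml")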
Example no. 5
    def test_01_sync(self):
        """Assert that syncing the repository triggers auto-publish and auto-distribution."""
        self.assertEqual(self.publications_api.list().count, 0)
        self.assertTrue(self.distribution.publication is None)

        # Sync the repository.
        repository_sync_data = RepositorySyncURL(remote=self.remote.pulp_href)
        sync_response = self.repo_api.sync(self.repo.pulp_href,
                                           repository_sync_data)
        task = monitor_task(sync_response.task)

        # Check that all the appropriate resources were created
        self.assertGreater(len(task.created_resources), 1)
        publications = self.publications_api.list()
        self.assertEqual(publications.count, 1)
        download_content_unit(self.cfg, self.distribution.to_dict(),
                              self.CUSTOM_MANIFEST)

        # Check that the publish settings were used
        publication = publications.results[0]
        self.assertEqual(publication.manifest, self.CUSTOM_MANIFEST)

        # Sync the repository again. Since there should be no new repository version, there
        # should be no new publications or distributions either.
        sync_response = self.repo_api.sync(self.repo.pulp_href,
                                           repository_sync_data)
        task = monitor_task(sync_response.task)

        self.assertEqual(len(task.created_resources), 0)
        self.assertEqual(self.publications_api.list().count, 1)
    def test_immediate_reclaim_becomes_on_demand(self):
        """Tests if immediate content becomes like on_demand content after reclaim."""
        repo, distribution = self._repo_sync_distribute()

        artifacts_before_reclaim = self.artifacts_api.list().count
        self.assertGreater(artifacts_before_reclaim, 0)
        content = get_content(repo.to_dict())[FILE_CONTENT_NAME][0]
        # Populate cache
        download_content_unit(self.cfg, distribution.to_dict(),
                              content["relative_path"])

        reclaim_response = self.reclaim_api.reclaim(
            {"repo_hrefs": [repo.pulp_href]})
        monitor_task(reclaim_response.task)

        artifacts_after_reclaim = self.artifacts_api.list().count
        self.assertLess(artifacts_after_reclaim, artifacts_before_reclaim)

        download_content_unit(self.cfg, distribution.to_dict(),
                              content["relative_path"])
        artifacts_after_download = self.artifacts_api.list().count
        # Downloading a reclaimed content will increase the artifact count by 1
        self.assertEqual(artifacts_after_download, artifacts_after_reclaim + 1)
        # But only 1 extra artifact will be downloaded, so still less than after immediate sync
        self.assertLess(artifacts_after_download, artifacts_before_reclaim)
    def test_nonpublished_content_not_served(self):
        """Verify content that hasn't been published is not served."""
        self.setup_download_test("immediate", publish=False)
        files = ["", "1.iso", "2.iso", "3.iso"]
        for file in files:
            with self.assertRaises(HTTPError, msg=f"{file}") as cm:
                download_content_unit(self.cfg, self.distribution.to_dict(), file)
            self.assertEqual(cm.exception.response.status_code, 404, f"{file}")
Example no. 8
    def test_remote_artifact_url_update(self):
        """Test that downloading on_demand content works after a repository layout change."""

        FILE_NAME = "1.iso"

        # 1. Create a remote, repository and distribution - remote URL has links that should 404
        remote_config = gen_file_remote(
            policy="on_demand",
            url=FILE_FIXTURE_WITH_MISSING_FILES_MANIFEST_URL)
        remote = self.remote_api.create(remote_config)
        self.addCleanup(self.remote_api.delete, remote.pulp_href)

        repo = self.repo_api.create(
            gen_repo(autopublish=True, remote=remote.pulp_href))
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        body = gen_distribution(repository=repo.pulp_href)
        distribution_response = self.distributions_api.create(body)
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = self.distributions_api.read(created_resources[0])
        self.addCleanup(self.distributions_api.delete, distribution.pulp_href)

        # 2. Sync the repository, verify that downloading artifacts fails
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)

        sync_response = self.repo_api.sync(repo.pulp_href,
                                           repository_sync_data)
        monitor_task(sync_response.task)

        with self.assertRaises(HTTPError):
            download_content_unit(self.cfg, distribution.to_dict(), FILE_NAME)

        # 3. Update the remote URL with one that works, sync again, check that downloading
        # artifacts works.
        update_response = self.remote_api.update(
            remote.pulp_href,
            gen_file_remote(policy="on_demand", url=FILE_FIXTURE_MANIFEST_URL))
        monitor_task(update_response.task)

        sync_response = self.repo_api.sync(repo.pulp_href,
                                           repository_sync_data)
        monitor_task(sync_response.task)

        content = download_content_unit(self.cfg, distribution.to_dict(),
                                        FILE_NAME)
        pulp_hash = hashlib.sha256(content).hexdigest()

        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(FILE_FIXTURE_URL, FILE_NAME))).hexdigest()

        self.assertEqual(pulp_hash, fixtures_hash)
Example no. 9
    def test_serving_acs_content(self):
        """Test serving of ACS content through the content app."""
        cfg = config.get_config()
        acs = self._create_acs()
        resp = self.file_acs_api.refresh(acs.pulp_href, acs)
        monitor_task_group(resp.task_group)

        remote = self.file_remote_api.create(
            gen_file_remote(FILE_MANIFEST_ONLY_FIXTURE_URL, policy="on_demand")
        )
        self.addCleanup(self.file_remote_api.delete, remote.pulp_href)

        repo = self.repo_api.create(gen_repo(remote=remote.pulp_href, autopublish=True))
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        distribution_response = self.distribution_api.create(
            gen_distribution(repository=repo.pulp_href)
        )
        created_resources = monitor_task(distribution_response.task).created_resources
        distribution = self.distribution_api.read(created_resources[0])
        self.addCleanup(self.distribution_api.delete, distribution.pulp_href)

        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = self.repo_api.read(repo.pulp_href)

        unit_path = choice(get_file_content_paths(repo.to_dict()))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(FILE_FIXTURE_URL, unit_path))
        ).hexdigest()
        content = download_content_unit(cfg, distribution.to_dict(), unit_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        self.assertEqual(fixtures_hash, pulp_hash)
Example no. 10
    def test_02_modify(self):
        """Assert that modifying the repository triggers auto-publish and auto-distribution."""
        self.assertEqual(self.publications_api.list().count, 0)
        self.assertTrue(self.distribution.publication is None)

        # Modify the repository by adding a content unit
        content = self.content_api.list().results[0].pulp_href
        modify_response = self.repo_api.modify(
            self.repo.pulp_href, {"add_content_units": [content]}
        )
        task = monitor_task(modify_response.task)

        # Check that all the appropriate resources were created
        self.assertGreater(len(task.created_resources), 1)
        self.assertEqual(self.publications_api.list().count, 1)
        download_content_unit(self.cfg, self.distribution.to_dict(), "simple/")
Example no. 11
    def do_test(self, policy):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(GEM_REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo["pulp_href"])

        body = gen_gem_remote()
        remote = client.post(GEM_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote["pulp_href"])

        sync(cfg, remote, repo)
        repo = client.get(repo["pulp_href"])

        # Create a publication.
        publication = create_gem_publication(cfg, repo)
        self.addCleanup(client.delete, publication["pulp_href"])

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication["pulp_href"]
        distribution = client.using_handler(api.task_handler).post(
            GEM_DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution["pulp_href"])

        # Pick a content unit, and download it from both Pulp Fixtures…
        unit_path = choice(get_gem_content_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(GEM_FIXTURE_URL, unit_path))).hexdigest()

        # …and Pulp.
        content = download_content_unit(cfg, distribution, unit_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        self.assertEqual(fixtures_hash, pulp_hash)
Example no. 12
    def test_pypi_json(self):
        """Check the basics of the 'pypi/{package_name}/json' endpoint.

        Steps:
            1. Create Repo and Remote to only sync shelf-reader
            2. Sync with immediate policy
            3. Publish and Distribute new Repo
            4. Access JSON endpoint and verify received JSON matches source
        """
        self.addCleanup(self.repo_api.delete, self.repo.pulp_href)
        body = gen_python_remote(includes=["shelf-reader"], policy="immediate")
        self.sync_to_remote(body, create=True)
        self.addCleanup(self.remote_api.delete, self.remote.pulp_href)
        distro = self.gen_pub_dist()
        rel_url = "pypi/shelf-reader/json"
        package_json = download_content_unit(self.cfg, distro.to_dict(),
                                             rel_url)
        package = json.loads(package_json)
        self.assertEqual(SHELF_PYTHON_JSON["last_serial"],
                         package["last_serial"])
        self.assertTrue(
            SHELF_PYTHON_JSON["info"].items() <= package["info"].items())
        self.assertEqual(len(SHELF_PYTHON_JSON["urls"]), len(package["urls"]))
        self.assert_download_info(SHELF_PYTHON_JSON["urls"], package["urls"],
                                  "Failed to match URLS")
        self.assertTrue(
            SHELF_PYTHON_JSON["releases"].keys() <= package["releases"].keys())
        for version in SHELF_PYTHON_JSON["releases"].keys():
            self.assert_download_info(SHELF_PYTHON_JSON["releases"][version],
                                      package["releases"][version],
                                      "Failed to match version")
Example no. 13
def set_distribution_base_path_and_download_a_content_unit_with_cert(
        file_distribution_href,
        base_path,
        file_repository_href,
        cert_path,
        content_path=None,
        url_encode=True):
    """
    Set the base path on the `distribution`, read the cert, urlencode it, and then request one unit.

    If `content_path` is set, that path will be requested, otherwise a random, valid content unit
    path will be selected from the FileRepository at `file_repository_href`.

    1. Set the distribution referred to by `file_distribution_href` base_path to `base_path`.
    2. Read the cert from the filesystem and urlencode it.
    3. Make a request to `content_path` if specified, or to a random content item present in the
        `file_repository_href` repository. The urlencoded cert is submitted as the `X-CLIENT-CERT`
        header when requesting content.

    Args:
        file_distribution_href: The distribution href that is to be updated. This must refer to a
            distribution of type `FileDistribution`.
        base_path: The base path to set on the `distribution`.
        file_repository_href: The href of the FileRepository from which a random, valid content
            unit path will be selected when `content_path` is not specified.
        cert_path: The file system path to the certificate to be used in the content request. This
            will be read from the filesystem and urlencoded before being submitted as the
            `X-CLIENT-CERT` header when downloading content.
        content_path: The path to the specific content unit to be fetched. This is the portion of
            the url after the distribution URL. It's optional, and if unspecified a random, valid
            content unit will be selected instead from the repository.
        url_encode: If true, the certificate data read will be urlencoded, otherwise it won't be.
            This is an optional param, and defaults to True.

    Returns:
        The downloaded data.

    """
    distribution = set_distribution_base_path(file_distribution_href, base_path)

    if content_path is None:
        file_client = gen_file_client()
        file_repos_api = RepositoriesFileApi(file_client)
        repo = file_repos_api.read(file_repository_href)
        content_path = choice(get_file_content_paths(repo.to_dict()))

    if url_encode:
        cert_data = read_cert_and_urlencode(cert_path)
    else:
        cert_data = read_cert(cert_path)

    return download_content_unit(
        config.get_config(),
        distribution.to_dict(),
        content_path,
        headers={'X-CLIENT-CERT': cert_data}
    )
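
For illustration, a hypothetical call to this helper from a test could look like the following; the hrefs, base path, and certificate path are placeholders rather than values taken from the examples above:

# Hypothetical usage; all hrefs and paths below are placeholders.
content = set_distribution_base_path_and_download_a_content_unit_with_cert(
    file_distribution_href=distribution.pulp_href,
    base_path="my-guarded-base-path",
    file_repository_href=repo.pulp_href,
    cert_path="certificates/client.pem",
    # content_path omitted: a random unit from the repository is requested.
)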
Example no. 14
    def test_negative_download_protected_content_without_keys(self):
        """Assert content protected by cert-guard cannot be downloaded.

        1. Create a protected distribution using the self.contentguard.
        2. Assert content cannot be downloaded without cert and key.
        """
        # Create a protected distribution
        distribution = self.client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH,
            gen_distribution(publication=self.publication['_href'],
                             content_guard=self.certguard['_href']))
        self.addCleanup(self.client.delete, distribution['_href'])

        # Pick a filename
        unit_path = choice(get_file_content_paths(self.repo))

        # Try to download it without the SSL-CLIENT-CERTIFICATE
        with self.assertRaises(HTTPError):
            download_content_unit(self.cfg, distribution, unit_path)
    def do_test_content_served(self):
        file_path = "1.iso"

        req1 = download_content_unit(self.cfg, self.distribution.to_dict(), file_path)
        req2 = download_content_unit(self.cfg, self.distribution.to_dict(), file_path)
        fixtures_hash = hashlib.sha256(utils.http_get(urljoin(FILE_URL, file_path))).hexdigest()

        first_dl_hash = hashlib.sha256(req1).hexdigest()
        second_dl_hash = hashlib.sha256(req2).hexdigest()

        self.assertEqual(first_dl_hash, fixtures_hash)
        self.assertEqual(first_dl_hash, second_dl_hash)

        manifest = download_content_unit(self.cfg, self.distribution.to_dict(), "PULP_MANIFEST")
        pulp_manifest = list(
            csv.DictReader(manifest.decode("utf-8").splitlines(), ("name", "checksum", "size"))
        )

        self.assertEqual(len(pulp_manifest), FILE_FIXTURE_COUNT, pulp_manifest)
Example no. 16
    def test_positive_add_contentguard_to_existing_distribution(self):
        """Assert adding contentguard to existing distribution works well.

        1. Create a distribution without protection
        2. Assert content can be downloaded
        3. Add contentguard to the distribution
        4. Assert content cannot be downloaded without key
        5. Assert content can be downloaded with key
        """
        # 1 unprotected distribution
        distribution = self.client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH,
            gen_distribution(publication=self.publication['_href']))
        self.addCleanup(self.client.delete, distribution['_href'])

        # Pick a filename
        unit_path = choice(get_file_content_paths(self.repo))

        # Download it without certificate
        download_content_unit(self.cfg, distribution, unit_path)

        # Update distribution adding the guard
        distribution = self.client.using_handler(api.task_handler).patch(
            distribution['_href'], {'content_guard': self.certguard['_href']})

        # Cannot download without key
        with self.assertRaises(HTTPError):
            download_content_unit(self.cfg, distribution, unit_path)

        # Try to download it passing the proper SSL-CLIENT-CERTIFICATE
        download_content_unit(
            self.cfg,
            distribution,
            unit_path,
            headers={'SSL-CLIENT-CERTIFICATE': self.client_cert})
Example no. 17
    def test_positive_download_protected_content_with_keys(self):
        """Assert content protected by cert-guard can be downloaded.

        1. Create a protected distribution using the self.contentguard.
        2. Assert content can be downloaded using the proper cert and key.
        """
        # Create a protected distribution
        distribution = self.client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH,
            gen_distribution(publication=self.publication['_href'],
                             content_guard=self.certguard['_href']))
        self.addCleanup(self.client.delete, distribution['_href'])

        # Pick a filename
        unit_path = choice(get_file_content_paths(self.repo))

        # Try to download it passing the proper SSL-CLIENT-CERTIFICATE
        download_content_unit(
            self.cfg,
            distribution,
            unit_path,
            headers={'SSL-CLIENT-CERTIFICATE': self.client_cert})
Example no. 18
    def test_autodistribute(self):
        """Test repo version retention with autopublish/autodistribute."""
        self._create_repo_versions({"retain_repo_versions": 1, "autopublish": True})

        # all but the last publication should be gone
        for publication in self.publications[:-1]:
            with self.assertRaises(ApiException) as ae:
                self.publication_api.read(publication.pulp_href)
            self.assertEqual(404, ae.exception.status)

        # check that the last publication is distributed
        manifest = download_content_unit(self.cfg, self.distro.to_dict(), "PULP_MANIFEST")
        self.assertEqual(manifest.decode("utf-8").count("\n"), len(self.content))
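
The _create_repo_versions helper used above is not shown. One rough way it could be written, assuming file content is already present on the server (as in the test_02_modify example) and reusing the API client attribute names seen nearby; every name here is an assumption:

    def _create_repo_versions(self, repo_kwargs):
        """Hypothetical helper: build several repo versions with autopublish on."""
        # Create the repository with the requested retention/autopublish settings.
        self.repo = self.repo_api.create(gen_repo(**repo_kwargs))
        self.addCleanup(self.repo_api.delete, self.repo.pulp_href)

        # Distribute the repository so the latest publication is served.
        distribution_response = self.distributions_api.create(
            gen_distribution(repository=self.repo.pulp_href))
        created = monitor_task(distribution_response.task).created_resources
        self.distro = self.distributions_api.read(created[0])
        self.addCleanup(self.distributions_api.delete, self.distro.pulp_href)

        # Add pre-existing content units one at a time; each modify call creates
        # a new repository version, and autopublish creates a publication for it.
        self.content = self.content_api.list().results
        self.publications = []
        for content_unit in self.content:
            modify_response = self.repo_api.modify(
                self.repo.pulp_href,
                {"add_content_units": [content_unit.pulp_href]})
            task = monitor_task(modify_response.task)
            # The publication created by autopublish appears in created_resources.
            for resource in task.created_resources:
                if "publications" in resource:
                    self.publications.append(self.publication_api.read(resource))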
Example no. 19
    def test_denial_when_no_client_header_submitted(self):
        """
        Assert denial when a client submits no X-CLIENT-CERT header.

        1. Configure the distribution with a valid base path.
        2. Attempt to download content.
        3. Assert a 403 Forbidden is returned.
        """
        distribution = set_distribution_base_path(
            self.distribution.pulp_href,
            self.DENIALS_BASE_PATH
        )

        content_path = ""

        with self.assertRaises(HTTPError) as raised_exception:
            download_content_unit(
                config.get_config(),
                distribution.to_dict(),
                content_path
            )
        self.assertEqual(raised_exception.exception.response.status_code, 403)
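
For contrast, the allowed case in this style of test submits the urlencoded certificate in the X-CLIENT-CERT header, along the lines of the fragment below (a sketch reusing read_cert_and_urlencode from Example no. 13; the certificate path and unit path are placeholders):

# Sketch only: the cert path must point at a certificate the content guard trusts.
cert_data = read_cert_and_urlencode("certificates/client.pem")
download_content_unit(
    config.get_config(),
    distribution.to_dict(),
    unit_path,  # a valid relative path within the distribution
    headers={"X-CLIENT-CERT": cert_data},
)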
Example no. 20
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        remote = self._create_remote()
        repo = self._create_repo_and_sync_with_remote(remote)
        pub = self._create_publication(repo)
        distro = self._create_distribution_from_publication(pub)
        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_paths = [
            choice(paths)
            for paths in get_python_content_paths(repo.to_dict()).values()
        ]
        fixtures_hashes = [
            hashlib.sha256(
                utils.http_get(
                    urljoin(urljoin(PYTHON_FIXTURE_URL, "packages/"),
                            unit_path[0]))).hexdigest()
            for unit_path in unit_paths
        ]

        # …and Pulp.
        pulp_hashes = []
        for unit_path in unit_paths:
            content = download_content_unit(cfg, distro.to_dict(),
                                            unit_path[1])
            pulp_hashes.append(hashlib.sha256(content).hexdigest())

        self.assertEqual(fixtures_hashes, pulp_hashes)
Example no. 21
    def test_all(self):
        """Sync and publish an RPM repository and verify the checksum."""
        # Step 1
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])

        remote = self.client.post(RPM_REMOTE_PATH, gen_rpm_remote())
        self.addCleanup(self.client.delete, remote['_href'])

        # Step 2
        sync(self.cfg, remote, repo)
        repo = self.client.get(repo['_href'])

        self.assertIsNotNone(repo['_latest_version_href'])

        # Step 3
        publication = publish(self.cfg, repo)
        self.addCleanup(self.client.delete, publication['_href'])
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = self.client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH, body
        )
        self.addCleanup(self.client.delete, distribution['_href'])
        # Step 4
        repo_md = ElementTree.fromstring(
            download_content_unit(self.cfg, distribution, 'repodata/repomd.xml')
        )
        update_info_content = ElementTree.fromstring(
            download_content_unit(
                self.cfg,
                distribution,
                self._get_updateinfo_xml_path(repo_md)
            )
        )
        tags = {elem.tag for elem in update_info_content.iter()}
        self.assertNotIn('sum', tags, update_info_content)
Example no. 22
    def test_publish(self):
        """Publish particular empty repository with no packages.

        1. Create a repository with given distribtuions.
        2. Create a publication.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Assert that Package Index File is not empty.
        5. Assert that there are no packages.
        """
        # Create a repository:
        repo = deb_repository_api.create(gen_repo())
        self.addCleanup(deb_repository_api.delete, repo.pulp_href)

        # Create a remote:
        body = gen_deb_remote(url=DEB_FIXTURE_URL, distributions="ginnungagap")
        remote = deb_remote_api.create(body)
        self.addCleanup(deb_remote_api.delete, remote.pulp_href)

        # Sync the repository:
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = deb_repository_api.sync(repo.pulp_href,
                                                repository_sync_data)
        monitor_task(sync_response.task)
        repo = deb_repository_api.read(repo.pulp_href)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))

        self.assertIsNotNone(repo.latest_version_href)

        # Create a publication:
        publish_data = DebAptPublication(repository=repo.pulp_href,
                                         **self._publication_extra_args())
        publish_response = deb_apt_publication_api.create(publish_data)
        publication_href = monitor_task(
            publish_response.task).created_resources[0]
        self.addCleanup(deb_apt_publication_api.delete, publication_href)
        publication = deb_apt_publication_api.read(publication_href)

        # Test the publication:
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        release = get_content(repo=publication.to_dict(),
                              version_href=publication.repository_version)

        package_index_paths = [
            "dists/ginnungagap/asgard/binary-ppc64/Packages",
            "dists/ginnungagap/jotunheimr/binary-armeb/Packages",
            "dists/ginnungagap/asgard/binary-armeb/Packages",
            "dists/ginnungagap/jotunheimr/binary-ppc64/Packages",
            "dists/default/all/binary-all/Packages",
        ]

        self.assertFalse(release[DEB_PACKAGE_NAME])
        self.assertTrue(release[DEB_PACKAGE_INDEX_NAME])
        self.assertEqual(
            len(package_index_paths) - 1, len(release[DEB_PACKAGE_INDEX_NAME]))

        # Create a distribution:
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = deb_distribution_api.create(body)
        distribution_href = monitor_task(
            distribution_response.task).created_resources[0]
        distribution = deb_distribution_api.read(distribution_href)
        self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

        # Check that the expected package indices are there:
        cfg = config.get_config()
        for package_index_path in package_index_paths:
            download_content_unit(cfg, distribution.to_dict(),
                                  package_index_path)
    def do_publish(self, expected_values):
        """Publish particular repository with missing package indices.

        1. Create a repository with missing package indices.
        2. Create a publication.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Assert that InRelease file path is equal to desired file path.
        5. Assert that the codename, suite and component are as expected.
        """
        # Create a repository:
        repo = deb_repository_api.create(gen_repo())
        self.addCleanup(deb_repository_api.delete, repo.pulp_href)

        # Create a remote:
        body = gen_deb_remote(
            url=DEB_MISSING_ARCH_DISTS_FIXTURE_URL,
            distributions=expected_values["distribution"],
            ignore_missing_package_indices=True,
        )
        remote = deb_remote_api.create(body)
        self.addCleanup(deb_remote_api.delete, remote.pulp_href)

        # Sync the repository:
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = deb_repository_api.sync(repo.pulp_href,
                                                repository_sync_data)
        monitor_task(sync_response.task)
        repo = deb_repository_api.read(repo.pulp_href)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))

        # Create a publication:
        publish_data = DebAptPublication(repository=repo.pulp_href,
                                         **self._publication_extra_args())
        publish_response = deb_apt_publication_api.create(publish_data)
        publication_href = monitor_task(
            publish_response.task).created_resources[0]
        self.addCleanup(deb_apt_publication_api.delete, publication_href)
        publication = deb_apt_publication_api.read(publication_href)

        # Test the publication:
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        release_file = get_content(repo=publication.to_dict(),
                                   version_href=publication.repository_version
                                   )[DEB_RELEASE_FILE_NAME][0]

        release_file_path = os.path.join(
            expected_values["release_file_folder"], "InRelease")
        self.assertEqual(release_file_path, release_file["relative_path"])
        self.assertEqual(expected_values["distribution"],
                         release_file["distribution"])
        self.assertEqual(expected_values["codename"], release_file["codename"])
        self.assertEqual(expected_values["suite"], release_file["suite"])

        release = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version)[DEB_RELEASE_NAME][0]

        self.assertEqual(expected_values["distribution"],
                         release["distribution"])
        self.assertEqual(expected_values["codename"], release["codename"])
        self.assertEqual(expected_values["suite"], release["suite"])

        components = get_content(repo=publication.to_dict(),
                                 version_href=publication.repository_version
                                 )[DEB_RELEASE_COMPONENT_NAME]

        self.assertEqual({c["component"]
                          for c in components},
                         set(expected_values["components"]))

        package_indices = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version
        )[DEB_PACKAGE_INDEX_NAME]

        # A Packages index listed in the Release file may be missing from the repository
        self.assertNotEqual(len(expected_values["package_index_paths"]),
                            len(package_indices))
        for package_index in package_indices:  # all existing Packages files are there
            is_true = False
            for package_index_expected in expected_values[
                    "package_index_paths"]:
                if package_index["relative_path"] == os.path.join(
                        package_index_expected, "Packages"):
                    is_true = True
            self.assertTrue(is_true)

        self.assertFalse(
            os.path.isdir(
                os.path.join(remote.url,
                             "dists/ragnarok/asgard/binary-armeb")))
        self.assertFalse(
            os.path.isdir(
                os.path.join(remote.url,
                             "dists/ragnarok/jotunheimr/binary-armeb")))

        # Create a distribution:
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = deb_distribution_api.create(body)
        distribution_href = monitor_task(
            distribution_response.task).created_resources[0]
        distribution = deb_distribution_api.read(distribution_href)
        self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

        # Check that the expected Release files and package indices are there:
        cfg = config.get_config()
        release_file_path = os.path.join(
            expected_values["release_file_folder"], "Release")
        download_content_unit(cfg, distribution.to_dict(), release_file_path)

        for package_index_path in expected_values["package_index_paths"]:
            download_content_unit(cfg, distribution.to_dict(),
                                  package_index_path + "/Packages")
Example no. 24
    def do_publish(self, expected_values, modus):
        """Publish particular repository in flat format.

        1. Create a repository in flat repo format.
        2. Create a publication.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Assert that Release file path is equal to desired file path.
        5. Assert that the codename, suite and component are as expected.
        """
        # Create a repository:
        repo = deb_repository_api.create(gen_repo())
        self.addCleanup(deb_repository_api.delete, repo.pulp_href)

        # Create a remote:
        body = gen_deb_remote(  # DEB_FLAT_REPO_FIXTURE_URL
            url=DEB_FLAT_REPO_FIXTURE_URL,
            distributions=expected_values["distribution"])
        remote = deb_remote_api.create(body)
        self.addCleanup(deb_remote_api.delete, remote.pulp_href)

        # Sync the repository:
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = deb_repository_api.sync(repo.pulp_href,
                                                repository_sync_data)
        monitor_task(sync_response.task)
        repo = deb_repository_api.read(repo.pulp_href)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))

        self.assertIsNotNone(repo.latest_version_href)

        # Create a publication:
        if modus == "verbatim":
            publication_api = deb_verbatim_publication_api
            Publication = DebVerbatimPublication
        else:
            publication_api = deb_apt_publication_api
            Publication = DebAptPublication

        publish_data = Publication(repository=repo.pulp_href,
                                   **self._publication_extra_args(modus))
        publish_response = publication_api.create(publish_data)
        publication_href = monitor_task(
            publish_response.task).created_resources[0]
        self.addCleanup(publication_api.delete, publication_href)
        publication = publication_api.read(publication_href)

        # Test the publication:
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        release_file = get_content(repo=publication.to_dict(),
                                   version_href=publication.repository_version
                                   )[DEB_RELEASE_FILE_NAME][0]

        release_file_path = os.path.join(
            expected_values["release_file_folder_sync"], "Release")
        self.assertEqual(release_file_path, release_file["relative_path"])
        self.assertEqual(expected_values["distribution"],
                         release_file["distribution"])
        self.assertEqual(expected_values["codename"], release_file["codename"])
        self.assertEqual(expected_values["suite"], release_file["suite"])

        release = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version)[DEB_RELEASE_NAME][0]

        self.assertEqual(expected_values["distribution"],
                         release["distribution"])
        self.assertEqual(expected_values["codename"], release["codename"])
        self.assertEqual(expected_values["suite"], release["suite"])

        components = get_content(repo=publication.to_dict(),
                                 version_href=publication.repository_version
                                 )[DEB_RELEASE_COMPONENT_NAME]

        self.assertEqual(len(expected_values["components"]), len(components))
        for component in components:
            self.assertIn(component["component"],
                          expected_values["components"])

        package_indices = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version
        )[DEB_PACKAGE_INDEX_NAME]

        self.assertEqual(len(expected_values["package_index_paths_sync"]),
                         len(package_indices))
        for package_index in package_indices:
            self.assertIn(package_index["relative_path"],
                          expected_values["package_index_paths_sync"])

        # Create a distribution:
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = deb_distribution_api.create(body)
        distribution_href = monitor_task(
            distribution_response.task).created_resources[0]
        distribution = deb_distribution_api.read(distribution_href)
        self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

        # Check that the expected Release files and package indices are there:
        cfg = config.get_config()
        release_file_path = os.path.join(
            expected_values["release_file_folder_dist"], "Release")
        download_content_unit(cfg, distribution.to_dict(), release_file_path)
        for package_index_path in expected_values["package_index_paths_dist"]:
            download_content_unit(cfg, distribution.to_dict(),
                                  package_index_path)
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = gen_galaxy_client()
        repo_api = RepositoriesGalaxyApi(client)
        remote_api = RemotesGalaxyApi(client)
        publications = PublicationsGalaxyApi(client)
        distributions = DistributionsGalaxyApi(client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_galaxy_remote()
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync a Repository
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)

        # Create a publication.
        publish_data = GalaxyGalaxyPublication(repository=repo.pulp_href)
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(publish_response.task)
        publication_href = created_resources[0]
        self.addCleanup(publications.delete, publication_href)

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = distributions.create(body)
        created_resources = monitor_task(distribution_response.task)
        distribution = distributions.read(created_resources[0])
        self.addCleanup(distributions.delete, distribution.pulp_href)

        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_paths = [choice(paths) for paths in get_galaxy_content_paths(repo.to_dict()).values()]
        fixtures_hashes = [
            hashlib.sha256(utils.http_get(urljoin(GALAXY_FIXTURE_URL, unit_path[0]))).hexdigest()
            for unit_path in unit_paths
        ]

        # …and Pulp.
        pulp_hashes = []
        cfg = config.get_config()
        for unit_path in unit_paths:
            content = download_content_unit(cfg, distribution.to_dict(), unit_path[1])
            pulp_hashes.append(hashlib.sha256(content).hexdigest())

        self.assertEqual(fixtures_hashes, pulp_hashes)
Example no. 26
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_rpm_remote()
        remote = client.post(RPM_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publication.
        publication = publish(cfg, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH, body
        )
        self.addCleanup(client.delete, distribution['_href'])

        # Pick a content unit, and download it from both Pulp Fixtures…
        unit_path = choice(get_rpm_package_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(RPM_UNSIGNED_FIXTURE_URL, unit_path))
        ).hexdigest()

        # …and Pulp.
        content = download_content_unit(cfg, distribution, unit_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        self.assertEqual(fixtures_hash, pulp_hash)