Example #1
    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg, api.json_handler)
        body = gen_distribution()
        body['base_path'] = body['base_path'].replace('-', '/')
        cls.distribution = cls.client.post(DISTRIBUTION_PATH, body)
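Every example in this listing builds its request body with ``gen_distribution()``, which is never shown here. A minimal sketch of what it plausibly returns, assuming it follows the usual pulp_smash convention of uuid-valued fields (the exact keys are an assumption):

# Sketch only: assumed shape of the helper used throughout this listing.
from pulp_smash import utils


def gen_distribution(**kwargs):
    """Return a semi-random dict for use in creating a distribution."""
    data = {'base_path': utils.uuid4(), 'name': utils.uuid4()}
    data.update(kwargs)
    return data

Under that assumption, ``utils.uuid4()`` yields a hyphenated UUID string, so the ``replace('-', '/')`` call in ``setUpClass`` above turns ``base_path`` into a nested, multi-segment path.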
Example #2
    def test_01_create_distribution(self):
        """Create a distribution."""
        body = gen_distribution()
        type(self).distribution = self.client.post(DISTRIBUTION_PATH, body)
        for key, val in body.items():
            with self.subTest(key=key):
                self.assertEqual(self.distribution[key], val)

    def test_04_fully_update(self):
        """Update a distribution using HTTP PUT."""
        body = gen_distribution()
        self.client.put(self.distribution['_href'], body)
        type(self).distribution = self.client.get(self.distribution['_href'])
        for key, val in body.items():
            with self.subTest(key=key):
                self.assertEqual(self.distribution[key], val)
    def try_create_distribution(self, **kwargs):
        """Unsuccessfully create a distribution.

        Merge the given kwargs into the body of the request.
        """
        body = gen_distribution()
        body.update(kwargs)
        with self.assertRaises(HTTPError):
            self.client.post(DOCKER_DISTRIBUTION_PATH, body)
    def test_02_create_same_name(self):
        """Try to create a second distribution with an identical name.

        See: `Pulp Smash #1055
        <https://github.com/PulpQE/pulp-smash/issues/1055>`_.
        """
        body = gen_distribution()
        body['name'] = self.distribution['name']
        with self.assertRaises(HTTPError):
            self.client.post(DOCKER_DISTRIBUTION_PATH, body)
    @classmethod
    def setUpClass(cls):
        """Create class-wide variables."""
        cls.cfg = config.get_config()
        cls.client = api.Client(cls.cfg, api.json_handler)
        body = gen_distribution()
        body['base_path'] = body['base_path'].replace('-', '/')
        response_dict = cls.client.post(DOCKER_DISTRIBUTION_PATH, body)
        dist_task = cls.client.get(response_dict['task'])
        distribution_href = dist_task['created_resources'][0]
        cls.distribution = cls.client.get(distribution_href)
    def test_negative_create_distribution_with_invalid_parameter(self):
        """Attempt to create distribution passing invalid parameter.

        Assert response returns an error 400 including ["Unexpected field"].
        """
        response = api.Client(self.cfg, api.echo_handler).post(
            DOCKER_DISTRIBUTION_PATH, gen_distribution(foo='bar')
        )
        assert response.status_code == 400
        assert response.json()['foo'] == ['Unexpected field']
Example #8
    def test_all(self):
        """Verify the set up of parameters related to auto distribution.

        This test targets the following issues:

        * `Pulp #3295 <https://pulp.plan.io/issues/3295>`_
        * `Pulp #3392 <https://pulp.plan.io/issues/3392>`_
        * `Pulp #3394 <https://pulp.plan.io/issues/3394>`_
        * `Pulp #3671 <https://pulp.plan.io/issues/3671>`_
        * `Pulp Smash #883 <https://github.com/PulpQE/pulp-smash/issues/883>`_
        * `Pulp Smash #917 <https://github.com/PulpQE/pulp-smash/issues/917>`_
        """
        # Create a repository and a publisher.
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])

        publisher = self.client.post(FILE_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(self.client.delete, publisher['_href'])

        # Create a distribution.
        self.try_create_distribution(publisher=publisher['_href'])
        self.try_create_distribution(repository=repo['_href'])
        body = gen_distribution()
        body['publisher'] = publisher['_href']
        body['repository'] = repo['_href']
        distribution = self.client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(self.client.delete, distribution['_href'])

        # Update the distribution.
        self.try_update_distribution(distribution, publisher=None)
        self.try_update_distribution(distribution, repository=None)
        distribution = self.client.patch(distribution['_href'], {
            'publisher': None,
            'repository': None,
        })
        self.assertIsNone(distribution['publisher'], distribution)
        self.assertIsNone(distribution['repository'], distribution)

        # Publish the repository. Assert that distribution does not point to
        # the new publication (because publisher and repository are unset).
        remote = self.client.post(
            FILE_REMOTE_PATH,
            gen_remote(FILE_FIXTURE_MANIFEST_URL),
        )
        self.addCleanup(self.client.delete, remote['_href'])

        sync(self.cfg, remote, repo)

        publication = publish(self.cfg, publisher, repo)
        self.addCleanup(self.client.delete, publication['_href'])

        distribution = self.client.get(distribution['_href'])
        self.assertNotEqual(distribution['publication'], publication['_href'])
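The ``try_update_distribution`` helper called above is not included in this listing. By analogy with the ``try_create_distribution`` helper shown earlier, a plausible sketch (an assumption, not the original code):

    # Assumed counterpart of try_create_distribution: merge the given kwargs
    # into an HTTP PATCH request and expect it to fail.
    def try_update_distribution(self, distribution, **kwargs):
        """Unsuccessfully update a distribution."""
        with self.assertRaises(HTTPError):
            self.client.patch(distribution['_href'], kwargs)

Example #9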
    def test_01_create_distribution(self):
        """Create a distribution."""
        body = gen_distribution()
        response_dict = self.client.post(DOCKER_DISTRIBUTION_PATH, body)
        dist_task = self.client.get(response_dict['task'])
        distribution_href = dist_task['created_resources'][0]
        type(self).distribution = self.client.get(distribution_href)
        for key, val in body.items():
            with self.subTest(key=key):
                self.assertEqual(self.distribution[key], val)
Example #10
    def test_all(self):
        """Verify whether package manager can consume content from Pulp.

        This test targets the following issue:

        `Pulp #3204 <https://pulp.plan.io/issues/3204>`_
        """
        cfg = config.get_config()
        try:
            cli.PackageManager._get_package_manager(cfg)  # pylint:disable=protected-access
        except NoKnownPackageManagerError:
            raise unittest.SkipTest('This test requires dnf or yum.')
        client = api.Client(cfg, api.json_handler)
        body = gen_rpm_remote()
        remote = client.post(RPM_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        sync(cfg, remote, repo)

        publisher = client.post(RPM_PUBLISHER_PATH, gen_rpm_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['_href'])

        repo_path = gen_yum_config_file(
            cfg,
            baseurl=urljoin(cfg.get_base_url(), urljoin(
                'pulp/content/',
                distribution['base_path']
            )),
            name=repo['name'],
            repositoryid=repo['name']
        )

        cli_client = cli.Client(cfg)
        self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
        rpm_name = 'walrus'
        pkg_mgr = cli.PackageManager(cfg)
        pkg_mgr.install(rpm_name)
        self.addCleanup(pkg_mgr.uninstall, rpm_name)
        rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
        self.assertEqual(rpm_name, rpm[0])
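The ``baseurl`` passed to ``gen_yum_config_file`` above is built from two nested ``urljoin`` calls, which are easy to misread because ``urljoin`` drops everything after the last ``/`` in its base. A small worked example with assumed values:

# Worked example of the nested urljoin above; the concrete values are assumptions.
from urllib.parse import urljoin

base_url = 'https://pulp.example.com/'       # assumed cfg.get_base_url()
base_path = 'a1b2/c3d4'                      # assumed distribution['base_path']
inner = urljoin('pulp/content/', base_path)  # 'pulp/content/a1b2/c3d4'
print(urljoin(base_url, inner))              # 'https://pulp.example.com/pulp/content/a1b2/c3d4'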
Example #11
    def test_05_read_publications(self):
        """Read a publication by its distribution."""
        body = gen_distribution()
        body['publication'] = self.publication['_href']
        distribution = self.client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(self.client.delete, distribution['_href'])

        self.publication.update(self.client.get(self.publication['_href']))
        publications = self.client.get(PUBLICATIONS_PATH, params={
            'distributions': distribution['_href']
        })
        self.assertEqual(len(publications), 1, publications)
        for key, val in self.publication.items():
            with self.subTest(key=key):
                self.assertEqual(publications[0][key], val)
Example #12
    def test_all(self):
        """Sync and publish an RPM repository and verify the checksum."""
        # Step 1
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])

        remote = self.client.post(RPM_REMOTE_PATH, gen_rpm_remote())
        self.addCleanup(self.client.delete, remote['_href'])

        # Step 2
        sync(self.cfg, remote, repo)
        repo = self.client.get(repo['_href'])

        self.assertIsNotNone(repo['_latest_version_href'])

        # Step 3
        publication = publish(self.cfg, repo)
        self.addCleanup(self.client.delete, publication['_href'])
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = self.client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH, body
        )
        self.addCleanup(self.client.delete, distribution['_href'])
        # Step 4
        repo_md = ElementTree.fromstring(
            download_content_unit(self.cfg, distribution, 'repodata/repomd.xml')
        )
        update_info_content = ElementTree.fromstring(
            download_content_unit(
                self.cfg,
                distribution,
                self._get_updateinfo_xml_path(repo_md)
            )
        )
        tags = {elem.tag for elem in update_info_content.iter()}
        self.assertNotIn('sum', tags, update_info_content)
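The ``_get_updateinfo_xml_path`` helper used above is not part of this listing. A hedged sketch of how it could locate the updateinfo entry in ``repomd.xml``; the namespace below is the standard repomd one, but the helper's real implementation is assumed from its call site:

    # Sketch only: walk repomd.xml and return the relative path recorded for
    # the "updateinfo" metadata file.
    @staticmethod
    def _get_updateinfo_xml_path(repo_md):
        repo_ns = '{http://linux.duke.edu/metadata/repo}'
        for data in repo_md.findall(repo_ns + 'data'):
            if data.get('type') == 'updateinfo':
                return data.find(repo_ns + 'location').get('href')
        raise RuntimeError('updateinfo entry not found in repomd.xml')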
Example #13
    def test_all(self):
        """Test whether a particular repository version can be published.

        1. Create a repository with at least 2 repository versions.
        2. Create a publication by supplying the latest ``repository_version``.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Create a publication by supplying the non-latest ``repository_version``.
        5. Assert that the publication ``repository_version`` attribute points
           to the supplied repository version.
        6. Assert that an exception is raised when providing two different
           repository versions to be published at same time.
        """
        cfg = config.get_config()
        client = gen_rpm_client()
        repo_api = RepositoriesRpmApi(client)
        remote_api = RemotesRpmApi(client)
        publications = PublicationsRpmApi(client)
        distributions = DistributionsRpmApi(client)

        body = gen_rpm_remote()
        remote = remote_api.create(body)

        repo = repo_api.create(gen_repo())

        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)

        # Step 1
        repo = repo_api.read(repo.pulp_href)
        repo_content = get_content(
            repo.to_dict())[RPM_PACKAGE_CONTENT_NAME][:-1]
        for rpm_content in repo_content:
            modify_repo(cfg, repo.to_dict(), remove_units=[rpm_content])
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))
        non_latest = choice(version_hrefs[1:-1])

        # Step 2
        publish_data = RpmRpmPublication(repository=repo.pulp_href)
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(
            publish_response.task).created_resources
        publication_href = created_resources[0]
        publication = publications.read(publication_href)

        # Step 3
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        # Step 4
        publish_data.repository_version = non_latest
        publish_data.repository = None
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(
            publish_response.task).created_resources
        publication_href = created_resources[0]
        publication = publications.read(publication_href)

        # Step 5
        body = gen_distribution()
        body["base_path"] = "pulp_pre_upgrade_test"
        body["publication"] = publication.pulp_href

        distribution_response = distributions.create(body)
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = distributions.read(created_resources[0])

        # Step 6
        self.assertEqual(publication.repository_version, non_latest)

        # Step 7
        with self.assertRaises(ApiException):
            body = {
                "repository": repo.pulp_href,
                "repository_version": non_latest
            }
            publications.create(body)

        # Step 8
        url = (
            cfg.get_content_host_base_url() + "/pulp/content/pulp_pre_upgrade_test/"
        )
        self.assertEqual(url, distribution.base_url, url)
Example #14
    @classmethod
    def setUpClass(cls):
        """Create class-wide variables.

        1. Create a repository.
        2. Create a remote pointing to external registry.
        3. Sync the repository using the remote and re-read the repo data.
        4. Create a container distribution to serve the repository.
        5. Create another container distribution to serve the repository version.

        This tests targets the following issue:

        * `Pulp #4460 <https://pulp.plan.io/issues/4460>`_
        """
        cls.cfg = config.get_config()
        cls.registry_name = urlparse(cls.cfg.get_base_url()).netloc

        cls.client = api.Client(cls.cfg, api.code_handler)
        client_api = gen_container_client()
        cls.repositories_api = RepositoriesContainerApi(client_api)
        cls.remotes_api = RemotesContainerApi(client_api)
        cls.distributions_api = DistributionsContainerApi(client_api)

        cls.teardown_cleanups = []

        delete_orphans()

        with contextlib.ExitStack() as stack:
            # ensure tearDownClass runs if an error occurs here
            stack.callback(cls.tearDownClass)

            # Step 1
            _repo = cls.repositories_api.create(ContainerContainerRepository(**gen_repo()))
            cls.teardown_cleanups.append((cls.repositories_api.delete, _repo.pulp_href))

            # Step 2
            cls.remote = cls.remotes_api.create(gen_container_remote())
            cls.teardown_cleanups.append((cls.remotes_api.delete, cls.remote.pulp_href))

            # Step 3
            sync_data = RepositorySyncURL(remote=cls.remote.pulp_href)
            sync_response = cls.repositories_api.sync(_repo.pulp_href, sync_data)
            monitor_task(sync_response.task)
            cls.repo = cls.repositories_api.read(_repo.pulp_href)

            # Step 4.
            distribution_response = cls.distributions_api.create(
                ContainerContainerDistribution(**gen_distribution(repository=cls.repo.pulp_href))
            )
            created_resources = monitor_task(distribution_response.task).created_resources
            distribution = cls.distributions_api.read(created_resources[0])
            cls.distribution_with_repo = cls.distributions_api.read(distribution.pulp_href)
            cls.teardown_cleanups.append(
                (cls.distributions_api.delete, cls.distribution_with_repo.pulp_href)
            )

            # Step 5.
            distribution_response = cls.distributions_api.create(
                ContainerContainerDistribution(
                    **gen_distribution(repository_version=cls.repo.latest_version_href)
                )
            )
            created_resources = monitor_task(distribution_response.task).created_resources
            distribution = cls.distributions_api.read(created_resources[0])
            cls.distribution_with_repo_version = cls.distributions_api.read(distribution.pulp_href)
            cls.teardown_cleanups.append(
                (cls.distributions_api.delete, cls.distribution_with_repo_version.pulp_href)
            )

            # remove callback if everything goes well
            stack.pop_all()
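``setUpClass`` above records its cleanups in ``cls.teardown_cleanups`` and arms ``tearDownClass`` through the ``ExitStack`` callback, but the matching ``tearDownClass`` is not shown. Presumably it runs the recorded cleanups in reverse order; a sketch under that assumption:

    # Sketch only: run the recorded cleanups in reverse order, mirroring
    # addCleanup semantics.
    @classmethod
    def tearDownClass(cls):
        """Clean up class-wide resources."""
        for cleanup_function, argument in reversed(cls.teardown_cleanups):
            cleanup_function(argument)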
Example #15
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from the remote.
           NOTE: a content unit for docker is an ``image`` or a ``layer``.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_docker_remote()
        remote = client.post(DOCKER_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publisher.
        publisher = client.post(DOCKER_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        # Create a publication.
        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.post(DOCKER_DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['_href'])

        # Get local checksums for content synced from the remote registry
        checksums = [
            content['digest'] for content
            in get_content(repo)['docker.manifest-blob']
        ]

        # Assert that at least one layer was synced from remote:latest
        # and that its checksum matches the remote's.
        self.assertTrue(
            any(
                [
                    result['blobSum'] in checksums
                    for result in get_docker_hub_remote_blobsums()
                ]
            )
        )
Example #16
    def do_test(self, mirror):
        """Sync and publish an RPM repository and verify the metadata is what was expected."""
        from configparser import ConfigParser

        # 1. create repo and remote
        repo = self.repo_api.create(gen_repo(autopublish=not mirror))
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(RPM_KICKSTART_FIXTURE_URL, policy="on_demand")
        remote = self.remote_api.create(body)
        self.addCleanup(self.remote_api.delete, remote.pulp_href)

        # 2, 3. Sync and publish
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href,
                                                    mirror=mirror)
        sync_response = self.repo_api.sync(repo.pulp_href,
                                           repository_sync_data)
        created_resources = monitor_task(sync_response.task).created_resources

        publication_href = [
            r for r in created_resources if "publication" in r
        ][0]

        self.addCleanup(self.publications.delete, publication_href)

        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = self.distributions.create(body)
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = self.distributions.read(created_resources[0])
        self.addCleanup(self.distributions.delete, distribution.pulp_href)

        # 4. Download and parse the metadata.
        original_treeinfo = http_get(
            os.path.join(RPM_KICKSTART_FIXTURE_URL, ".treeinfo"))
        generated_treeinfo = http_get(
            os.path.join(distribution.base_url, ".treeinfo"))

        config = ConfigParser()
        config.optionxform = str  # by default it will cast keys to lower case
        config.read_string(original_treeinfo.decode("utf-8"))
        original_treeinfo = config._sections

        config = ConfigParser()
        config.optionxform = str  # by default it will cast keys to lower case
        config.read_string(generated_treeinfo.decode("utf-8"))
        generated_treeinfo = config._sections

        # 5, 6. Re-arrange the metadata so that it can be compared, and do the comparison.
        # TODO: These really should be in the same order they were in originally.
        # https://pulp.plan.io/issues/9208
        for metadata_dict in [original_treeinfo, generated_treeinfo]:
            metadata_dict["general"]["variants"] = ",".join(
                sorted(metadata_dict["general"]["variants"].split(",")))
            metadata_dict["tree"]["variants"] = ",".join(
                sorted(metadata_dict["tree"]["variants"].split(",")))

        diff = dictdiffer.diff(original_treeinfo, generated_treeinfo)
        differences = []

        # skip any differences that are "correct" i.e. rewritten "repository" and "packages" paths
        for d in diff:
            (diff_type, diff_name, _, new_value) = (d[0], d[1], d[2][0],
                                                    d[2][1])
            # ('change', 'variant-Land.packages', ('Packages', 'Land/Packages'))
            if diff_type == "change":
                if diff_name.endswith(".packages") or diff_name.endswith(
                        ".repository"):
                    # TODO: this is ignoring problems with the generated metadata
                    # https://pulp.plan.io/issues/9208
                    if "../" not in new_value:
                        continue

            differences.append(d)

        self.assertListEqual(differences, [], differences)

        # 7. Try downloading the files listed in the .treeinfo metadata, make sure they're
        # actually there.
        for path, checksum in original_treeinfo["checksums"].items():
            if path.startswith("fixtures"):
                # TODO: the .treeinfo metadata is actually wrong for these files, so we can't
                # check them because they won't be there.
                continue

            checksum_type, checksum = checksum.split(":")
            http_get(os.path.join(distribution.base_url, path))
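The loop above only proves that each non-fixture file listed in ``.treeinfo`` can be downloaded; the parsed ``checksum_type``/``checksum`` pair is never used. A small extension, not in the original test, that would also verify the digests, assuming the recorded types are valid ``hashlib`` algorithm names:

        # Not in the original test: also verify the digests recorded in
        # .treeinfo, assuming checksum_type names a hashlib algorithm.
        import hashlib

        for path, checksum in original_treeinfo["checksums"].items():
            if path.startswith("fixtures"):
                continue
            checksum_type, expected = checksum.split(":")
            content = http_get(os.path.join(distribution.base_url, path))
            self.assertEqual(
                hashlib.new(checksum_type, content).hexdigest(), expected
            )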
Example #17
    def do_test(self, url, policy="on_demand"):
        """Verify whether content served by pulp can be synced.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Sync another repository, using the first repository's distribution
           ``base_url`` as the remote url.

        """
        client = gen_rpm_client()
        repo_api = RepositoriesRpmApi(client)
        remote_api = RemotesRpmApi(client)
        publications = PublicationsRpmApi(client)
        distributions = DistributionsRpmApi(client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(url=url, policy=policy)
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        sync_task = tasks.read(sync_response.task)
        task_duration = sync_task.finished_at - sync_task.started_at
        waiting_time = sync_task.started_at - sync_task.pulp_created
        print(
            "\n->     Sync => Waiting time (s): {wait} | Service time (s): {service}".format(
                wait=waiting_time.total_seconds(), service=task_duration.total_seconds()
            )
        )
        repo = repo_api.read(repo.pulp_href)

        # Create a publication.
        publish_data = RpmRpmPublication(repository=repo.pulp_href)
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(publish_response.task).created_resources
        publication_href = created_resources[0]
        self.addCleanup(publications.delete, publication_href)

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = distributions.create(body)
        created_resources = monitor_task(distribution_response.task).created_resources
        distribution = distributions.read(created_resources[0])
        self.addCleanup(distributions.delete, distribution.pulp_href)

        # Create another repo pointing to distribution base_url
        repo2 = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo2.pulp_href)

        body = gen_rpm_remote(url=distribution.base_url, policy=policy)
        remote2 = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote2.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote2.pulp_href)
        sync_response = repo_api.sync(repo2.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        sync_task = tasks.read(sync_response.task)
        task_duration = sync_task.finished_at - sync_task.started_at
        waiting_time = sync_task.started_at - sync_task.pulp_created
        print(
            "\n->     Sync => Waiting time (s): {wait} | Service time (s): {service}".format(
                wait=waiting_time.total_seconds(), service=task_duration.total_seconds()
            )
        )
        repo2 = repo_api.read(repo2.pulp_href)

        summary = get_content_summary(repo.to_dict())
        summary2 = get_content_summary(repo2.to_dict())
        self.assertDictEqual(summary, summary2)

        added = get_added_content_summary(repo.to_dict())
        added2 = get_added_content_summary(repo2.to_dict())
        self.assertDictEqual(added, added2)
Example #18
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_ansible_remote()
        remote = client.post(ANSIBLE_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publisher.
        publisher = client.post(ANSIBLE_PUBLISHER_PATH,
                                gen_ansible_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        # Create a publication.
        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.post(ANSIBLE_DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['_href'])

        # Pick a content unit, and download it from both Pulp Fixtures…
        unit_path = choice(get_ansible_content_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(ANSIBLE_FIXTURE_URL,
                                   unit_path))).hexdigest()

        # …and Pulp.
        client.response_handler = api.safe_handler

        unit_url = cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '/'
        unit_url = urljoin(unit_url, unit_path)

        pulp_hash = hashlib.sha256(client.get(unit_url).content).hexdigest()
        self.assertEqual(fixtures_hash, pulp_hash)
Example #19
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available. For the cookbook plugin
           this is below a live API at ``pulp_cookbook/market/`` e.g.
           ``http://example.com/pulp_cookbook/market/foo/`` and
           ``http://example.com/pulp_cookbook/market/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_remote(fixture_u1.url)
        remote = client.post(COOKBOOK_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publisher.
        publisher = client.post(COOKBOOK_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        # Create a publication.
        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['_href'])

        # pulp_cookbook universe live endpoint contains
        # all cookbooks
        distribution_base_url = cfg.get_hosts('api')[0].roles['api']['scheme']
        distribution_base_url += '://' + distribution['base_url'] + '/'

        api_host_cfg = cfg.get_hosts('api')[0]
        universe_url = api_host_cfg.roles['api']['scheme']
        universe_url += '://' + api_host_cfg.hostname
        universe_url += ':{}'.format(api_host_cfg.roles['api']['port'])
        universe_url += COOKBOOK_BASE_LIVE_API_PATH
        universe_url += distribution['base_path'] + '/universe'

        universe = client.get(universe_url)

        for content, publication_path in get_content_and_unit_paths(repo):
            u_url = universe[content['name']][
                content['version']]['download_url']
            self.assertEqual(u_url,
                             urljoin(distribution_base_url, publication_path))

        # Pick a cookbook, and download it from both Fixtures…
        _, unit_path = choice(get_content_and_unit_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(fixture_u1.url, unit_path))).hexdigest()

        # …and Pulp content
        client.response_handler = api.safe_handler

        unit_url = urljoin(distribution_base_url, unit_path)

        pulp_hash = hashlib.sha256(client.get(unit_url).content).hexdigest()
        self.assertEqual(fixtures_hash, pulp_hash)
Example #20
    def do_publish(self, expected_values, modus):
        """Publish particular repository in flat format.

        1. Create a repository in flat repo format.
        2. Create a publication.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Assert that Release file path is equal to desired file path.
        5. Assert that the codename, suite and component are as expected.
        """
        # Create a repository:
        repo = deb_repository_api.create(gen_repo())
        self.addCleanup(deb_repository_api.delete, repo.pulp_href)

        # Create a remote:
        body = gen_deb_remote(  # DEB_FLAT_REPO_FIXTURE_URL
            url=DEB_FLAT_REPO_FIXTURE_URL,
            distributions=expected_values["distribution"])
        remote = deb_remote_api.create(body)
        self.addCleanup(deb_remote_api.delete, remote.pulp_href)

        # Sync the repository:
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = deb_repository_api.sync(repo.pulp_href,
                                                repository_sync_data)
        monitor_task(sync_response.task)
        repo = deb_repository_api.read(repo.pulp_href)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))

        self.assertIsNotNone(repo.latest_version_href)

        # Create a publication:
        if modus == "verbatim":
            publication_api = deb_verbatim_publication_api
            Publication = DebVerbatimPublication
        else:
            publication_api = deb_apt_publication_api
            Publication = DebAptPublication

        publish_data = Publication(repository=repo.pulp_href,
                                   **self._publication_extra_args(modus))
        publish_response = publication_api.create(publish_data)
        publication_href = monitor_task(
            publish_response.task).created_resources[0]
        self.addCleanup(publication_api.delete, publication_href)
        publication = publication_api.read(publication_href)

        # Test the publication:
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        release_file = get_content(repo=publication.to_dict(),
                                   version_href=publication.repository_version
                                   )[DEB_RELEASE_FILE_NAME][0]

        release_file_path = os.path.join(
            expected_values["release_file_folder_sync"], "Release")
        self.assertEqual(release_file_path, release_file["relative_path"])
        self.assertEqual(expected_values["distribution"],
                         release_file["distribution"])
        self.assertEqual(expected_values["codename"], release_file["codename"])
        self.assertEqual(expected_values["suite"], release_file["suite"])

        release = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version)[DEB_RELEASE_NAME][0]

        self.assertEqual(expected_values["distribution"],
                         release["distribution"])
        self.assertEqual(expected_values["codename"], release["codename"])
        self.assertEqual(expected_values["suite"], release["suite"])

        components = get_content(repo=publication.to_dict(),
                                 version_href=publication.repository_version
                                 )[DEB_RELEASE_COMPONENT_NAME]

        self.assertEqual(len(expected_values["components"]), len(components))
        for component in components:
            self.assertIn(component["component"],
                          expected_values["components"])

        package_indices = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version
        )[DEB_PACKAGE_INDEX_NAME]

        self.assertEqual(len(expected_values["package_index_paths_sync"]),
                         len(package_indices))
        for package_index in package_indices:
            self.assertIn(package_index["relative_path"],
                          expected_values["package_index_paths_sync"])

        # Create a distribution:
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = deb_distribution_api.create(body)
        distribution_href = monitor_task(
            distribution_response.task).created_resources[0]
        distribution = deb_distribution_api.read(distribution_href)
        self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

        # Check that the expected Release files and package indices are there:
        cfg = config.get_config()
        release_file_path = os.path.join(
            expected_values["release_file_folder_dist"], "Release")
        download_content_unit(cfg, distribution.to_dict(), release_file_path)
        for package_index_path in expected_values["package_index_paths_dist"]:
            download_content_unit(cfg, distribution.to_dict(),
                                  package_index_path)
Example #21
    def do_test(self, policy):
        """Verify whether content served by pulp can be synced.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Sync another repository, using the first repository's distribution
           ``base_url`` as the remote url.

        """
        client = gen_rpm_client()
        repo_api = RepositoriesRpmApi(client)
        remote_api = RemotesRpmApi(client)
        publications = PublicationsRpmApi(client)
        distributions = DistributionsRpmApi(client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(url=RPM_KICKSTART_FIXTURE_URL, policy=policy)
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)

        # Create a publication.
        publish_data = RpmRpmPublication(
            repository=repo.pulp_href,
            metadata_checksum_type="sha1",
            package_checksum_type="sha224",
        )
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(publish_response.task).created_resources
        publication_href = created_resources[0]
        self.addCleanup(publications.delete, publication_href)

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = distributions.create(body)
        created_resources = monitor_task(distribution_response.task).created_resources
        distribution = distributions.read(created_resources[0])
        self.addCleanup(distributions.delete, distribution.pulp_href)

        # Create another repo pointing to distribution base_url
        repo2 = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo2.pulp_href)

        body = gen_rpm_remote(url=distribution.base_url, policy=policy)
        remote2 = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote2.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote2.pulp_href)
        sync_response = repo_api.sync(repo2.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo2 = repo_api.read(repo2.pulp_href)

        summary = get_content_summary(repo.to_dict())
        summary2 = get_content_summary(repo2.to_dict())
        self.assertDictEqual(summary, summary2)

        added = get_added_content_summary(repo.to_dict())
        added2 = get_added_content_summary(repo2.to_dict())
        self.assertDictEqual(added, added2)
Example #22
    def test_content_app_returns_404(self):
        """Test that content app returns 404 on wrong url.

        This test targets the following issue: 4278

        * `<https://pulp.plan.io/issues/4278>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publisher.
        3. Create a distribution and set the repository and publisher to the
           previously created ones.
        4. Create a publication using the latest repository version.
        5. Verify that the content app serves 404 responses.
        """
        self.assertGreaterEqual(len(self.contents), 2, self.contents)

        # Create a repository.
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])
        self.client.post(repo['_versions_href'],
                         {'add_content_units': [self.contents[0]['_href']]})
        repo = self.client.get(repo['_href'])

        # Create publisher.
        publisher = self.client.post(FILE_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(self.client.delete, publisher['_href'])

        # Create a distribution
        body = gen_distribution()
        body['repository'] = repo['_href']
        body['publisher'] = publisher['_href']

        response_dict = self.client.post(DISTRIBUTION_PATH, body)
        dist_task = self.client.get(response_dict['task'])
        distribution_href = dist_task['created_resources'][0]
        distribution = self.client.get(distribution_href)
        self.addCleanup(self.client.delete, distribution['_href'])

        last_version_href = get_versions(repo)[-1]['_href']
        publication = publish(self.cfg, publisher, repo, last_version_href)

        self.addCleanup(self.client.delete, publication['_href'])
        distribution = self.client.get(distribution['_href'])

        # Verify 404 response for wrong url of the distribution
        unit_path = 'i-do-not-exist'
        unit_url = self.cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '-WRONG/'
        unit_url = urljoin(unit_url, unit_path)

        self.client.response_handler = api.safe_handler
        with self.assertRaisesRegex(HTTPError, r'^404'):
            self.client.get(unit_url).content

        # Verify 404 response for wrong url inside the distribution
        unit_path = 'i-do-not-exist'
        unit_url = self.cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '/'
        unit_url = urljoin(unit_url, unit_path)

        self.client.response_handler = api.safe_handler
        with self.assertRaisesRegex(HTTPError, r'^404'):
            self.client.get(unit_url).content
Example #23
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = gen_rpm_client()
        repo_api = RepositoriesRpmApi(client)
        remote_api = RemotesRpmApi(client)
        publications = PublicationsRpmApi(client)
        distributions = DistributionsRpmApi(client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL)
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)

        # Create a publication.
        publish_data = RpmRpmPublication(repository=repo.pulp_href)
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(publish_response.task)
        publication_href = created_resources[0]
        self.addCleanup(publications.delete, publication_href)

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = distributions.create(body)
        created_resources = monitor_task(distribution_response.task)
        distribution = distributions.read(created_resources[0])
        self.addCleanup(distributions.delete, distribution.pulp_href)

        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_path = choice(get_rpm_package_paths(repo.to_dict()))
        fixture_hash = hashlib.sha256(
            utils.http_get(urljoin(RPM_UNSIGNED_FIXTURE_URL, unit_path))
        ).hexdigest()

        # …and Pulp.
        pkg_path = get_package_repo_path(unit_path)
        content = download_content_unit(cfg, distribution.to_dict(), pkg_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        self.assertEqual(fixture_hash, pulp_hash)
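``get_package_repo_path`` above maps a package filename to the path it is served under by the distribution. A hedged sketch, assuming pulp_rpm's published layout of ``Packages/<first letter>/<filename>``; the real helper may differ:

# Sketch only: assumed mapping from a package filename to its path inside a
# published RPM repository.
import os


def get_package_repo_path(package_filename):
    """Return the relative path of a package inside a published repo."""
    return os.path.join('Packages', package_filename.lower()[0], package_filename)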
Example #24
    def do_test(self, policy):
        """Verify whether content served by Pulp can be synced.

        The initial sync to Pulp is one of many different download policies, the second sync is
        immediate in order to exercise downloading all of the files.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Sync another repository, using the first repository's distribution
           ``base_url`` as the remote url.

        """
        client = gen_rpm_client()
        repo_api = RepositoriesRpmApi(client)
        remote_api = RemotesRpmApi(client)
        publications = PublicationsRpmApi(client)
        distributions = DistributionsRpmApi(client)

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(url=RPM_KICKSTART_FIXTURE_URL, policy=policy)
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)

        # Create a publication.
        publish_data = RpmRpmPublication(
            repository=repo.pulp_href,
            metadata_checksum_type="sha384",
            package_checksum_type="sha224",
        )
        publish_response = publications.create(publish_data)
        created_resources = monitor_task(publish_response.task).created_resources
        publication_href = created_resources[0]
        self.addCleanup(publications.delete, publication_href)

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = distributions.create(body)
        created_resources = monitor_task(distribution_response.task).created_resources
        distribution = distributions.read(created_resources[0])
        self.addCleanup(distributions.delete, distribution.pulp_href)

        # Create another repo pointing to distribution base_url
        repo2 = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo2.pulp_href)

        body = gen_rpm_remote(url=distribution.base_url, policy="immediate")
        remote2 = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote2.pulp_href)

        # Sync a Repository
        repository_sync_data = RpmRepositorySyncURL(remote=remote2.pulp_href)
        sync_response = repo_api.sync(repo2.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo2 = repo_api.read(repo2.pulp_href)

        summary = get_content_summary(repo.to_dict())
        summary2 = get_content_summary(repo2.to_dict())
        self.assertDictEqual(summary, summary2)

        added = get_added_content_summary(repo.to_dict())
        added2 = get_added_content_summary(repo2.to_dict())
        self.assertDictEqual(added, added2)
Example #25
    def test_all(self):
        """Test content promotion for a distribution.

        This test targets the following issue:

        * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publisher, and publication.
        3. Create 2 distributions using the same publication. Those
           distributions will have different ``base_path`` values.
        4. Assert that distributions have the same publication.
        5. Select a content unit. Download that content unit from Pulp using
           the two different distributions.
           Assert that content unit has the same checksum when fetched from
           different distributions.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        remote = client.post(
            FILE_REMOTE_PATH,
            gen_remote(FILE_FIXTURE_MANIFEST_URL)
        )
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        distributions = []
        for _ in range(2):
            body = gen_distribution()
            body['publication'] = publication['_href']
            distribution = client.post(DISTRIBUTION_PATH, body)
            distributions.append(distribution)
            self.addCleanup(client.delete, distribution['_href'])

        self.assertEqual(
            distributions[0]['publication'],
            distributions[1]['publication'],
            distributions
        )

        unit_urls = []
        unit_path = get_added_content(repo)[FILE_CONTENT_NAME][0]['relative_path']
        for distribution in distributions:
            unit_url = cfg.get_hosts('api')[0].roles['api']['scheme']
            unit_url += '://' + distribution['base_url'] + '/'
            unit_urls.append(urljoin(unit_url, unit_path))

        client.response_handler = api.safe_handler
        self.assertEqual(
            hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(),
            hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(),
            unit_urls,
        )
    def test_repo_auto_distribution(self):
        """Test auto distribution of a repository.

        This test targets the following issue:

        * `Pulp Smash #947 <https://github.com/PulpQE/pulp-smash/issues/947>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publisher.
        3. Create a distribution and set the repository and publisher to the
           previously created ones.
        4. Create a publication using the latest repository version.
        5. Assert that the previous distribution has its ``publication`` set
           to the one created in step 4.
        6. Create a new repository version by adding content to the repository.
        7. Create another publication using the just-created repository
           version.
        8. Assert that the distribution now has its ``publication`` set to the
           publication created in step 7.
        9. Verify that the content added in step 6 is now available to
           download from the distribution, and verify that the content unit
           has the same checksum when fetched directly from Pulp-Fixtures.
        """
        self.assertGreaterEqual(len(self.contents), 2, self.contents)

        # Create a repository.
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])
        self.client.post(repo['_versions_href'],
                         {'add_content_units': [self.contents[0]['_href']]})
        repo = self.client.get(repo['_href'])

        # Create publisher.
        publisher = self.client.post(FILE_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(self.client.delete, publisher['_href'])

        # Create a distribution
        body = gen_distribution()
        body['repository'] = repo['_href']
        body['publisher'] = publisher['_href']
        distribution = self.client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(self.client.delete, distribution['_href'])
        last_version_href = get_versions(repo)[-1]['_href']
        publication = publish(self.cfg, publisher, repo, last_version_href)
        self.addCleanup(self.client.delete, publication['_href'])
        distribution = self.client.get(distribution['_href'])

        # Assert that distribution was updated as per step 5.
        self.assertEqual(distribution['publication'], publication['_href'])

        # Create a new repository version.
        self.client.post(repo['_versions_href'],
                         {'add_content_units': [self.contents[1]['_href']]})
        repo = self.client.get(repo['_href'])
        last_version_href = get_versions(repo)[-1]['_href']
        publication = publish(self.cfg, publisher, repo, last_version_href)
        self.addCleanup(self.client.delete, publication['_href'])
        distribution = self.client.get(distribution['_href'])

        # Assert that distribution was updated as per step 8.
        self.assertEqual(distribution['publication'], publication['_href'])
        unit_path = get_added_content(repo,
                                      last_version_href)[0]['relative_path']
        unit_url = self.cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '/'
        unit_url = urljoin(unit_url, unit_path)

        self.client.response_handler = api.safe_handler
        pulp_hash = hashlib.sha256(
            self.client.get(unit_url).content).hexdigest()
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(FILE_URL, unit_path))).hexdigest()

        # Verify checksum. Step 9.
        self.assertEqual(fixtures_hash, pulp_hash)
Example #27
    def test_all(self):
        """
        Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/pub-name/`` and
           ``https://example.com/content/pub-name/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_python_remote(PYTHON_FIXTURES_URL)
        remote = client.post(PYTHON_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publication
        publication = gen_python_publication(cfg, repository=repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.using_handler(api.task_handler).post(
            PYTHON_DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['_href'])

        # Pick a file, and download it from both Pulp Fixtures…
        unit_path = choice(get_python_content_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(
                urljoin(urljoin(PYTHON_FIXTURES_URL, 'packages/'),
                        unit_path))).hexdigest()

        # …and Pulp.
        content = download_content_unit(cfg, distribution, unit_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        self.assertEqual(fixtures_hash, pulp_hash)

    def do_publish(self, expected_values):
        """Publish a particular repository with missing package indices.

        1. Create a repository with missing package indices.
        2. Create a publication.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Assert that the InRelease file path matches the expected file path.
        5. Assert that the codename, suite, and components are as expected.
        """
        # Create a repository:
        repo = deb_repository_api.create(gen_repo())
        self.addCleanup(deb_repository_api.delete, repo.pulp_href)

        # Create a remote:
        body = gen_deb_remote(
            url=DEB_MISSING_ARCH_DISTS_FIXTURE_URL,
            distributions=expected_values["distribution"],
            ignore_missing_package_indices=True,
        )
        remote = deb_remote_api.create(body)
        self.addCleanup(deb_remote_api.delete, remote.pulp_href)

        # Sync the repository:
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = deb_repository_api.sync(repo.pulp_href,
                                                repository_sync_data)
        monitor_task(sync_response.task)
        repo = deb_repository_api.read(repo.pulp_href)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))

        # Create a publication:
        publish_data = DebAptPublication(repository=repo.pulp_href,
                                         **self._publication_extra_args())
        publish_response = deb_apt_publication_api.create(publish_data)
        publication_href = monitor_task(
            publish_response.task).created_resources[0]
        self.addCleanup(deb_apt_publication_api.delete, publication_href)
        publication = deb_apt_publication_api.read(publication_href)

        # Test the publication:
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        release_file = get_content(repo=publication.to_dict(),
                                   version_href=publication.repository_version
                                   )[DEB_RELEASE_FILE_NAME][0]

        release_file_path = os.path.join(
            expected_values["release_file_folder"], "InRelease")
        self.assertEqual(release_file_path, release_file["relative_path"])
        self.assertEqual(expected_values["distribution"],
                         release_file["distribution"])
        self.assertEqual(expected_values["codename"], release_file["codename"])
        self.assertEqual(expected_values["suite"], release_file["suite"])

        release = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version)[DEB_RELEASE_NAME][0]

        self.assertEqual(expected_values["distribution"],
                         release["distribution"])
        self.assertEqual(expected_values["codename"], release["codename"])
        self.assertEqual(expected_values["suite"], release["suite"])

        components = get_content(repo=publication.to_dict(),
                                 version_href=publication.repository_version
                                 )[DEB_RELEASE_COMPONENT_NAME]

        self.assertEqual({c["component"]
                          for c in components},
                         set(expected_values["components"]))

        package_indices = get_content(
            repo=publication.to_dict(),
            version_href=publication.repository_version
        )[DEB_PACKAGE_INDEX_NAME]

        # Some package indices listed in the Release file are missing from the
        # fixture, so the number of synced indices differs from the number of
        # expected paths.
        self.assertNotEqual(len(expected_values["package_index_paths"]),
                            len(package_indices))
        # Every synced package index must match one of the expected paths.
        expected_relative_paths = {
            os.path.join(path, "Packages")
            for path in expected_values["package_index_paths"]
        }
        for package_index in package_indices:
            self.assertIn(package_index["relative_path"],
                          expected_relative_paths)

        self.assertFalse(
            os.path.isdir(
                os.path.join(remote.url,
                             "dists/ragnarok/asgard/binary-armeb")))
        self.assertFalse(
            os.path.isdir(
                os.path.join(remote.url,
                             "dists/ragnarok/jotunheimr/binary-armeb")))

        # Create a distribution:
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = deb_distribution_api.create(body)
        distribution_href = monitor_task(
            distribution_response.task).created_resources[0]
        distribution = deb_distribution_api.read(distribution_href)
        self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

        # Check that the expected Release files and package indices are there:
        cfg = config.get_config()
        release_file_path = os.path.join(
            expected_values["release_file_folder"], "Release")
        download_content_unit(cfg, distribution.to_dict(), release_file_path)

        for package_index_path in expected_values["package_index_paths"]:
            download_content_unit(cfg, distribution.to_dict(),
                                  package_index_path + "/Packages")
Example #29
    def test_all(self):
        """Test whether a particular repository version can be published.

        1. Create a repository with at least 2 repository versions.
        2. Create a publication by supplying the latest ``repository_version``.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Create a publication by supplying the non-latest ``repository_version``.
        5. Create a distribution.
        6. Assert that the publication ``repository_version`` attribute points
           to the supplied repository version.
        7. Assert that an exception is raised when providing two different
           repository versions to be published at the same time.
        8. Assert that the distribution ``base_url`` contains the supplied
           ``base_path``.
        """
        # Step 1
        repo_content = get_content(self.repo.to_dict())[PYTHON_CONTENT_NAME]
        print(repo_content)
        for file_content in repo_content:
            repository_modify_data = RepositoryAddRemoveContent(
                remove_content_units=[file_content["pulp_href"]])
            modify_response = self.repo_api.modify(self.repo.pulp_href,
                                                   repository_modify_data)
            monitor_task(modify_response.task)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(self.repo.to_dict()))
        print(version_hrefs)
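        # Pick any repository version except the latest one.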
        non_latest = choice(version_hrefs[:-1])

        # Step 2
        publish_data = PythonPythonPublication(repository=self.repo.pulp_href)
        publication = self.create_publication(publish_data)

        # Step 3
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        # Step 4
        publish_data = PythonPythonPublication(repository_version=non_latest)
        publication = self.create_publication(publish_data)

        # Step 5
        body = gen_distribution()
        body["base_path"] = "pulp_post_upgrade_test"
        body["publication"] = publication.pulp_href

        distribution_response = self.distributions.create(body)
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = self.distributions.read(created_resources[0])

        # Step 6
        self.assertEqual(publication.repository_version, non_latest)

        # Step 7
        with self.assertRaises(ApiException):
            body = {
                "repository": self.repo.pulp_href,
                "repository_version": non_latest
            }
            self.publications.create(body)

        # Step 8
        self.assertIn("/pypi/pulp_post_upgrade_test/", distribution.base_url)
Example #30
    def test_v3_sync(self):
        """Test syncing Pulp to Pulp over v3 api."""
        repo = self.repo_api.create(gen_repo())
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        body = gen_ansible_remote(url=ANSIBLE_COLLECTION_PULP_URL_V2)
        remote = self.remote_collection_api.create(body)
        self.addCleanup(self.remote_collection_api.delete, remote.pulp_href)

        # Sync the repository.
        self.assertEqual(repo.latest_version_href,
                         f"{repo.pulp_href}versions/0/")
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = self.repo_api.sync(repo.pulp_href,
                                           repository_sync_data)
        monitor_task(sync_response.task)
        repo = self.repo_api.read(repo.pulp_href)
        self.assertEqual(repo.latest_version_href,
                         f"{repo.pulp_href}versions/1/")

        # Create a distribution.
        body = gen_distribution()
        body["repository"] = repo.pulp_href
        distribution_create = self.distributions_api.create(body)
        distribution_url = monitor_task(distribution_create.task)
        distribution = self.distributions_api.read(distribution_url[0])
        original_path = distribution.base_path

        self.addCleanup(self.distributions_api.delete, distribution.pulp_href)

        # Create a second repo.
        mirror_repo = self.repo_api.create(gen_repo())
        self.addCleanup(self.repo_api.delete, mirror_repo.pulp_href)

        COLLECTION_VERSION = "3.6.2"

        url = (distribution.client_url + "/api/v3/collections/" +
               PULP_INSTALLER_COLLECTION.replace(".", "/") + "/versions/" +
               COLLECTION_VERSION)
        body = gen_ansible_remote(url=url)
        remote = self.remote_collection_api.create(body)
        self.addCleanup(self.remote_collection_api.delete, remote.pulp_href)

        # Sync the second repository.
        self.assertEqual(mirror_repo.latest_version_href,
                         f"{mirror_repo.pulp_href}versions/0/")
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = self.repo_api.sync(mirror_repo.pulp_href,
                                           repository_sync_data)
        monitor_task(sync_response.task)
        mirror_repo = self.repo_api.read(mirror_repo.pulp_href)
        self.assertEqual(mirror_repo.latest_version_href,
                         f"{mirror_repo.pulp_href}versions/1/")

        # Create a distribution.
        body = gen_distribution()
        body["repository"] = mirror_repo.pulp_href
        distribution_create = self.distributions_api.create(body)
        distribution_url = monitor_task(distribution_create.task)
        distribution = self.distributions_api.read(distribution_url[0])
        mirror_path = distribution.base_path

        self.addCleanup(self.distributions_api.delete, distribution.pulp_href)

        # Check content of both repos.
        original_content = self.collections_api.list(path=original_path)
        mirror_content = self.collections_api.list(path=mirror_path)

        self.assertEqual(mirror_content.data[0].highest_version["version"],
                         COLLECTION_VERSION)
        self.assertNotEqual(
            original_content.data[0].highest_version["version"],
            COLLECTION_VERSION,
        )
Example #31
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_deb_remote()
        remote = client.post(DEB_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publisher.
        publisher = client.post(self.Meta.publisher_path, self.Meta.gen_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        # Create a publication.
        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
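        # Creating a distribution spawns a task; read the task and fetch the
        # distribution from its created resources.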
        response_dict = client.post(DISTRIBUTION_PATH, body)
        dist_task = client.get(response_dict['task'])
        distribution_href = dist_task['created_resources'][0]
        distribution = client.get(distribution_href)
        self.addCleanup(client.delete, distribution['_href'])

        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_paths = [
            choice(paths) for paths in self.Meta.get_content_unit_paths(repo).values() if paths
        ]
        fixtures_hashes = [hashlib.sha256(
            utils.http_get(urljoin(DEB_FIXTURE_URL, unit_path[0]))
        ).hexdigest() for unit_path in unit_paths]

        # …and Pulp.
        client.response_handler = api.safe_handler

        unit_base_url = cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_base_url += '://' + distribution['base_url'] + '/'
        unit_urls = [urljoin(unit_base_url, unit_path[1]) for unit_path in unit_paths]

        pulp_hashes = [hashlib.sha256(client.get(unit_url).content).hexdigest()
                       for unit_url in unit_urls]
        self.assertEqual(fixtures_hashes, pulp_hashes)
Example #32
 def _gen_ansible_distribution(ansible_repo):
     distro_data = gen_distribution(repository=ansible_repo.pulp_href)
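     # gen_object_with_cleanup presumably creates the distribution through the
     # bindings client and registers it for automatic deletion when the tests
     # finish.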
     return gen_object_with_cleanup(ansible_distro_api_client, distro_data)
Example #33
    def do_test(self, policy):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(DEB_REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo["pulp_href"])

        body = gen_deb_remote()
        remote = client.post(DEB_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote["pulp_href"])

        sync(cfg, remote, repo)
        repo = client.get(repo["pulp_href"])

        # Create a publication.
        publication = self.Meta.create_publication(cfg, repo)
        self.addCleanup(client.delete, publication["pulp_href"])

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication["pulp_href"]
        distribution = client.using_handler(api.task_handler).post(
            self.Meta.DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution["pulp_href"])

        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_paths = [
            choice(paths)
            for paths in self.Meta.get_content_unit_paths(repo).values()
            if paths
        ]
        fixtures_hashes = [
            hashlib.sha256(
                utils.http_get(urljoin(DEB_FIXTURE_URL,
                                       unit_path[0]))).hexdigest()
            for unit_path in unit_paths
        ]

        # …and Pulp.
        contents = [
            download_content_unit(cfg, distribution, unit_path[1])
            for unit_path in unit_paths
        ]
        pulp_hashes = [
            hashlib.sha256(content).hexdigest() for content in contents
        ]
        self.assertEqual(fixtures_hashes, pulp_hashes)

    def test_publish(self):
        """Publish a repository that contains no packages.

        1. Create a repository with the given distributions.
        2. Create a publication.
        3. Assert that the publication ``repository_version`` attribute points
           to the latest repository version.
        4. Assert that the package index files are present.
        5. Assert that there are no packages.
        """
        # Create a repository:
        repo = deb_repository_api.create(gen_repo())
        self.addCleanup(deb_repository_api.delete, repo.pulp_href)

        # Create a remote:
        body = gen_deb_remote(url=DEB_FIXTURE_URL, distributions="ginnungagap")
        remote = deb_remote_api.create(body)
        self.addCleanup(deb_remote_api.delete, remote.pulp_href)

        # Sync the repository:
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = deb_repository_api.sync(repo.pulp_href,
                                                repository_sync_data)
        monitor_task(sync_response.task)
        repo = deb_repository_api.read(repo.pulp_href)
        version_hrefs = tuple(ver["pulp_href"]
                              for ver in get_versions(repo.to_dict()))

        self.assertIsNotNone(repo.latest_version_href)

        # Create a publication:
        publish_data = DebAptPublication(repository=repo.pulp_href,
                                         **self._publication_extra_args())
        publish_response = deb_apt_publication_api.create(publish_data)
        publication_href = monitor_task(
            publish_response.task).created_resources[0]
        self.addCleanup(deb_apt_publication_api.delete, publication_href)
        publication = deb_apt_publication_api.read(publication_href)

        # Test the publication:
        self.assertEqual(publication.repository_version, version_hrefs[-1])

        release = get_content(repo=publication.to_dict(),
                              version_href=publication.repository_version)

        package_index_paths = [
            "dists/ginnungagap/asgard/binary-ppc64/Packages",
            "dists/ginnungagap/jotunheimr/binary-armeb/Packages",
            "dists/ginnungagap/asgard/binary-armeb/Packages",
            "dists/ginnungagap/jotunheimr/binary-ppc64/Packages",
            "dists/default/all/binary-all/Packages",
        ]

        self.assertFalse(release[DEB_PACKAGE_NAME])
        self.assertTrue(release[DEB_PACKAGE_INDEX_NAME])
        self.assertEqual(
            len(package_index_paths) - 1, len(release[DEB_PACKAGE_INDEX_NAME]))

        # Create a distribution:
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = deb_distribution_api.create(body)
        distribution_href = monitor_task(
            distribution_response.task).created_resources[0]
        distribution = deb_distribution_api.read(distribution_href)
        self.addCleanup(deb_distribution_api.delete, distribution.pulp_href)

        # Check that the expected package indices are there:
        cfg = config.get_config()
        for package_index_path in package_index_paths:
            download_content_unit(cfg, distribution.to_dict(),
                                  package_index_path)
Example #35
    def get_checksum_types(self, **kwargs):
        """Sync and publish an RPM repository."""
        package_checksum_type = kwargs.get("package_checksum_type")
        metadata_checksum_type = kwargs.get("metadata_checksum_type")
        policy = kwargs.get("policy", "immediate")
        # 1. create repo and remote
        repo = self.repo_api.create(gen_repo())
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(policy=policy)
        remote = self.remote_api.create(body)
        self.addCleanup(self.remote_api.delete, remote.pulp_href)

        # 2. Sync it
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = self.repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)

        # 3. Publish and distribute
        publish_data = RpmRpmPublication(
            repository=repo.pulp_href,
            package_checksum_type=package_checksum_type,
            metadata_checksum_type=metadata_checksum_type,
        )
        publish_response = self.publications.create(publish_data)
        created_resources = monitor_task(publish_response.task).created_resources
        publication_href = created_resources[0]
        self.addCleanup(self.publications.delete, publication_href)

        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = self.distributions.create(body)
        created_resources = monitor_task(distribution_response.task).created_resources
        distribution = self.distributions.read(created_resources[0])
        self.addCleanup(self.distributions.delete, distribution.pulp_href)
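        # Parse the published repomd.xml and record the checksum type declared
        # for each metadata file; for primary.xml, also record the checksum
        # type of its first package entry.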

        repomd = ElementTree.fromstring(
            http_get(os.path.join(distribution.base_url, "repodata/repomd.xml"))
        )

        data_xpath = "{{{}}}data".format(RPM_NAMESPACES["metadata/repo"])
        data_elems = [elem for elem in repomd.findall(data_xpath)]

        repomd_checksum_types = {}
        primary_checksum_types = {}
        checksum_xpath = "{{{}}}checksum".format(RPM_NAMESPACES["metadata/repo"])
        for data_elem in data_elems:
            checksum_type = data_elem.find(checksum_xpath).get("type")
            repomd_checksum_types[data_elem.get("type")] = checksum_type
            if data_elem.get("type") == "primary":
                location_xpath = "{{{}}}location".format(RPM_NAMESPACES["metadata/repo"])
                primary_href = data_elem.find(location_xpath).get("href")
                primary = ElementTree.fromstring(
                    read_xml_gz(http_get(os.path.join(distribution.base_url, primary_href)))
                )
                package_checksum_xpath = "{{{}}}checksum".format(RPM_NAMESPACES["metadata/common"])
                package_xpath = "{{{}}}package".format(RPM_NAMESPACES["metadata/common"])
                package_elems = [elem for elem in primary.findall(package_xpath)]
                pkg_checksum_type = package_elems[0].find(package_checksum_xpath).get("type")
                primary_checksum_types[package_elems[0].get("type")] = pkg_checksum_type

        return repomd_checksum_types, primary_checksum_types
Example #36
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_rpm_remote()
        remote = client.post(RPM_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publication.
        publication = publish(cfg, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.using_handler(api.task_handler).post(
            DISTRIBUTION_PATH, body
        )
        self.addCleanup(client.delete, distribution['_href'])

        # Pick a content unit, and download it from both Pulp Fixtures…
        unit_path = choice(get_rpm_package_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(RPM_UNSIGNED_FIXTURE_URL, unit_path))
        ).hexdigest()

        # …and Pulp.
        content = download_content_unit(cfg, distribution, unit_path)
        pulp_hash = hashlib.sha256(content).hexdigest()

        self.assertEqual(fixtures_hash, pulp_hash)
Example #37
    def test_all(self):
        """Test content promotion for a distribution.

        This test targets the following issues:

        * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_
        * `Pulp #8475 <https://pulp.plan.io/issues/8475>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publication.
        3. Create 2 distributions using the same publication. The
           distributions will have different ``base_path`` values.
        4. Assert that the distributions have the same publication.
        5. Assert that the distributions are reachable from the Pulp content
           base URL.
        6. Assert that the content in the distributions is reachable.
        7. Select a content unit. Download that content unit from Pulp using
           the two different distributions.
           Assert that the content unit has the same checksum when fetched
           from both distributions.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(FILE_REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo["pulp_href"])

        remote = client.post(FILE_REMOTE_PATH,
                             gen_remote(FILE_FIXTURE_MANIFEST_URL))
        self.addCleanup(client.delete, remote["pulp_href"])

        sync(cfg, remote, repo)
        repo = client.get(repo["pulp_href"])

        publication = create_file_publication(cfg, repo)
        self.addCleanup(client.delete, publication["pulp_href"])

        distributions = []
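        # Create two distributions that serve the same publication under
        # different base paths.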
        for _ in range(2):
            body = gen_distribution()
            body["publication"] = publication["pulp_href"]
            distribution = client.using_handler(api.task_handler).post(
                FILE_DISTRIBUTION_PATH, body)
            distributions.append(distribution)
            self.addCleanup(client.delete, distribution["pulp_href"])

        self.assertEqual(distributions[0]["publication"],
                         distributions[1]["publication"], distributions)

        client.response_handler = api.safe_handler
        self.assertEqual(client.get(PULP_CONTENT_BASE_URL).status_code, 200)

        for distribution in distributions:
            self.assertEqual(
                client.get(distribution["base_url"]).status_code, 200)

        unit_urls = []
        unit_path = get_added_content(
            repo)[FILE_CONTENT_NAME][0]["relative_path"]
        for distribution in distributions:
            unit_url = distribution["base_url"]
            unit_urls.append(urljoin(unit_url, unit_path))

        self.assertEqual(
            hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(),
            hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(),
            unit_urls,
        )
Example #38
    def test_all(self):
        """Test content promotion for a distribution.

        This test targets the following issue:

        * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publisher and a publication.
        3. Create 2 distributions using the same publication. The
           distributions will have different ``base_path`` values.
        4. Assert that the distributions have the same publication.
        5. Select a content unit. Download that content unit from Pulp using
           the two different distributions.
           Assert that the content unit has the same checksum when fetched
           from both distributions.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        remote = client.post(FILE_REMOTE_PATH,
                             gen_remote(FILE_FIXTURE_MANIFEST_URL))
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        publisher = client.post(FILE_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        distributions = []
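        # Create two distributions that serve the same publication under
        # different base paths.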
        for _ in range(2):
            body = gen_distribution()
            body['publication'] = publication['_href']
            response_dict = client.post(DISTRIBUTION_PATH, body)
            dist_task = client.get(response_dict['task'])
            distribution_href = dist_task['created_resources'][0]
            distribution = client.get(distribution_href)
            distributions.append(distribution)
            self.addCleanup(client.delete, distribution['_href'])

        self.assertEqual(distributions[0]['publication'],
                         distributions[1]['publication'], distributions)

        unit_urls = []
        unit_path = get_added_content(
            repo)[FILE_CONTENT_NAME][0]['relative_path']
        for distribution in distributions:
            unit_url = cfg.get_hosts('api')[0].roles['api']['scheme']
            unit_url += '://' + distribution['base_url'] + '/'
            unit_urls.append(urljoin(unit_url, unit_path))

        client.response_handler = api.safe_handler
        self.assertEqual(
            hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(),
            hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(),
            unit_urls,
        )
Example #39
    def setUpClass(cls):
        """Create class-wide variables.

        1. Create a repository.
        2. Create a remote pointing to external registry.
        3. Sync the repository using the remote and re-read the repo data.
        4. Create a container distribution to serve the repository.
        5. Create another container distribution to serve the repository
           version.

        This test targets the following issue:

        * `Pulp #4460 <https://pulp.plan.io/issues/4460>`_
        """
        cls.cfg = config.get_config()

        token_auth = cls.cfg.hosts[0].roles['token auth']
        client = cli.Client(cls.cfg)
        client.run('openssl ecparam -genkey -name prime256v1 -noout -out {}'
                   .format(token_auth['private key']).split())
        client.run('openssl ec -in {} -pubout -out {}'.format(
            token_auth['private key'], token_auth['public key']).split())

        cls.client = api.Client(cls.cfg, api.page_handler)
        cls.teardown_cleanups = []

        with contextlib.ExitStack() as stack:
            # ensure tearDownClass runs if an error occurs here
            stack.callback(cls.tearDownClass)

            # Step 1
            _repo = cls.client.post(CONTAINER_REPO_PATH, gen_repo())
            cls.teardown_cleanups.append((cls.client.delete, _repo['pulp_href']))

            # Step 2
            cls.remote = cls.client.post(
                CONTAINER_REMOTE_PATH, gen_container_remote()
            )
            cls.teardown_cleanups.append(
                (cls.client.delete, cls.remote['pulp_href'])
            )

            # Step 3
            sync(cls.cfg, cls.remote, _repo)
            cls.repo = cls.client.get(_repo['pulp_href'])

            # Step 4.
            response_dict = cls.client.using_handler(api.task_handler).post(
                CONTAINER_DISTRIBUTION_PATH,
                gen_distribution(repository=cls.repo['pulp_href'])
            )
            distribution_href = response_dict['pulp_href']
            cls.distribution_with_repo = cls.client.get(distribution_href)
            cls.teardown_cleanups.append(
                (cls.client.delete, cls.distribution_with_repo['pulp_href'])
            )

            # Step 5.
            response_dict = cls.client.using_handler(api.task_handler).post(
                CONTAINER_DISTRIBUTION_PATH,
                gen_distribution(repository_version=cls.repo['latest_version_href'])
            )
            distribution_href = response_dict['pulp_href']
            cls.distribution_with_repo_version = cls.client.get(distribution_href)
            cls.teardown_cleanups.append(
                (cls.client.delete, cls.distribution_with_repo_version['pulp_href'])
            )

            # remove callback if everything goes well
            stack.pop_all()
Example #40
    def test_01_create(self):
        """Create a publication distribution.

        Do the following:

        1. Create a repository and 3 repository versions with at least 1 file
           content in it. Create a publication using the second repository
           version.
        2. Create a distribution with 'publication' field set to
           the publication from step (1).
        3. Assert the distribution got created correctly with the correct
           base_path, name, and publication. Assert that content guard is
           unset.
        4. Assert that the publication has a 'distributions' reference to the
           distribution (its backref).

        """
        self.repo.update(self.client.post(REPO_PATH, gen_repo()))
        self.remote.update(
            self.client.post(FILE_REMOTE_PATH, gen_file_remote())
        )
        # create 3 repository versions
        for _ in range(3):
            sync(self.cfg, self.remote, self.repo)
        self.repo = self.client.get(self.repo['_href'])

        versions = get_versions(self.repo)

        self.publication.update(create_file_publication(
            self.cfg,
            self.repo,
            versions[1]['_href']
        ))

        self.distribution.update(
            self.client.post(
                FILE_DISTRIBUTION_PATH,
                gen_distribution(publication=self.publication['_href'])
            )
        )

        self.publication = self.client.get(self.publication['_href'])

        # content_guard is the only parameter unset.
        for key, val in self.distribution.items():
            if key == 'content_guard':
                self.assertIsNone(val, self.distribution)
            else:
                self.assertIsNotNone(val, self.distribution)

        self.assertEqual(
            self.distribution['publication'],
            self.publication['_href'],
            self.distribution
        )

        self.assertEqual(
            self.publication['distributions'][0],
            self.distribution['_href'],
            self.publication
        )

    def do_test(self, policy):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/pulp/pulp-smash/issues/872>`_
        """
        repo_api = deb_repository_api
        remote_api = deb_remote_api
        publication_api = self.Meta.publication_api
        distribution_api = deb_distribution_api

        repo = repo_api.create(gen_repo())
        self.addCleanup(repo_api.delete, repo.pulp_href)

        body = gen_deb_remote()
        remote = remote_api.create(body)
        self.addCleanup(remote_api.delete, remote.pulp_href)

        # Sync a Repository
        repository_sync_data = RepositorySyncURL(remote=remote.pulp_href)
        sync_response = repo_api.sync(repo.pulp_href, repository_sync_data)
        monitor_task(sync_response.task)
        repo = repo_api.read(repo.pulp_href)

        # Create a publication.
        publish_data = self.Meta.Publication(repository=repo.pulp_href)
        publish_response = publication_api.create(publish_data)
        publication_href = monitor_task(publish_response.task)[0]
        self.addCleanup(publication_api.delete, publication_href)

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = distribution_api.create(body)
        distribution_href = monitor_task(distribution_response.task)[0]
        distribution = distribution_api.read(distribution_href)
        self.addCleanup(distribution_api.delete, distribution.pulp_href)

        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_paths = [
            choice(paths)
            for paths in self.Meta.get_content_unit_paths(repo).values()
            if paths
        ]
        fixtures_hashes = [
            hashlib.sha256(
                utils.http_get(urljoin(DEB_FIXTURE_URL,
                                       unit_path[0]))).hexdigest()
            for unit_path in unit_paths
        ]

        # …and Pulp.
        pulp_hashes = []
        cfg = config.get_config()
        for unit_path in unit_paths:
            content = download_content_unit(cfg, distribution.to_dict(),
                                            unit_path[1])
            pulp_hashes.append(hashlib.sha256(content).hexdigest())

        self.assertEqual(fixtures_hashes, pulp_hashes)
Example #42
    def test_repo_auto_distribution(self):
        """Test auto distribution of a repository.

        This test targets the following issue:

        * `Pulp Smash #947 <https://github.com/PulpQE/pulp-smash/issues/947>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publisher.
        3. Create a distribution and set the repository and publisher to the
           previously created ones.
        4. Create a publication using the latest repository version.
        5. Assert that the previous distribution has its ``publication`` set to
           the one created in step 4.
        6. Create a new repository version by adding content to the repository.
        7. Create another publication using the just-created repository
           version.
        8. Assert that the distribution now has its ``publication`` set to the
           publication created in step 7.
        9. Verify that the content added in step 6 is now available to download
           from the distribution, and verify that the content unit has the same
           checksum when fetched directly from Pulp Fixtures.
        """
        self.assertGreaterEqual(len(self.contents), 2, self.contents)

        # Create a repository.
        repo = self.client.post(REPO_PATH, gen_repo())
        self.addCleanup(self.client.delete, repo['_href'])
        self.client.post(
            repo['_versions_href'],
            {'add_content_units': [self.contents[0]['_href']]}
        )
        repo = self.client.get(repo['_href'])

        # Create publisher.
        publisher = self.client.post(FILE_PUBLISHER_PATH, gen_publisher())
        self.addCleanup(self.client.delete, publisher['_href'])

        # Create a distribution
        body = gen_distribution()
        body['repository'] = repo['_href']
        body['publisher'] = publisher['_href']
        distribution = self.client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(self.client.delete, distribution['_href'])
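        # Publish the latest repository version. Because the distribution
        # references this repository and publisher, it should automatically
        # start serving the new publication.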
        last_version_href = get_versions(repo)[-1]['_href']
        publication = publish(self.cfg, publisher, repo, last_version_href)
        self.addCleanup(self.client.delete, publication['_href'])
        distribution = self.client.get(distribution['_href'])

        # Assert that distribution was updated as per step 5.
        self.assertEqual(distribution['publication'], publication['_href'])

        # Create a new repository version.
        self.client.post(
            repo['_versions_href'],
            {'add_content_units': [self.contents[1]['_href']]}
        )
        repo = self.client.get(repo['_href'])
        last_version_href = get_versions(repo)[-1]['_href']
        publication = publish(self.cfg, publisher, repo, last_version_href)
        self.addCleanup(self.client.delete, publication['_href'])
        distribution = self.client.get(distribution['_href'])

        # Assert that distribution was updated as per step 8.
        self.assertEqual(distribution['publication'], publication['_href'])
        unit_path = get_added_content(repo, last_version_href)[0]['relative_path']
        unit_url = self.cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '/'
        unit_url = urljoin(unit_url, unit_path)

        self.client.response_handler = api.safe_handler
        pulp_hash = hashlib.sha256(
            self.client.get(unit_url).content
        ).hexdigest()
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(FILE_URL, unit_path))
        ).hexdigest()

        # Verify checksum. Step 9.
        self.assertEqual(fixtures_hash, pulp_hash)
Example #43
    def do_metadata_comparison_test(self, repo_url):
        """Sync and publish an RPM repository and verify the metadata is what was expected."""
        # 1. create repo and remote
        repo = self.repo_api.create(gen_repo())
        self.addCleanup(self.repo_api.delete, repo.pulp_href)

        body = gen_rpm_remote(repo_url, policy="on_demand")
        remote = self.remote_api.create(body)
        self.addCleanup(self.remote_api.delete, remote.pulp_href)

        # 2, 3. Sync, publish and distribute
        repository_sync_data = RpmRepositorySyncURL(remote=remote.pulp_href)
        sync_response = self.repo_api.sync(repo.pulp_href,
                                           repository_sync_data)
        monitor_task(sync_response.task)

        publish_data = RpmRpmPublication(repository=repo.pulp_href)
        publish_response = self.publications.create(publish_data)
        created_resources = monitor_task(
            publish_response.task).created_resources
        publication_href = created_resources[0]
        self.addCleanup(self.publications.delete, publication_href)

        body = gen_distribution()
        body["publication"] = publication_href
        distribution_response = self.distributions.create(body)
        created_resources = monitor_task(
            distribution_response.task).created_resources
        distribution = self.distributions.read(created_resources[0])
        self.addCleanup(self.distributions.delete, distribution.pulp_href)

        # 4. Download and parse the metadata.
        original_repomd = ElementTree.fromstring(
            http_get(os.path.join(repo_url, "repodata/repomd.xml")))

        reproduced_repomd = ElementTree.fromstring(
            http_get(os.path.join(distribution.base_url,
                                  "repodata/repomd.xml")))

        def get_metadata_content(base_url, repomd_elem, meta_type):
            """Return the text contents of metadata file.

            Provided a url, a repomd root element, and a metadata type, locate the metadata
            file's location href, download it from the provided url, un-gzip it, parse it, and
            return the root element node.

            Don't use this with large repos because it will blow up.
            """
            # <ns0:repomd xmlns:ns0="http://linux.duke.edu/metadata/repo">
            #     <ns0:data type="primary">
            #         <ns0:checksum type="sha256">[…]</ns0:checksum>
            #         <ns0:location href="repodata/[…]-primary.xml.gz" />
            #         …
            #     </ns0:data>
            #     …
            xpath = "{{{}}}data".format(RPM_NAMESPACES["metadata/repo"])
            data_elems = [
                elem for elem in repomd_elem.findall(xpath)
                if elem.get("type") == meta_type
            ]
            xpath = "{{{}}}location".format(RPM_NAMESPACES["metadata/repo"])
            location_href = data_elems[0].find(xpath).get("href")

            return read_xml_gz(http_get(os.path.join(base_url, location_href)))

        # 5, 6. Convert the metadata into a more workable form and then compare.
        for metadata_file in ["primary", "filelists", "other"]:
            with self.subTest(metadata_file):
                original_metadata = get_metadata_content(
                    repo_url, original_repomd, metadata_file)
                generated_metadata = get_metadata_content(
                    distribution.base_url, reproduced_repomd, metadata_file)

                self.compare_metadata_file(original_metadata,
                                           generated_metadata, metadata_file)