Example #1
 def tearDownClass(cls):
     """Delete fixtures and orphans."""
     client = api.Client(cls.cfg)
     for repo in cls.repos:
         client.delete(repo['_href'])
     client.delete(ORPHANS_PATH)
Example #2
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     cls.api_client = api.Client(cls.cfg, api.json_handler)
     cls.cli_client = cli.Client(cls.cfg)
     cls.sudo = () if cli.is_root(cls.cfg) else ('sudo',)
Example #3
def tearDownModule():  # pylint:disable=invalid-name
    """Delete orphan content units."""
    cfg = config.get_config()
    api.Client(cfg).delete(ORPHANS_PATH)
    if cfg.pulp_selinux_enabled:
        set_pulp_manage_rsync(cfg, False)
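``set_pulp_manage_rsync`` is not defined in this excerpt. A minimal sketch of what such a helper might do, assuming it only toggles the ``pulp_manage_rsync`` SELinux boolean with ``setsebool``:

from pulp_smash import cli


def set_pulp_manage_rsync(cfg, enable):
    """Toggle the pulp_manage_rsync SELinux boolean (illustrative sketch)."""
    sudo = () if cli.is_root(cfg) else ('sudo',)
    cmd = sudo + ('setsebool', 'pulp_manage_rsync', 'on' if enable else 'off')
    return cli.Client(cfg).run(cmd)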
Example #4
    def test_all(self):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        body = gen_plugin_template_remote()
        remote = client.post(PLUGIN_TEMPLATE_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['_href'])

        sync(cfg, remote, repo)
        repo = client.get(repo['_href'])

        # Create a publisher.
        publisher = client.post(PLUGIN_TEMPLATE_PUBLISHER_PATH,
                                gen_plugin_template_publisher())
        self.addCleanup(client.delete, publisher['_href'])

        # Create a publication.
        publication = publish(cfg, publisher, repo)
        self.addCleanup(client.delete, publication['_href'])

        # Create a distribution.
        body = gen_distribution()
        body['publication'] = publication['_href']
        distribution = client.post(DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution['_href'])

        # Pick a content unit, and download it from both Pulp Fixtures…
        unit_path = choice(get_plugin_template_content_unit_paths(repo))
        fixtures_hash = hashlib.sha256(
            utils.http_get(urljoin(PLUGIN_TEMPLATE_FIXTURE_URL,
                                   unit_path))).hexdigest()

        # …and Pulp.
        client.response_handler = api.safe_handler

        unit_url = cfg.get_hosts('api')[0].roles['api']['scheme']
        unit_url += '://' + distribution['base_url'] + '/'
        unit_url = urljoin(unit_url, unit_path)

        pulp_hash = hashlib.sha256(client.get(unit_url).content).hexdigest()
        self.assertEqual(fixtures_hash, pulp_hash)
Example #5
 def setUp(self):
     """Create an API client."""
     self.client = api.Client(self.cfg, api.json_handler)
Example #6
    def test_all(self):
        """Sync two copies of the same UpdateRecords.

        Make sure we end up with only one copy.

        Do the following:

        1. Create a repository and a remote.
        2. Sync the remote.
        3. Assert that the content summary matches what is expected.
        4. Create a new remote using a fixture containing updated errata
           (updaterecords with the same IDs as the existing updaterecord
           content, but different metadata).
        5. Sync the remote again.
        6. Assert that the repository version is different from the previous
           one but has the same content summary.
        7. Assert that the updaterecords have changed since the last sync.
        """
        client = api.Client(self.cfg, api.json_handler)

        repo = client.post(REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo['pulp_href'])

        # Create a remote with the unsigned RPM fixture URL. The unsigned
        # fixture is needed because the updated-updateinfo fixture used below
        # also contains unsigned RPMs. Signed and unsigned units have
        # different hashes, so they would be treated as different units.
        body = gen_rpm_remote(url=RPM_UNSIGNED_FIXTURE_URL)
        remote = client.post(RPM_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['pulp_href'])

        # Sync the repository.
        self.assertIsNone(repo['latest_version_href'])
        sync(self.cfg, remote, repo)
        repo = client.get(repo['pulp_href'])
        self.assertDictEqual(get_content_summary(repo), RPM_FIXTURE_SUMMARY)

        # Save a copy of the original updateinfo
        original_updaterecords = {
            content['id']: content
            for content in get_content(repo)[RPM_ADVISORY_CONTENT_NAME]
        }

        # Create a remote with a different test fixture, one containing mutated
        # updateinfo.
        body = gen_rpm_remote(url=RPM_UPDATED_UPDATEINFO_FIXTURE_URL)
        remote = client.post(RPM_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote['pulp_href'])

        # Sync the repository again.
        sync(self.cfg, remote, repo)
        repo = client.get(repo['pulp_href'])
        self.assertDictEqual(get_content_summary(repo), RPM_FIXTURE_SUMMARY)
        self.assertEqual(
            len(get_added_content(repo)[RPM_ADVISORY_CONTENT_NAME]), 4)
        self.assertEqual(
            len(get_removed_content(repo)[RPM_ADVISORY_CONTENT_NAME]), 4)

        # Check that the updaterecords have been modified.
        mutated_updaterecords = {
            content['id']: content
            for content in get_content(repo)[RPM_ADVISORY_CONTENT_NAME]
        }

        self.assertNotEqual(mutated_updaterecords, original_updaterecords)
        self.assertEqual(
            mutated_updaterecords[RPM_UPDATERECORD_ID]['description'],
            'Updated Gorilla_Erratum and the updated date contains timezone',
            mutated_updaterecords[RPM_UPDATERECORD_ID],
        )
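Helpers such as ``get_content_summary``, ``get_content``, ``get_added_content``, and ``get_removed_content`` come from pulp-smash's Pulp 3 utilities and are not shown above. A minimal sketch of the idea behind ``get_content_summary``, assuming each repository version exposes a ``content_summary`` with ``present``, ``added``, and ``removed`` sections keyed by content type name:

from pulp_smash import api, config


def get_content_summary(repo, summary_type='present'):
    """Map content type names to unit counts for the latest version (sketch)."""
    client = api.Client(config.get_config(), api.json_handler)
    version = client.get(repo['latest_version_href'])
    return {
        name: summary['count']
        for name, summary in version['content_summary'][summary_type].items()
    }

The added/removed variants would follow the same pattern, reading the other sections of the summary.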
Example #7
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     if cls.cfg.pulp_version < Version('2.18.1'):
         raise unittest.SkipTest('This test requires Pulp 2.18.1 or newer.')
     cls.client = api.Client(cls.cfg, api.json_handler)
Example #8
 def setUpClass(cls):
     """Create class-wide variables, and clean orphan units."""
     cls.cfg = config.get_config()
     delete_orphans(cls.cfg)
     cls.client = api.Client(cls.cfg, api.page_handler)
Example #9
 def setUpClass(cls):
     """Create an API Client."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
     cls.worker = {}
Example #10
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     delete_orphans(cls.cfg)
     cls.client = api.Client(cls.cfg, api.page_handler)
     cls.file = {'file': utils.http_get(RPM_UNSIGNED_URL)}
Example #11
 def setUpClass(cls):
     """Create class wide-variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
     cls.distribution = {}
Example #12
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
     cls.remote = {}
     cls.body = _gen_verbose_remote()
Example #13
 def test_01_get_by_invalid_type(self):
     """Get orphans by content type. Specify a non-existent content type."""
     client = api.Client(config.get_config(), api.echo_handler)
     response = client.get(urljoin(ORPHANS_PATH, 'foo/'))
     self.assertEqual(response.status_code, 404)
Example #14
 def test_05_no_orphans_exist(self):
     """Assert no orphans exist."""
     orphans = api.Client(config.get_config()).get(ORPHANS_PATH).json()
     self.assertEqual(_count_orphans(orphans), 0, orphans)
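``_count_orphans`` is a private helper that is not shown. A minimal sketch, assuming the Pulp 2 orphans resource returns a dict mapping each content type to a summary that contains a ``count`` field:

def _count_orphans(orphans):
    """Sum the orphan counts reported for every content type (sketch)."""
    return sum(summary['count'] for summary in orphans.values())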
Example #15
    def setUpClass(cls):
        """Create class-wide variables.

        1. Create a repository.
        2. Create a remote pointing to external registry.
        3. Sync the repository using the remote and re-read the repo data.
        4. Create a container distribution to serve the repository.
        5. Create another container distribution to serve the repository
           version.

        This test targets the following issue:

        * `Pulp #4460 <https://pulp.plan.io/issues/4460>`_
        """
        cls.cfg = config.get_config()

        token_auth = cls.cfg.hosts[0].roles['token auth']
        client = cli.Client(cls.cfg)
        client.run('openssl ecparam -genkey -name prime256v1 -noout -out {}'
                   .format(token_auth['private key']).split())
        client.run('openssl ec -in {} -pubout -out {}'.format(
            token_auth['private key'], token_auth['public key']).split())

        cls.client = api.Client(cls.cfg, api.page_handler)
        cls.teardown_cleanups = []

        with contextlib.ExitStack() as stack:
            # ensure tearDownClass runs if an error occurs here
            stack.callback(cls.tearDownClass)

            # Step 1
            _repo = cls.client.post(CONTAINER_REPO_PATH, gen_repo())
            cls.teardown_cleanups.append((cls.client.delete, _repo['pulp_href']))

            # Step 2
            cls.remote = cls.client.post(
                CONTAINER_REMOTE_PATH, gen_container_remote()
            )
            cls.teardown_cleanups.append(
                (cls.client.delete, cls.remote['pulp_href'])
            )

            # Step 3
            sync(cls.cfg, cls.remote, _repo)
            cls.repo = cls.client.get(_repo['pulp_href'])

            # Step 4.
            response_dict = cls.client.using_handler(api.task_handler).post(
                CONTAINER_DISTRIBUTION_PATH,
                gen_distribution(repository=cls.repo['pulp_href'])
            )
            distribution_href = response_dict['pulp_href']
            cls.distribution_with_repo = cls.client.get(distribution_href)
            cls.teardown_cleanups.append(
                (cls.client.delete, cls.distribution_with_repo['pulp_href'])
            )

            # Step 5.
            response_dict = cls.client.using_handler(api.task_handler).post(
                CONTAINER_DISTRIBUTION_PATH,
                gen_distribution(repository_version=cls.repo['latest_version_href'])
            )
            distribution_href = response_dict['pulp_href']
            cls.distribution_with_repo_version = cls.client.get(distribution_href)
            cls.teardown_cleanups.append(
                (cls.client.delete, cls.distribution_with_repo_version['pulp_href'])
            )

            # remove callback if everything goes well
            stack.pop_all()
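The matching ``tearDownClass`` is not shown here. A minimal sketch, assuming it simply replays the collected ``(callable, argument)`` pairs in reverse order:

def tearDownClass(cls):
    """Run the cleanups collected during setUpClass (illustrative sketch)."""
    for cleanup_function, argument in reversed(cls.teardown_cleanups):
        cleanup_function(argument)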
Example #16
 def setUpClass(cls):
     """Define class-wide variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg)
     cls.repo = {}
     cls.distribution = {}
Example #17
    def do_test(self, policy):
        """Verify whether content served by pulp can be downloaded.

        The process of publishing content is more involved in Pulp 3 than it
        was under Pulp 2. Given a repository, the process is as follows:

        1. Create a publication from the repository. (The latest repository
           version is selected if no version is specified.) A publication is a
           repository version plus metadata.
        2. Create a distribution from the publication. The distribution defines
           at which URLs a publication is available, e.g.
           ``http://example.com/content/foo/`` and
           ``http://example.com/content/bar/``.

        Do the following:

        1. Create, populate, publish, and distribute a repository.
        2. Select a random content unit in the distribution. Download that
           content unit from Pulp, and verify that the content unit has the
           same checksum when fetched directly from Pulp-Fixtures.

        This test targets the following issues:

        * `Pulp #2895 <https://pulp.plan.io/issues/2895>`_
        * `Pulp Smash #872 <https://github.com/PulpQE/pulp-smash/issues/872>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(DEB_REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo["pulp_href"])

        body = gen_deb_remote()
        remote = client.post(DEB_REMOTE_PATH, body)
        self.addCleanup(client.delete, remote["pulp_href"])

        sync(cfg, remote, repo)
        repo = client.get(repo["pulp_href"])

        # Create a publication.
        publication = self.Meta.create_publication(cfg, repo)
        self.addCleanup(client.delete, publication["pulp_href"])

        # Create a distribution.
        body = gen_distribution()
        body["publication"] = publication["pulp_href"]
        distribution = client.using_handler(api.task_handler).post(
            self.Meta.DISTRIBUTION_PATH, body)
        self.addCleanup(client.delete, distribution["pulp_href"])

        # Pick a content unit (of each type), and download it from both Pulp Fixtures…
        unit_paths = [
            choice(paths)
            for paths in self.Meta.get_content_unit_paths(repo).values()
            if paths
        ]
        fixtures_hashes = [
            hashlib.sha256(
                utils.http_get(urljoin(DEB_FIXTURE_URL,
                                       unit_path[0]))).hexdigest()
            for unit_path in unit_paths
        ]

        # …and Pulp.
        contents = [
            download_content_unit(cfg, distribution, unit_path[1])
            for unit_path in unit_paths
        ]
        pulp_hashes = [
            hashlib.sha256(content).hexdigest() for content in contents
        ]
        self.assertEqual(fixtures_hashes, pulp_hashes)
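``download_content_unit`` is a pulp-smash helper that is not shown. A minimal sketch of one way to assemble the download URL, following the pattern used in Example #4 and assuming ``distribution['base_url']`` carries no scheme:

from urllib.parse import urljoin

from pulp_smash import utils


def download_content_unit(cfg, distribution, unit_path):
    """Download one content unit served under a distribution (sketch)."""
    scheme = cfg.get_hosts('api')[0].roles['api']['scheme']
    unit_url = urljoin(scheme + '://' + distribution['base_url'] + '/', unit_path)
    return utils.http_get(unit_url)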
Example #18
 def setUp(self):
     """Set variables used by each test case."""
     self.client = api.Client(config.get_config())
     self.href = urljoin(REPOSITORY_PATH, utils.uuid4())
Example #19
    def test_all(self):
        """Test whether metadata copied between repos are independent.

        This test targets the following issues:

        * `Pulp #1944 <https://pulp.plan.io/issues/1944>`_
        * `Pulp-2-Tests #91
          <https://github.com/PulpQE/Pulp-2-Tests/issues/91>`_

        Do the following:

        1. Create and sync a repository containing
           ``yum_repo_metadata_file``.
        2. Create another repo and copy the yum metadata from the
           first repo to the second repo.
        3. Publish repo 2.
        4. Remove the metadata units from the first repo. Delete
           orphan packages.
        5. Publish repo 2 again and check whether the metadata is
           still present in the second repo.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(importer_config={'feed': RPM_YUM_METADATA_FILE},
                        distributors=[gen_distributor()])
        repo_1 = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo_1['_href'])
        sync_repo(cfg, repo_1)
        repo_1 = client.get(repo_1['_href'], params={'details': True})

        # Create a second repository.
        body = gen_repo(distributors=[gen_distributor()])
        repo_2 = client.post(REPOSITORY_PATH, body)
        repo_2 = client.get(repo_2['_href'], params={'details': True})
        self.addCleanup(client.delete, repo_2['_href'])

        # Copy data to second repository.
        client.post(
            urljoin(repo_2['_href'], 'actions/associate/'), {
                'source_repo_id': repo_1['id'],
                'override_config': {
                    'recursive': True
                },
                'criteria': {
                    'filters': {},
                    'type_ids': ['yum_repo_metadata_file'],
                }
            })

        # Publish repo 2
        publish_repo(cfg, repo_2)
        # Removing metadata from repo 1 and deleting orphans.
        client.post(urljoin(repo_1['_href'], 'actions/unassociate/'),
                    {'criteria': {
                        'filters': {}
                    }})
        repo_1 = client.get(repo_1['_href'], params={'details': True})
        client.delete(ORPHANS_PATH)
        # Publish repo 2 again
        publish_repo(cfg, repo_2)
        repo_2 = client.get(repo_2['_href'], params={'details': True})

        # retrieve repodata of the published repo
        xml_element = get_repodata_repomd_xml(cfg, repo_2['distributors'][0])
        xpath = ('{{{namespace}}}data'.format(
            namespace=RPM_NAMESPACES['metadata/repo']))
        yum_meta_data_element = [
            element for element in xml_element.findall(xpath)
            if element.attrib['type'] == 'productid'
        ]
        self.assertNotIn('yum_repo_metadata_file',
                         repo_1['content_unit_counts'])
        self.assertEqual(
            repo_2['content_unit_counts']['yum_repo_metadata_file'], 1)
        self.assertGreater(len(yum_meta_data_element), 0)
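``get_repodata_repomd_xml`` is not shown in this excerpt. A minimal sketch, assuming the yum distributor publishes under ``/pulp/repos/<relative_url>/`` and that ``repomd.xml`` lives in the usual ``repodata/`` directory:

from urllib.parse import urljoin
from xml.etree import ElementTree

from pulp_smash import utils


def get_repodata_repomd_xml(cfg, distributor):
    """Fetch and parse repomd.xml for a published yum distributor (sketch)."""
    path = '/pulp/repos/{}/repodata/repomd.xml'.format(
        distributor['config']['relative_url'].strip('/'))
    return ElementTree.fromstring(
        utils.http_get(urljoin(cfg.get_base_url(), path)))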
Example #20
 def tearDownClass(cls):
     """Clean up resources."""
     if cls.repo:
         api.Client(cls.cfg).delete(cls.repo['_href'])
     super().tearDownClass()
Example #21
 def setUpClass(cls):
     """Create class-wide variable."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
Example #22
    def setUpClass(cls):
        """Create two repositories, first is feed of second one.

        Provides server config and set of iterable to delete. Following steps
        are executed:

        1. Create repository foo with feed, sync and publish it.
        2. Create repository bar with foo as a feed and run sync.
        3. Get content of both repositories.
        4. Remove random unit from repository foo and publish foo.
        5. Sync repository bar.
        6. Get content of both repositories.
        """
        super(RemoveMissingTestCase, cls).setUpClass()
        cls.responses = {}
        hrefs = []  # repository hrefs

        # Create and sync a repository.
        client = api.Client(cls.cfg)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        hrefs.append(client.post(REPOSITORY_PATH, body).json()['_href'])
        cls.resources.add(hrefs[0])  # mark for deletion
        cls.responses['first sync'] = sync_repo(cls.cfg, hrefs[0])

        # Add a distributor and publish it.
        cls.responses['distribute'] = client.post(
            urljoin(hrefs[0], 'distributors/'),
            gen_distributor(),
        )
        cls.responses['first publish'] = client.post(
            urljoin(hrefs[0], 'actions/publish/'),
            {'id': cls.responses['distribute'].json()['id']},
        )

        # Create and sync a second repository. We disable SSL validation for a
        # practical reason: each HTTPS feed must have a certificate to work,
        # which is burdensome to do here.
        body = gen_repo()
        body['importer_config']['feed'] = urljoin(
            cls.cfg.base_url,
            _PUBLISH_DIR +
            cls.responses['distribute'].json()['config']['relative_url'],
        )
        body['importer_config']['remove_missing'] = True  # see docstring
        body['importer_config']['ssl_validation'] = False
        hrefs.append(client.post(REPOSITORY_PATH, body).json()['_href'])
        cls.resources.add(hrefs[1])  # mark for deletion
        cls.responses['second sync'] = sync_repo(cls.cfg, hrefs[1])

        # Get contents of both repositories
        for i, href in enumerate(hrefs):
            cls.responses['repo {} units, pre'.format(i)] = client.post(
                urljoin(href, 'search/units/'),
                {'criteria': {}},
            )

        # Get random unit from first repository to remove
        cls.removed_unit = random.choice([
            unit['metadata']['name']
            for unit in cls.responses['repo 0 units, pre'].json()
            if unit['unit_type_id'] == 'rpm'
        ])

        # Remove unit from first repo and publish again
        cls.responses['remove unit'] = client.post(
            urljoin(hrefs[0], 'actions/unassociate/'),
            {
                'criteria': {
                    'fields': {
                        'unit': [
                            'arch',
                            'checksum',
                            'checksumtype',
                            'epoch',
                            'name',
                            'release',
                            'version',
                        ]
                    },
                    'type_ids': ['rpm'],
                    'filters': {
                        'unit': {
                            'name': cls.removed_unit
                        }
                    }
                }
            },
        )

        # Publish the first repo again, and sync the second repo again.
        cls.responses['second publish'] = client.post(
            urljoin(hrefs[0], 'actions/publish/'),
            {'id': cls.responses['distribute'].json()['id']},
        )
        cls.responses['third sync'] = sync_repo(cls.cfg, hrefs[1])

        # Search for units in both repositories again
        for i, href in enumerate(hrefs):
            cls.responses['repo {} units, post'.format(i)] = client.post(
                urljoin(href, 'search/units/'),
                {'criteria': {}},
            )
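``sync_repo`` is not defined in this excerpt; note that Example #19 passes a full repository object to a same-named helper, while this example passes an href. A minimal sketch matching the call made here, assuming Pulp 2's ``actions/sync/`` endpoint:

from urllib.parse import urljoin

from pulp_smash import api


def sync_repo(cfg, repo_href):
    """Trigger a sync of the repository at ``repo_href`` (illustrative sketch)."""
    # The real helper presumably also polls the spawned sync tasks to completion.
    return api.Client(cfg).post(
        urljoin(repo_href, 'actions/sync/'), {'override_config': {}})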
Example #23
 def setUpClass(cls):
     """Create shared variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
Example #24
    def test_all(self):
        """Test content promotion for a distribution.

        This test targets the following issues:

        * `Pulp #4186 <https://pulp.plan.io/issues/4186>`_
        * `Pulp #8475 <https://pulp.plan.io/issues/8475>`_

        Do the following:

        1. Create a repository that has at least one repository version.
        2. Create a publication.
        3. Create two distributions using the same publication. The
           distributions will have different ``base_path`` values.
        4. Assert that the distributions have the same publication.
        5. Assert that the distributions are viewable from the base URL.
        6. Assert that the content in the distributions is viewable.
        7. Select a content unit. Download that content unit from Pulp using
           the two different distributions, and assert that the content unit
           has the same checksum when fetched from either distribution.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        repo = client.post(FILE_REPO_PATH, gen_repo())
        self.addCleanup(client.delete, repo["pulp_href"])

        remote = client.post(FILE_REMOTE_PATH,
                             gen_remote(FILE_FIXTURE_MANIFEST_URL))
        self.addCleanup(client.delete, remote["pulp_href"])

        sync(cfg, remote, repo)
        repo = client.get(repo["pulp_href"])

        publication = create_file_publication(cfg, repo)
        self.addCleanup(client.delete, publication["pulp_href"])

        distributions = []
        for _ in range(2):
            body = gen_distribution()
            body["publication"] = publication["pulp_href"]
            distribution = client.using_handler(api.task_handler).post(
                FILE_DISTRIBUTION_PATH, body)
            distributions.append(distribution)
            self.addCleanup(client.delete, distribution["pulp_href"])

        self.assertEqual(distributions[0]["publication"],
                         distributions[1]["publication"], distributions)

        client.response_handler = api.safe_handler
        self.assertEqual(client.get(PULP_CONTENT_BASE_URL).status_code, 200)

        for distribution in distributions:
            self.assertEqual(
                client.get(distribution["base_url"]).status_code, 200)

        unit_urls = []
        unit_path = get_added_content(
            repo)[FILE_CONTENT_NAME][0]["relative_path"]
        for distribution in distributions:
            unit_url = distribution["base_url"]
            unit_urls.append(urljoin(unit_url, unit_path))

        self.assertEqual(
            hashlib.sha256(client.get(unit_urls[0]).content).hexdigest(),
            hashlib.sha256(client.get(unit_urls[1]).content).hexdigest(),
            unit_urls,
        )
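``create_file_publication`` wraps the publication-create call for the file plugin. A minimal sketch, assuming a ``FILE_PUBLICATION_PATH`` constant for the plugin's publications endpoint (the constant name is an assumption) and that the task handler returns the created resource:

from pulp_smash import api


def create_file_publication(cfg, repo, version_href=None):
    """Create a file publication for a repository (illustrative sketch)."""
    if version_href:
        body = {'repository_version': version_href}
    else:
        body = {'repository': repo['pulp_href']}
    client = api.Client(cfg, api.json_handler)
    # FILE_PUBLICATION_PATH is assumed to point at the file plugin's
    # publications endpoint, e.g. /pulp/api/v3/publications/file/file/.
    return client.using_handler(api.task_handler).post(FILE_PUBLICATION_PATH, body)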
Example #25
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg)
Example #26
def tearDownModule():  # pylint:disable=invalid-name
    """Delete orphan content units."""
    api.Client(config.get_config()).delete(ORPHANS_PATH)
Example #27
 def setUpClass(cls):
     """Make calls to the server and save the responses."""
     super(SeriesTestCase, cls).setUpClass()
     client = api.Client(cls.cfg, api.echo_handler)
     for key, path in _PATHS.items():
         cls.responses[key] = client.post(path, {key + '_criteria': {}})
Example #28
    def test_all(self):
        """Publish w/an rsync distributor when ``serve_https`` is false."""
        if selectors.bug_is_untestable(2657, self.cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2657')
        if (selectors.bug_is_untestable(3313, self.cfg.pulp_version) and
                utils.os_is_f27(self.cfg)):
            self.skipTest('https://pulp.plan.io/issues/3313')

        # Create a user with which to rsync files
        ssh_user, priv_key = self.make_user(self.cfg)
        ssh_identity_file = self.write_private_key(self.cfg, priv_key)

        # Create a repo
        client = api.Client(self.cfg, api.json_handler)
        body = {
            'distributors': [],
            'id': utils.uuid4(),
            'importer_config': {'feed': FILE_FEED_URL},
            'importer_type_id': 'iso_importer',
        }
        body['distributors'].append({
            'auto_publish': False,
            'distributor_config': {'serve_http': True, 'serve_https': False},
            'distributor_id': utils.uuid4(),
            'distributor_type_id': 'iso_distributor',
        })
        body['distributors'].append({
            'auto_publish': False,
            'distributor_config': {
                'predistributor_id': body['distributors'][0]['distributor_id'],
                'remote': {
                    'host': urlparse(self.cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                },
            },
            'distributor_id': utils.uuid4(),
            'distributor_type_id': 'iso_rsync_distributor',
        })
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Sync and publish the repo. If Pulp #2657 hasn't been fixed,
        # publishing the iso_rsync_distributor will fail with an error like:
        #
        #     pulp.plugins.rsync.publish:ERROR: (1181-98080) rsync: link_stat
        #     "/var/lib/pulp/published/https/isos/repo-id/PULP_MANIFEST"
        #     failed: No such file or directory (2)
        #
        utils.sync_repo(self.cfg, repo)
        dists = get_dists_by_type_id(self.cfg, repo)
        utils.publish_repo(self.cfg, repo, {
            'id': dists['iso_distributor']['id'],
        })
        utils.publish_repo(self.cfg, repo, {
            'id': dists['iso_rsync_distributor']['id'],
        })

        # Verify the correct units are on the remote system.
        cli_client = cli.Client(self.cfg)
        sudo = () if utils.is_root(self.cfg) else ('sudo',)
        path = dists['iso_rsync_distributor']['config']['remote']['root']
        path = os.path.join(path, 'content/units')
        cmd = sudo + ('find', path, '-name', '*.iso')
        files = cli_client.run(cmd).stdout.strip().split('\n')
        self.assertEqual(len(files), FILE_FEED_COUNT, files)
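``get_dists_by_type_id`` is not shown above. A minimal sketch, assuming each distributor type appears at most once on the repository:

from pulp_smash import api


def get_dists_by_type_id(cfg, repo):
    """Index a repository's distributors by distributor_type_id (sketch)."""
    repo = api.Client(cfg, api.json_handler).get(
        repo['_href'], params={'details': True})
    return {
        distributor['distributor_type_id']: distributor
        for distributor in repo['distributors']
    }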
Example #29
 def _verify_units_not_in_repo(self, cfg, repo_href):
     """Verify no content units are in the specified repository."""
     repo = api.Client(cfg).get(repo_href).json()
     for key, val in repo['content_unit_counts'].items():
         with self.subTest(key=key):
             self.assertEqual(val, 0)
Example #30
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.client = api.Client(config.get_config(), api.json_handler)
     cls.task = {}