Example #1
def setUpModule():  # pylint:disable=invalid-name
    """Conditionally skip tests. Create repositories with fixture data."""
    cfg = config.get_config()
    if not selectors.bug_is_fixed(1991, cfg.pulp_version):
        raise unittest.SkipTest('https://pulp.plan.io/issues/1991')
    set_up_module()

    # Fetch RPMs.
    _SIGNED_PACKAGES['rpm'] = utils.http_get(RPM_SIGNED_URL)
    _SIGNED_PACKAGES['srpm'] = utils.http_get(SRPM_SIGNED_URL)
    _UNSIGNED_PACKAGES['rpm'] = utils.http_get(RPM_UNSIGNED_URL)
    _UNSIGNED_PACKAGES['srpm'] = utils.http_get(SRPM_UNSIGNED_URL)
    if selectors.bug_is_fixed(1806, cfg.pulp_version):
        _SIGNED_PACKAGES['drpm'] = utils.http_get(DRPM_SIGNED_URL)
        _UNSIGNED_PACKAGES['drpm'] = utils.http_get(DRPM_UNSIGNED_URL)

    # Create repos, and upload RPMs to them.
    client = api.Client(cfg, api.json_handler)
    try:
        repo = client.post(REPOSITORY_PATH, gen_repo())
        _REPOS['signed'] = repo
        for type_id, pkg in _SIGNED_PACKAGES.items():
            upload_import_unit(cfg, pkg, {'unit_type_id': type_id}, repo)

        repo = client.post(REPOSITORY_PATH, gen_repo())
        _REPOS['unsigned'] = repo
        for type_id, pkg in _UNSIGNED_PACKAGES.items():
            upload_import_unit(cfg, pkg, {'unit_type_id': type_id}, repo)
    except:  # noqa:E722
        _SIGNED_PACKAGES.clear()
        _UNSIGNED_PACKAGES.clear()
        for _ in range(len(_REPOS)):
            client.delete(_REPOS.popitem()[1]['_href'])
        raise
Example #2
    def do_test(self, recursive, recursive_conservative):
        """Copy of units for a repository with rich/weak dependencies."""
        repos = []
        body = gen_repo(importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
                        distributors=[gen_distributor()])
        repos.append(self.client.post(REPOSITORY_PATH, body))
        self.addCleanup(self.client.delete, repos[0]['_href'])
        sync_repo(self.cfg, repos[0])
        repos.append(self.client.post(REPOSITORY_PATH, gen_repo()))
        self.addCleanup(self.client.delete, repos[1]['_href'])

        # Pulp 2.18.1 introduced a new flag `recursive_conservative`.
        # If true, units are copied together with their
        # dependencies, unless those are already satisfied by the content in
        # the target repository.
        override_config = {'recursive': recursive}
        if self.cfg.pulp_version >= Version('2.18.1'):
            override_config.update(
                {'recursive_conservative': recursive_conservative})
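        # Copy the matching RPM from the source repository into the target repository.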
        self.client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'), {
                'source_repo_id': repos[0]['id'],
                'override_config': override_config,
                'criteria': {
                    'filters': {
                        'unit': {
                            'name': RPM2_RICH_WEAK_DATA['name']
                        }
                    },
                    'type_ids': ['rpm'],
                },
            })
        return self.client.get(repos[1]['_href'], params={'details': True})
Example #3
    def test_all(self):
        """Test that recursive copy of erratas copies RPM packages.

        This test targets the following issues:

        * `Pulp Smash #769 <https://github.com/PulpQE/pulp-smash/issues/769>`_
        * `Pulp #3004 <https://pulp.plan.io/issues/3004>`_

        Do the following:

        1. Create and sync a repository with errata and RPM packages.
        2. Create a second repository.
        3. Copy units from the first repository to the second repository,
           using ``recursive`` as true and filtering ``type_id`` as
           ``erratum``.
        4. Assert that RPM packages were copied.
        """
        cfg = config.get_config()
        if not selectors.bug_is_fixed(3004, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/3004')

        repos = []
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UPDATED_INFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repos.append(client.post(REPOSITORY_PATH, body))
        self.addCleanup(client.delete, repos[0]['_href'])
        sync_repo(cfg, repos[0])

        # Create a second repository.
        repos.append(client.post(REPOSITORY_PATH, gen_repo()))
        self.addCleanup(client.delete, repos[1]['_href'])

        # Copy data to second repository.
        client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {
                'source_repo_id': repos[0]['id'],
                'override_config': {
                    'recursive': True
                },
                'criteria': {
                    'filters': {},
                    'type_ids': ['erratum']
                },
            },
        )

        # Assert that RPM packages were copied.
        units = search_units(cfg, repos[1], {'type_ids': ['rpm']})
        self.assertGreater(len(units), 0)
Example #4
 def test_all(self):
     """Check if Pulp only associate missing repo content."""
     cfg = config.get_config()
     if cfg.pulp_version < Version('2.11'):
         self.skipTest(
             'Selective association is available on Pulp 2.11+. See Pulp '
             '#2457 for more information.'
         )
     client = api.Client(cfg, api.json_handler)
     body = gen_repo()
     body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
     repo = client.post(REPOSITORY_PATH, body)
     self.addCleanup(client.delete, repo['_href'])
     sync_repo(cfg, repo)
     rpm_units = (
         _get_units_by_type(search_units(cfg, repo), 'rpm')
     )
     # Select up to a quarter of the available units to remove.
     to_remove = random.sample(
         rpm_units, random.randrange(int(RPM_UNSIGNED_FEED_COUNT / 4)))
     for unit in to_remove:
         _remove_unit(cfg, repo, unit)
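     # Re-sync the repository; only the previously removed units should be re-added.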
     report = client.post(urljoin(repo['_href'], 'actions/sync/'))
     tasks = tuple(api.poll_spawned_tasks(cfg, report))
     self.assertEqual(len(tasks), 1, tasks)
     self.assertEqual(
         tasks[0]['result']['added_count'], len(to_remove), to_remove)
Example #5
 def setUpClass(cls):
     """Create a repository."""
     cls.cfg = config.get_config()
     client = api.Client(cls.cfg)
     body = gen_repo()
     body['distributors'] = [gen_distributor()]
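     # No json_handler is passed to api.Client, so .json() is needed to get the repo as a dict.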
     cls.repo = client.post(REPOSITORY_PATH, body).json()
Example #6
    def do_test(self, distributor_config_update):
        """Implement most of the test logic."""
        rpms = tuple(
            utils.http_get(url)
            for url in (RPM_UNSIGNED_URL, RPM2_UNSIGNED_URL))

        # Create a repository.
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config'].update(
            distributor_config_update)
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Upload an RPM, publish the repo, and count metadata files twice.
        cli_client = cli.Client(self.cfg)
        sudo = () if cli.is_root(self.cfg) else ('sudo', )
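        # Build a find command that locates the published repo's repodata directory.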
        find_repodata_cmd = sudo + (
            'find',
            os.path.join('/var/lib/pulp/published/yum/master/yum_distributor/',
                         str(repo['id'])), '-type', 'd', '-name', 'repodata')
        found = []
        for rpm in rpms:
            upload_import_unit(self.cfg, rpm, {'unit_type_id': 'rpm'}, repo)
            publish_repo(self.cfg, repo)
            repodata_path = cli_client.run(find_repodata_cmd).stdout.strip()
            found.append(
                cli_client.run(sudo + ('find', repodata_path, '-type',
                                       'f')).stdout.splitlines())
        return found
Example #7
    def test_broken_simlinks(self):
        """Test broken symlinks."""
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo(
            importer_config={'feed': RPM_YUM_METADATA_FILE},
            distributors=[gen_distributor(auto_publish=True)]
        )
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        sync_repo(self.cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})

        # Assert that there is a yum_repo_metadata file present in the repo.
        self.assertEqual(
            repo['content_unit_counts']['yum_repo_metadata_file'],
            1,
            repo
        )

        path = os.path.join(
            '/var/lib/pulp/published/yum/https/repos/',
            repo['distributors'][0]['config']['relative_url'],
            'repodata'
        )

        # Assert that the productid was not saved as symlink
        productid_symlink = self.find_productid(True, path)
        self.assertEqual(len(productid_symlink), 0, productid_symlink)

        # Assert that the productid was saved as a file
        productid_file = self.find_productid(False, path)
        self.assertEqual(len(productid_file), 1, productid_file)
Example #8
    def test_missing_filelists_error_message(self):
        """Test whether an error report contains sufficient information.

        Do the following:

        1. Create and sync a repository using a feed that is missing filelists metadata.
        2. Get a reference to the task containing error information.
        3. Assert that:

           * The error description is sufficiently verbose. See `Pulp #4262`_
           * The traceback is non-null. See `Pulp #1455`_.

        .. _Pulp #1455: https://pulp.plan.io/issues/1455
        .. _Pulp #4262: https://pulp.plan.io/issues/4262
        """
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.19'):
            raise unittest.SkipTest('This test requires Pulp 2.19 or newer.')

        repo_body = gen_repo(
            importer_config={'feed': RPM_MISSING_FILELISTS_FEED_URL}
        )
        task = self.run_task(repo_body)

        with self.subTest(comment='check task error description'):
            tokens = ['error', 'metadata', 'not', 'found']
            self.assertTrue(
                all(
                    [
                        token
                        in task['error']['description'].lower()
                        for token in tokens
                    ]
                )
            )
Example #9
    def test_invalid_feed_error_message(self):
        """Test whether an error report contains sufficient information.

        Do the following:

        1. Create and sync a repository using an invalid feed URL.
        2. Get a reference to the task containing error information.
        3. Assert that:

           * The error description is sufficiently verbose. See `Pulp #1376`_
             and `Pulp Smash #525`_.
           * The traceback is non-null. See `Pulp #1455`_.

        .. _Pulp #1376: https://pulp.plan.io/issues/1376
        .. _Pulp #1455: https://pulp.plan.io/issues/1455
        .. _Pulp Smash #525: https://github.com/PulpQE/pulp-smash/issues/525
        """
        task = self.run_task(gen_repo(importer_config={'feed': utils.uuid4()}))

        with self.subTest(comment='check task error description'):
            tokens = ['scheme', 'must', 'be', 'http', 'https', 'file']
            self.assertTrue(
                all(
                    [
                        token
                        in task['error']['description'].lower()
                        for token in tokens
                    ]
                )
            )
Example #10
    def setUpClass(cls):
        """Create a schedule to publish a repo, verify the ``total_run_count``.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 2 minutes
        4. Wait for 130 seconds and read the schedule to get the number of
           "publish" runs
        """
        super().setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Schedule a publish to run every 2 minutes
        distributor = gen_distributor()
        client.post(urljoin(repo['_href'], 'distributors/'), distributor)
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        schedule_path = urljoin(repo['_href'], scheduling_url)
        schedule = client.post(schedule_path, {'schedule': 'PT2M'})

        # Wait for publish to run
        time.sleep(130)

        # Read the schedule
        cls.response = client.get(schedule['_href'])
Example #11
    def setUpClass(cls):
        """Create a schedule to publish the repository.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 30 seconds
        """
        super().setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Schedule a publish to run every 30 seconds
        distributor = gen_distributor()
        distributor_url = urljoin(repo['_href'], 'distributors/')
        client.post(distributor_url, distributor)
        scheduling_url = urljoin(
            distributor_url,
            '{}/schedules/publish/'.format(distributor['distributor_id']),
        )
        cls.response = client.post(scheduling_url, {'schedule': 'PT30S'})
        cls.attrs = cls.response.json()
Example #12
    def test_all(self):
        """Verify whether uploaded module.yaml is reflected in the pulp repo."""
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17'):
            raise unittest.SkipTest(
                'This test requires Pulp 2.17 or newer.')
        client = api.Client(cfg, api.json_handler)
        # Create a normal Repo without any data.
        body = gen_repo(importer_config={'feed': RPM_UNSIGNED_FEED_URL},
                        distributors=[gen_distributor()])
        repo = client.post(REPOSITORY_PATH, body)
        repo = client.get(repo['_href'], params={'details': True})
        self.addCleanup(client.delete, repo['_href'])
        sync_repo(cfg, repo)

        # Download modules.yaml and upload it to the repo.
        unit = self._get_module_yaml_file(RPM_WITH_MODULES_FEED_URL)
        upload_import_unit(cfg, unit, {
            'unit_key': {},
            'unit_type_id': 'modulemd',
        }, repo)
        repo = client.get(repo['_href'], params={'details': True})
        # Assert that `modulemd` and `modulemd_defaults` are present on the
        # repository.
        self.assertIsNotNone(repo['content_unit_counts']['modulemd'])
        self.assertIsNotNone(repo['content_unit_counts']['modulemd_defaults'])
Example #13
    def setUpClass(cls):
        """Create an RPM repository, upload package groups, and publish."""
        super().setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

        # Create a repository and add a distributor to it.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Generate several package groups, import them into the repository, and
        # publish the repository.
        cls.package_groups = {
            'minimal': _gen_minimal_group(),
            'realistic': _gen_realistic_group(),
        }
        cls.tasks = {}
        for key, package_group in cls.package_groups.items():
            report = _upload_import_package_group(cls.cfg, repo, package_group)
            cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
        publish_repo(cls.cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps')
        cls.root_element = (get_repodata(cls.cfg, repo['distributors'][0],
                                         'group'))
Example #14
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test cases
    may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.9'):
        raise unittest.SkipTest('This module requires Pulp 2.9 or greater.')
    if check_issue_2277(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

    # Create and sync a repository.
    client = api.Client(cfg, api.json_handler)
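    # Queue cleanup actions; tearDownModule() is expected to run them.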
    _CLEANUP.append((client.delete, [ORPHANS_PATH], {}))
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    _REPO.clear()
    _REPO.update(client.post(REPOSITORY_PATH, body))
    _CLEANUP.append((client.delete, [_REPO['_href']], {}))
    try:
        sync_repo(cfg, _REPO)
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        tearDownModule()
        raise
Example #15
 def test_all(self):
     """Package manager can consume RPM with rich/weak dependencies from Pulp."""
     cfg = config.get_config()
     if cfg.pulp_version < Version('2.17'):
         raise unittest.SkipTest('This test requires Pulp 2.17 or newer.')
     if not rpm_rich_weak_dependencies(cfg):
         raise unittest.SkipTest('This test requires RPM 4.12 or newer.')
     client = api.Client(cfg, api.json_handler)
     body = gen_repo(
         importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
         distributors=[gen_distributor()]
     )
     repo = client.post(REPOSITORY_PATH, body)
     self.addCleanup(client.delete, repo['_href'])
     repo = client.get(repo['_href'], params={'details': True})
     sync_repo(cfg, repo)
     publish_repo(cfg, repo)
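     # Generate a .repo file so the local package manager can install from the published repo.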
     repo_path = gen_yum_config_file(
         cfg,
         baseurl=urljoin(cfg.get_base_url(), urljoin(
             'pulp/repos/',
             repo['distributors'][0]['config']['relative_url']
         )),
         name=repo['_href'],
         repositoryid=repo['id']
     )
     cli_client = cli.Client(cfg)
     self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
     rpm_name = 'Cobbler'
     pkg_mgr = cli.PackageManager(cfg)
     pkg_mgr.install(rpm_name)
     self.addCleanup(pkg_mgr.uninstall, rpm_name)
     rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
     self.assertEqual(rpm_name, rpm[0])
Example #16
 def test_all(self):
     """Test that uploading DRPM with checksumtype specified works."""
     if not selectors.bug_is_fixed(1806, self.cfg.pulp_version):
         raise unittest.SkipTest('https://pulp.plan.io/issues/1806')
     if not selectors.bug_is_fixed(2627, self.cfg.pulp_version):
         raise unittest.SkipTest('https://pulp.plan.io/issues/2627')
     client = api.Client(self.cfg)
     repo = client.post(REPOSITORY_PATH, gen_repo()).json()
     self.addCleanup(client.delete, repo['_href'])
     drpm = utils.http_get(DRPM_UNSIGNED_URL)
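     # Upload the DRPM, explicitly setting the checksum type to sha256.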
     upload_import_unit(
         self.cfg,
         drpm,
         {
             'unit_type_id': 'drpm',
             'unit_metadata': {
                 'checksumtype': 'sha256'
             },
         },
         repo,
     )
     units = search_units(self.cfg, repo, {})
     self.assertEqual(len(units), 1, units)
     # Test if DRPM extracted correct metadata for creating filename.
     self.assertEqual(
         units[0]['metadata']['filename'],
         DRPM,
     )
Example #17
    def test_all(self):
        """Verify ``RPM_LARGE_METADATA`` RPM file can be uploaded.

        Specifically, this method does the following:

        1. Create an RPM repo.
        2. Verify whether the file ``RPM_LARGE_METADATA`` can be uploaded
           into the repo without errors.

        This test targets:

        * `Pulp #723 <https://pulp.plan.io/issues/723>`_
        * `Pulp-2-Tests #88 <https://github.com/PulpQE/Pulp-2-Tests/issues/88>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(distributors=[gen_distributor()])
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        rpm = utils.http_get(RPM_LARGE_METADATA_FEED)
        upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        repo = client.get(repo['_href'], params={'details': True})
        publish_repo(cfg, repo)
        rpm_path = get_rpm_published_path(cfg, repo, RPM_LARGE_METADATA)

        # Check whether the uploaded RPM was published.
        self.assertIn(RPM_LARGE_METADATA, rpm_path, rpm_path)
Example #18
    def setUpClass(cls):
        """Create an RPM repository and issue a task to download the repo.

        Do the following:

        1. Reset Pulp.
        2. Create a repository with the "on demand" download policy.
        3. Sync the repository.
        4. Trigger a repository download.
        5. Corrupt a file in the repository.
        6. Trigger a repository download, without unit verification.
        7. Trigger a repository download, with unit verification.
        """
        cls.cfg = config.get_config()
        if (not selectors.bug_is_fixed(1905, cls.cfg.pulp_version)
                and os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Ensure Pulp has no content units; otherwise, we might just associate
        # pre-existing units.
        reset_pulp(cls.cfg)

        # Create and sync a repository.
        api_client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo(importer_config={
            'feed': RPM_UNSIGNED_FEED_URL,
            'download_policy': 'on_demand'
        })
        cls.repo = api_client.post(REPOSITORY_PATH, body)
        sync_repo(cls.cfg, cls.repo)

        # Trigger a repository download. Read the repo before and after.
        download_path = urljoin(cls.repo['_href'], 'actions/download/')
        params = {'details': True}
        cls.repo_pre_download = api_client.get(cls.repo['_href'],
                                               params=params)
        api_client.post(download_path, {'verify_all_units': False})
        cls.repo_post_download = api_client.get(cls.repo['_href'],
                                                params=params)

        # Corrupt an RPM. The file is there, but the checksum isn't right.
        rpm_abs_path = cls.get_rpm_abs_path()
        cli_client = cli.Client(cls.cfg)
        checksum_cmd = ('sha256sum ' + rpm_abs_path).split()
        cls.sha_pre_corruption = cli_client.run(checksum_cmd,
                                                sudo=True).stdout.strip()
        cli_client.run(('rm ' + rpm_abs_path).split(), sudo=True)
        cli_client.run(('touch ' + rpm_abs_path).split(), sudo=True)
        cli_client.run(('chown apache:apache ' + rpm_abs_path).split(),
                       sudo=True)
        cls.sha_post_corruption = cli_client.run(checksum_cmd,
                                                 sudo=True).stdout.strip()

        # Trigger repository downloads that don't and do checksum files, resp.
        api_client.post(download_path, {'verify_all_units': False})
        cls.unverified_file_sha = cli_client.run(checksum_cmd,
                                                 sudo=True).stdout.strip()
        api_client.post(download_path, {'verify_all_units': True})
        cls.verified_file_sha = cli_client.run(checksum_cmd,
                                               sudo=True).stdout.strip()
Example #19
    def test_update_checksum_type(self):
        """Check if RPM distributor can receive null checksum_type.

        See: https://pulp.plan.io/issues/2134.
        """
        cfg = config.get_config()
        if cfg.pulp_version < version.Version('2.9'):
            raise unittest.SkipTest('This test requires Pulp 2.9 or above.')
        client = api.Client(cfg, api.json_handler)
        distributor = gen_distributor()
        body = gen_repo()
        body['distributors'] = [distributor]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        for checksum_type in (None, 'sha256', None):
            client.put(
                repo['_href'], {
                    'distributor_configs': {
                        distributor['distributor_id']: {
                            'checksum_type': checksum_type,
                        }
                    }
                })
            repo = client.get(repo['_href'], params={'details': True})
            self.assertEqual(
                repo['distributors'][0]['config'].get('checksum_type'),
                checksum_type)
Example #20
    def make_repo(self, cfg, dist_cfg_updates):
        """Create a repository with an importer and pair of distributors.

        Create an RPM repository with:

        * A yum importer with a valid feed.
        * A yum distributor.
        * An RPM rsync distributor referencing the yum distributor.

        In addition, schedule the repository for deletion.

        :param cfg: Information about the
            Pulp deployment being targeted.
        :param dist_cfg_updates: A dict to be merged into the RPM rsync
            distributor's ``distributor_config`` dict. At a minimum, this
            argument should have a value of ``{'remote': {…}}``.
        :returns: A detailed dict of information about the repo.
        """
        api_client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
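        # Add an RPM rsync distributor that references the yum distributor above.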
        body['distributors'].append({
            'distributor_id': utils.uuid4(),
            'distributor_type_id': 'rpm_rsync_distributor',
            'distributor_config': {
                'predistributor_id': body['distributors'][0]['distributor_id'],
            }
        })
        body['distributors'][1]['distributor_config'].update(dist_cfg_updates)
        repo = api_client.post(REPOSITORY_PATH, body)
        self.addCleanup(api_client.delete, repo['_href'])
        return api_client.get(repo['_href'], params={'details': True})
Example #21
    def test_all(self):
        """Upload a package group to a repository twice."""
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        client = api.Client(cfg, api.json_handler)
        self.addCleanup(client.delete, ORPHANS_PATH)

        # Create a repository.
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Give the repository a package group, and publish the repository.
        package_group = {'id': utils.uuid4(), 'name': utils.uuid4()}
        _upload_import_package_group(cfg, repo, package_group)
        repo = client.get(repo['_href'], params={'details': True})
        publish_repo(cfg, repo)

        # Update the repository's package group, and re-publish the repository.
        package_group['name'] = utils.uuid4()
        _upload_import_package_group(cfg, repo, package_group)
        publish_repo(cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps'). Verify
        # the package group portion.
        root_element = get_repodata(cfg, repo['distributors'][0], 'group')
        groups = root_element.findall('group')
        self.assertEqual(len(groups), 1, ElementTree.tostring(root_element))
        for key, value in package_group.items():
            with self.subTest(key=key):
                self.assertEqual(groups[0].find(key).text, value)
Example #22
    def test_all(self):
        """Verify whether package manager can read module list from a Pulp repo."""
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17'):
            raise unittest.SkipTest(
                'This test requires Pulp 2.17 or newer.')
        if not os_support_modularity(cfg):
            raise unittest.SkipTest(
                'This test requires an OS that supports modularity.')
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(importer_config={'feed': RPM_WITH_MODULES_FEED_URL},
                        distributors=[gen_distributor()])

        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
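        # Generate a .repo file pointing dnf at the published repository.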
        repo_path = gen_yum_config_file(
            cfg,
            baseurl=urljoin(
                cfg.get_base_url(),
                urljoin('pulp/repos/',
                        repo['distributors'][0]['config']['relative_url'])),
            name=repo['_href'],
            repositoryid=repo['id'])
        cli_client = cli.Client(cfg)
        self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
        lines = cli_client.run(('dnf', 'module', 'list', '--all'),
                               sudo=True).stdout.splitlines()
        for key, value in MODULE_FIXTURES_PACKAGES.items():
            with self.subTest(package=key):
                module = [line for line in lines if key in line]
                self.assertEqual(len(module), value, module)
Example #23
    def test_all(self):
        """Search contents of a richnweak repository matching package name.

        This test targets `Pulp #3929`_ and `Pulp Smash #901`_. The
        `repository content`_ documentation describes the CLI content syntax.

        .. _Pulp #3929:  https://pulp.plan.io/issues/3929
        .. _Pulp Smash #901: https://github.com/PulpQE/pulp-smash/issues/901
        .. _repository content:
            https://docs.pulpproject.org/en/latest/user-guide/admin-client/repositories.html#content-search

        Asserts the required fields are present.
        """
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17.1'):
            raise unittest.SkipTest('This test requires Pulp 2.17.1 or newer.')
        api_client = api.Client(cfg, api.json_handler)
        body = gen_repo(
            importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
        )
        repo = api_client.post(REPOSITORY_PATH, body)
        self.addCleanup(api_client.delete, repo['_href'])
        sync_repo(cfg, repo)
        repo = api_client.get(repo['_href'], params={'details': True})

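        # Search the repo's content with pulp-admin, matching on the package name.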
        result = cli.Client(cfg).run(
            'pulp-admin rpm repo content rpm --repo-id {} '
            '--match name=Cobbler'
            .format(repo['id']).split()
        )
        required_fields = ('Recommends:', 'Requires:', 'Provides:')
        for field in required_fields:
            with self.subTest(field=field):
                self.assertEqual(result.stdout.count(field), 1, result)
Example #24
def _create_repository(cfg, importer_config):
    """Create an RPM repository with the given importer configuration.

    Return a dict of information about the repository.
    """
    body = gen_repo()
    body['importer_config'] = importer_config
    return api.Client(cfg).post(REPOSITORY_PATH, body).json()
Example #25
 def setUp(self):
     """Perform common set-up tasks."""
     self.client = api.Client(self.cfg, api.json_handler)
     body = gen_repo()
     body['importer_config'] = {'feed': RPM_UNSIGNED_FEED_URL}
     repo = self.client.post(REPOSITORY_PATH, body)
     self.addCleanup(self.client.delete, repo['_href'])
     self.repo = self.client.get(repo['_href'], params={'details': True})
Example #26
 def test_all(self):
     """Test whether one can upload an RPM with non-ascii metadata."""
     cfg = config.get_config()
     client = api.Client(cfg, api.json_handler)
     repo = client.post(REPOSITORY_PATH, gen_repo())
     self.addCleanup(client.delete, repo['_href'])
     rpm = utils.http_get(RPM_WITH_NON_ASCII_URL)
     upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
Example #27
    def test_all(self):
        """Test whether copied files retain their original mtime.

        This test targets the following issues:

        * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
        * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

        Do the following:

        1. Create, sync and publish a repository, with ``generate_sqlite`` set
           to true.
        2. Get the ``mtime`` of the sqlite files.
        3. Upload an RPM package into the repository, and sync the repository.
        4. Get the ``mtime`` of the sqlite files again. Verify that the mtimes
           are the same.
        """
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2783, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2783')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config']['generate_sqlite'] = True
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)

        # Get the mtime of the sqlite files.
        cli_client = cli.Client(cfg, cli.echo_handler)
        cmd = '' if cli.is_root(cfg) else 'sudo '
        cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
            os.path.join(
                _PATH,
                repo['distributors'][0]['config']['relative_url'],
                'repodata',
            ))
        # machine.session is used here to keep SSH session open
        mtimes_pre = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())

        # Upload to the repo, and sync it.
        rpm = utils.http_get(RPM_SIGNED_URL)
        upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        sync_repo(cfg, repo)

        # Get the mtime of the sqlite files again.
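        # Pause briefly so any file writes triggered by the sync have settled.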
        time.sleep(1)
        # machine.session is used here to keep SSH session open
        mtimes_post = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())
        self.assertEqual(mtimes_pre, mtimes_post)
Example #28
    def setUpClass(cls):
        """Create several schedules.

        Each schedule is created to test a different failure scenario.
        """
        super().setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Add a distributor
        distributor = gen_distributor()
        client.post(urljoin(repo['_href'], 'distributors/'), distributor)
        client.response_handler = api.echo_handler
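        # echo_handler returns responses as-is, so error status codes will not raise here.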
        cls.bodies = (
            {
                'schedule': None
            },  # 400
            {
                'unknown': 'parameter',
                'schedule': 'PT30S'
            },  # 400
            ['Incorrect data type'],  # 400
            {
                'missing_required_keys': 'schedule'
            },  # 400
            {
                'schedule': 'PT30S'
            },  # tests incorrect distributor in url, 404
            {
                'schedule': 'PT30S'
            },  # tests incorrect repo in url, 404
        )
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        bad_distributor_url = '/'.join(
            ['distributors',
             utils.uuid4(), 'schedules/publish/'])
        bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
        cls.paths = (urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], bad_distributor_url),
                     urljoin(bad_repo_path, scheduling_url))
        cls.status_codes = (400, 400, 400, 400, 404, 404)
        cls.responses = [
            client.post(path, req_body)
            for path, req_body in zip(cls.paths, cls.bodies)
        ]
Example #29
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
     body = gen_repo(importer_config={'feed': RPM_UNSIGNED_FEED_URL},
                     distributors=[gen_distributor()])
     cls.repo = cls.client.post(REPOSITORY_PATH, body)
     cls.repo = cls.client.get(cls.repo['_href'], params={'details': True})
     sync_repo(cls.cfg, cls.repo)
     cls.errata = _gen_errata()
Example #30
 def test_all(self):
     """Test whether one can upload an RPM with non-ascii metadata."""
     cfg = config.get_config()
     if not selectors.bug_is_fixed(1903, cfg.pulp_version):
         self.skipTest('https://pulp.plan.io/issues/1903')
     client = api.Client(cfg, api.json_handler)
     repo = client.post(REPOSITORY_PATH, gen_repo())
     self.addCleanup(client.delete, repo['_href'])
     rpm = utils.http_get(RPM_WITH_NON_UTF_8_URL)
     upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)