Example #1
    def test_01_set_up(self):
        """Create, sync and publish a Docker repository.

        Specifically, do the following:

        1. Create, sync and publish a Docker repository. Let the repository's
           upstream name reference a repository that has an image with a
           manifest list and no amd64/linux build.
        2. Make Crane immediately re-read the metadata files published by Pulp.
           (Restart Apache.)
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            # DOCKER_UPSTREAM_NAME (dmage/manifest-list-test) has an image
            # without any amd64/linux build. However, it has a v1 manifest.
            'upstream_name': 'dmage/busybox',
        })
        body['distributors'] = [gen_distributor()]
        type(self).repo = client.post(REPOSITORY_PATH, body)
        type(self).repo = client.get(self.repo['_href'],
                                     params={'details': True})
        sync_repo(self.cfg, self.repo)
        publish_repo(self.cfg, self.repo)

        # Make Crane read metadata. (Now!)
        cli.GlobalServiceManager(self.cfg).restart(('httpd', ))
Example #2
    def test_all(self):
        """Sync two repositories w/identical content but differing layouts."""
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        if check_issue_2798(cfg):
            self.skipTest('https://pulp.plan.io/issues/2798')
        if check_issue_2354(cfg):
            self.skipTest('https://pulp.plan.io/issues/2354')
        if (os_is_f26(cfg)
                and not selectors.bug_is_fixed(3036, cfg.pulp_version)):
            # Here, the calls to get_unit() cause pulp_streamer.service to die
            # without logging out anything. In Pulp #3036, certain actions
            # cause pulp_streamer.service to die while logging out a core dump.
            # Thus, this test failure might be unrelated to Pulp #3036.
            self.skipTest('https://pulp.plan.io/issues/3036')
        repos = [
            self.create_repo(cfg, feed, 'on_demand')
            for feed in (RPM_ALT_LAYOUT_FEED_URL, RPM_UNSIGNED_FEED_URL)
        ]
        for repo in repos:
            sync_repo(cfg, repo)
        for repo in repos:
            publish_repo(cfg, repo)
        rpms = []
        for repo in repos:
            with self.subTest(repo=repo):
                rpms.append(
                    get_unit(cfg, repo['distributors'][0], RPM).content)
        self.assertEqual(len(rpms), len(repos))
        self.assertEqual(rpms[0], rpms[1], repos)
Example #3
    def test_01_first_repo(self):
        """Create, sync content into and publish a Python repository.

        See:

        * `Pulp #135 <https://pulp.plan.io/issues/135>`_
        * `Pulp #3578 <https://pulp.plan.io/issues/3578>`_
        * `Pulp #3769 <https://pulp.plan.io/issues/3769>`_
        * `Pulp Smash #494 <https://github.com/PulpQE/pulp-smash/issues/494>`_
        """
        if (self.cfg.pulp_version < Version('2.17')
                or not selectors.bug_is_fixed(3578, self.cfg.pulp_version)):
            self.skipTest('https://pulp.plan.io/issues/3578')
        if not selectors.bug_is_fixed(135, self.cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/135')
        if (utils.fips_is_supported(self.cfg)
                and utils.fips_is_enabled(self.cfg)
                and not selectors.bug_is_fixed(3769, self.cfg.pulp_version)):
            self.skipTest('https://pulp.plan.io/issues/3769')
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = {
            'feed': PYTHON_PYPI_FEED_URL,
            'package_names': 'shelf-reader',
        }
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.repos.append(repo)
        call_report = sync_repo(self.cfg, repo)
        with self.subTest(comment='verify the sync succeeded'):
            self.verify_sync(self.cfg, call_report)
        with self.subTest(comment='verify content units are present'):
            self.verify_package_types(self.cfg, repo)
        repo = get_details(self.cfg, repo)
        publish_repo(self.cfg, repo)
Example #4
    def test_01_add_unit(self):
        """Add a content unit to the repository. Publish the repository."""
        repo_before = self.get_repo()
        rpm = utils.http_get(RPM_UNSIGNED_URL)
        upload_import_unit(
            self.cfg,
            rpm,
            {'unit_type_id': 'rpm'},
            self.repo,
        )
        publish_repo(self.cfg, repo_before)
        repo_after = self.get_repo()
        with self.subTest(comment='last_unit_added'):
            if not selectors.bug_is_fixed(1847, self.cfg.pulp_version):
                self.skipTest('https://pulp.plan.io/issues/1847')
            pre = repo_before['last_unit_added']
            post = repo_after['last_unit_added']
            self.assertIsNone(pre)
            self.assertIsNotNone(post)
        with self.subTest(comment='last_unit_removed'):
            pre = repo_before['last_unit_removed']
            post = repo_after['last_unit_removed']
            self.assertIsNone(pre)
            self.assertIsNone(post)
        with self.subTest(comment='last_publish'):
            pre = repo_before['distributors'][0]['last_publish']
            post = repo_after['distributors'][0]['last_publish']
            self.assertIsNone(pre)
            self.assertIsNotNone(post)
Example #5
    def test_all(self):
        """Verify ``RPM_LARGE_METADATA`` RPM file can be uploaded.

        Specifically, this method does the following:

        1. Create an RPM repo.
        2. Verify whether the file ``RPM_LARGE_METADATA`` can be uploaded
           into the repo without errors.

        This test targets:

        * `Pulp #723 <https://pulp.plan.io/issues/723>`_
        * `Pulp-2-Tests #88 <https://github.com/PulpQE/Pulp-2-Tests/issues/88>`_
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(distributors=[gen_distributor()])
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        rpm = utils.http_get(RPM_LARGE_METADATA_FEED)
        upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        repo = client.get(repo['_href'], params={'details': True})
        publish_repo(cfg, repo)
        rpm_path = get_rpm_published_path(cfg, repo, RPM_LARGE_METADATA)

        # Check whether the RPM was uploaded and published.
        self.assertIn(RPM_LARGE_METADATA, rpm_path, rpm_path)
Example #6
    def test_all(self):
        """Verify whether package manager can read module list from a Pulp repo."""
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17'):
            raise unittest.SkipTest(
                'This test requires Pulp 2.17 or newer.')
        if not os_support_modularity(cfg):
            raise unittest.SkipTest(
                'This test requires an OS that supports modularity.')
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(importer_config={'feed': RPM_WITH_MODULES_FEED_URL},
                        distributors=[gen_distributor()])

        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        repo_path = gen_yum_config_file(
            cfg,
            baseurl=urljoin(
                cfg.get_base_url(),
                urljoin('pulp/repos/',
                        repo['distributors'][0]['config']['relative_url'])),
            name=repo['_href'],
            repositoryid=repo['id'])
        cli_client = cli.Client(cfg)
        self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
        lines = cli_client.run(('dnf', 'module', 'list', '--all'),
                               sudo=True).stdout.splitlines()
        for key, value in MODULE_FIXTURES_PACKAGES.items():
            with self.subTest(package=key):
                module = [line for line in lines if key in line]
                self.assertEqual(len(module), value, module)
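
A note on ``gen_yum_config_file``: the file it places on the test machine is an ordinary yum/dnf repository definition pointing at the published Pulp repo. The following standalone sketch shows roughly what writing such a file looks like with only the standard library; the path, repository id, and base URL are placeholders rather than values taken from the test above.

from configparser import ConfigParser

def write_repo_file(path, repositoryid, name, baseurl):
    """Write a minimal yum/dnf repository definition to ``path``."""
    parser = ConfigParser()
    parser[repositoryid] = {
        'name': name,
        'baseurl': baseurl,
        'enabled': '1',
        'gpgcheck': '0',
    }
    with open(path, 'w') as handle:
        parser.write(handle)

# Hypothetical usage; adjust the host and relative URL for your deployment:
# write_repo_file(
#     '/etc/yum.repos.d/pulp-test.repo',
#     'pulp-test',
#     'Pulp test repository',
#     'https://pulp.example.com/pulp/repos/zoo/',
# )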
Example #7
    def test_all(self):
        """Add a content unit to a repo in the middle of several publishes."""
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2532, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2532')
        rpms = (utils.http_get(RPM_UNSIGNED_URL),
                utils.http_get(RPM2_UNSIGNED_URL))

        # Create a user and a repository.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })

        # Add content, publish w/yum, add more content, publish w/rsync.
        dists = get_dists_by_type_id(cfg, repo)
        for i, key in enumerate(('yum_distributor', 'rpm_rsync_distributor')):
            upload_import_unit(cfg, rpms[i], {'unit_type_id': 'rpm'}, repo)
            publish_repo(cfg, repo, {'id': dists[key]['id']})
        self.verify_remote_units_path(cfg, dists['rpm_rsync_distributor'], 1)

        # Publish with yum and rsync, respectively.
        for key in 'yum_distributor', 'rpm_rsync_distributor':
            publish_repo(cfg, repo, {'id': dists[key]['id']})
        self.verify_remote_units_path(cfg, dists['rpm_rsync_distributor'], 1)
Example #8
    def test_all(self):
        """Publish the rpm rsync distributor before the yum distributor."""
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2187, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2187')

        # Create a user and a repository.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })

        # Publish with the rsync distributor.
        distribs = get_dists_by_type_id(cfg, repo)
        args = (cfg, repo, {'id': distribs['rpm_rsync_distributor']['id']})
        self.verify_publish_is_skip(cfg, publish_repo(*args).json())

        # Verify that the rsync distributor hasn't placed files.
        dirs = self.remote_root_files(cfg, distribs['rpm_rsync_distributor'])
        self.assertNotIn('content', dirs)

        # Publish with the rsync distributor again, and verify again.
        if selectors.bug_is_fixed(2722, cfg.pulp_version):
            self.verify_publish_is_skip(cfg, publish_repo(*args).json())
            dirs = self.remote_root_files(cfg,
                                          distribs['rpm_rsync_distributor'])
            self.assertNotIn('content', dirs)
Example #9
    def test_all(self):
        """Upload a package group to a repository twice."""
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        client = api.Client(cfg, api.json_handler)
        self.addCleanup(client.delete, ORPHANS_PATH)

        # Create a repository.
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Give the repository a package group, and publish the repository.
        package_group = {'id': utils.uuid4(), 'name': utils.uuid4()}
        _upload_import_package_group(cfg, repo, package_group)
        repo = client.get(repo['_href'], params={'details': True})
        publish_repo(cfg, repo)

        # Update the repository's package group, and re-publish the repository.
        package_group['name'] = utils.uuid4()
        _upload_import_package_group(cfg, repo, package_group)
        publish_repo(cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps'). Verify
        # the package group portion.
        root_element = get_repodata(cfg, repo['distributors'][0], 'group')
        groups = root_element.findall('group')
        self.assertEqual(len(groups), 1, ElementTree.tostring(root_element))
        for key, value in package_group.items():
            with self.subTest(key=key):
                self.assertEqual(groups[0].find(key).text, value)
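
The ``get_repodata`` helper above hides the fetching and parsing of the published 'group' (comps) metadata. As a rough standalone illustration of that step, the sketch below downloads a comps file and lists its package group ids using only the standard library. The URL is a placeholder; in a real repository the exact file name is discovered via ``repomd.xml`` and usually carries a checksum prefix.

import gzip
import urllib.request
from xml.etree import ElementTree

def list_group_ids(comps_url):
    """Return the <id> text of every <group> element in a comps file."""
    with urllib.request.urlopen(comps_url) as response:
        raw = response.read()
    if comps_url.endswith('.gz'):
        raw = gzip.decompress(raw)
    root = ElementTree.fromstring(raw)
    return [group.findtext('id') for group in root.findall('group')]

# Hypothetical usage:
# list_group_ids('https://pulp.example.com/pulp/repos/zoo/repodata/comps.xml')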
Example #10
    def test_broken_symlinks(self):
        """Assert that the rsync yum metadata is not a symlink."""
        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(self.cfg)
        ssh_identity_file = self.write_private_key(self.cfg, priv_key)
        repo = self.make_repo(
            self.cfg, {
                'remote': {
                    'host': urlparse(self.cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            }, RPM_YUM_METADATA_FILE)
        sync_repo(self.cfg, repo)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = get_dists_by_type_id(self.cfg, repo)
        self.maybe_disable_selinux(self.cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            publish_repo(self.cfg, repo, {'id': distribs[type_id]['id']})
        path = os.path.join(
            distribs['rpm_rsync_distributor']['config']['remote']['root'],
            distribs['yum_distributor']['config']['relative_url'], 'repodata')

        # Assert that the productid was not saved as symlink
        productid_symlink = self.find_productid(True, path)
        self.assertEqual(len(productid_symlink), 0, productid_symlink)

        # Assert that the productid was saved as a file
        productid_file = self.find_productid(False, path)
        self.assertEqual(len(productid_file), 1, productid_file)
Example #11
    def test_non_matching_query(self):
        """Sync a repository with a query that doesn't match any units.

        Assert that:

        * None of the sync tasks has an error message.
        * Searching for module ``pulp_2_tests.constants.PUPPET_MODULE_2``
          yields no results.
        """
        # Create and sync a repository.
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = {
            'feed': PUPPET_FEED_2,
            'queries': [PUPPET_QUERY_2.replace('-', '_')],
        }
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        self._sync_repo(repo)

        # Publish the repository.
        publish_repo(self.cfg, repo)
        module = '/'.join((PUPPET_MODULE_2['author'], PUPPET_MODULE_2['name']))
        with self.assertRaises(HTTPError):
            client.get(
                '/v3/releases',
                auth=('repository', repo['id']),
                params={'module': module},
            )
Example #12
    def setUpClass(cls):
        """Create an RPM repository, upload package groups, and publish."""
        super().setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

        # Create a repository and add a distributor to it.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Generate several package groups, import them into the repository, and
        # publish the repository.
        cls.package_groups = {
            'minimal': _gen_minimal_group(),
            'realistic': _gen_realistic_group(),
        }
        cls.tasks = {}
        for key, package_group in cls.package_groups.items():
            report = _upload_import_package_group(cls.cfg, repo, package_group)
            cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
        publish_repo(cls.cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps')
        cls.root_element = (get_repodata(cls.cfg, repo['distributors'][0],
                                         'group'))
Example #13
    def do_test(self, distributor_config_update):
        """Implement most of the test logic."""
        rpms = tuple(
            utils.http_get(url)
            for url in (RPM_UNSIGNED_URL, RPM2_UNSIGNED_URL))

        # Create a repository.
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config'].update(
            distributor_config_update)
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Upload an RPM, publish the repo, and count metadata files twice.
        cli_client = cli.Client(self.cfg)
        sudo = () if cli.is_root(self.cfg) else ('sudo', )
        find_repodata_cmd = sudo + (
            'find',
            os.path.join('/var/lib/pulp/published/yum/master/yum_distributor/',
                         str(repo['id'])), '-type', 'd', '-name', 'repodata')
        found = []
        for rpm in rpms:
            upload_import_unit(self.cfg, rpm, {'unit_type_id': 'rpm'}, repo)
            publish_repo(self.cfg, repo)
            repodata_path = cli_client.run(find_repodata_cmd).stdout.strip()
            found.append(
                cli_client.run(sudo + ('find', repodata_path, '-type',
                                       'f')).stdout.splitlines())
        return found
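
The shelled-out ``find`` commands above locate the published ``repodata`` directory and count the metadata files it contains after each publish. Assuming direct filesystem access to the published tree, a local standard-library equivalent might look like this; the path in the usage comment follows the layout shown above and is illustrative only.

from pathlib import Path

def repodata_files(published_repo_root):
    """Return every regular file under any 'repodata' directory in the tree."""
    return [
        str(path)
        for repodata in Path(published_repo_root).rglob('repodata')
        if repodata.is_dir()
        for path in repodata.rglob('*')
        if path.is_file()
    ]

# Hypothetical usage:
# repodata_files('/var/lib/pulp/published/yum/master/yum_distributor/my-repo-id')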
Example #14
    def test_all(self):
        """Package manager can consume RPM with rich/weak dependencies from Pulp."""
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17'):
            raise unittest.SkipTest('This test requires Pulp 2.17 or newer.')
        if not rpm_rich_weak_dependencies(cfg):
            raise unittest.SkipTest('This test requires RPM 4.12 or newer.')
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(
            importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
            distributors=[gen_distributor()]
        )
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)
        repo_path = gen_yum_config_file(
            cfg,
            baseurl=urljoin(cfg.get_base_url(), urljoin(
                'pulp/repos/',
                repo['distributors'][0]['config']['relative_url']
            )),
            name=repo['_href'],
            repositoryid=repo['id']
        )
        cli_client = cli.Client(cfg)
        self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
        rpm_name = 'Cobbler'
        pkg_mgr = cli.PackageManager(cfg)
        pkg_mgr.install(rpm_name)
        self.addCleanup(pkg_mgr.uninstall, rpm_name)
        rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
        self.assertEqual(rpm_name, rpm[0])
Example #15
    def test_all(self):
        """Test whether copied files retain their original mtime.

        This test targets the following issues:

        * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
        * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

        Do the following:

        1. Create, sync and publish a repository, with ``generate_sqlite`` set
           to true.
        2. Get the ``mtime`` of the sqlite files.
        3. Upload an RPM package into the repository, and sync the repository.
        4. Get the ``mtime`` of the sqlite files again. Verify that the mtimes
           are the same.
        """
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2783, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2783')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config']['generate_sqlite'] = True
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)

        # Get the mtime of the sqlite files.
        cli_client = cli.Client(cfg, cli.echo_handler)
        cmd = '' if cli.is_root(cfg) else 'sudo '
        cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
            os.path.join(
                _PATH,
                repo['distributors'][0]['config']['relative_url'],
                'repodata',
            ))
        # machine.session is used here to keep SSH session open
        mtimes_pre = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())

        # Upload to the repo, and sync it.
        rpm = utils.http_get(RPM_SIGNED_URL)
        upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        sync_repo(cfg, repo)

        # Get the mtime of the sqlite files again.
        time.sleep(1)
        # machine.session is used here to keep SSH session open
        mtimes_post = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())
        self.assertEqual(mtimes_pre, mtimes_post)
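
The mtime comparison above shells out to ``stat`` over SSH. Purely as a local illustration of the same idea (and of why the comparison values are built with ``sorted()`` rather than ``list.sort()``, which returns ``None``), here is a small sketch using ``os.scandir``; the directory in the usage comment is a placeholder.

import os

def repodata_mtimes(repodata_dir):
    """Return the sorted modification times of the files in ``repodata_dir``."""
    return sorted(
        entry.stat().st_mtime
        for entry in os.scandir(repodata_dir)
        if entry.is_file()
    )

# Hypothetical usage:
# before = repodata_mtimes('/var/lib/pulp/published/yum/https/repos/zoo/repodata')
# ...upload an RPM and sync the repository...
# after = repodata_mtimes('/var/lib/pulp/published/yum/https/repos/zoo/repodata')
# assert before == after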
Example #16
    def test_all(self):
        """Sync a repo whose updateinfo file has multiple pkglist sections.

        Specifically, do the following:

        1. Create, sync and publish an RPM repository whose feed is set to
           ``pulp_2_tests.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL``.
        2. Fetch and parse the published repository's ``updateinfo.xml`` file.

        Verify that the ``updateinfo.xml`` file has three packages whose
        ``<filename>`` elements have the following text:

        * penguin-0.9.1-1.noarch.rpm
        * shark-0.1-1.noarch.rpm
        * walrus-5.21-1.noarch.rpm

        Note that Pulp is free to change the structure of a source repository
        at will. For example, if the source repository has three
        ``<collection>`` elements, the published repository can have one, two
        or three ``<collection>`` elements. Assertions are not made about
        these details.
        """
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        if not selectors.bug_is_fixed(2227, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2227')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)

        # Fetch and parse ``updateinfo.xml``.
        updates_element = (get_repodata(cfg, repo['distributors'][0],
                                        'updateinfo'))

        # Verify the ``updateinfo.xml`` file.
        debug = ElementTree.tostring(updates_element)
        filename_elements = (updates_element.findall(
            'update/pkglist/collection/package/filename'))
        filenames = [
            filename_element.text for filename_element in filename_elements
        ]
        filenames.sort()
        self.assertEqual(filenames, [
            'penguin-0.9.1-1.noarch.rpm',
            'shark-0.1-1.noarch.rpm',
            'walrus-5.21-1.noarch.rpm',
        ], debug)
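
To make the ``findall()`` path used above concrete, the following self-contained snippet runs the same expression against a tiny inline ``updateinfo`` fragment instead of a live Pulp server; the fragment is invented for illustration and omits the attributes a real erratum would carry.

from xml.etree import ElementTree

UPDATEINFO = """
<updates>
  <update>
    <pkglist>
      <collection>
        <package><filename>walrus-5.21-1.noarch.rpm</filename></package>
        <package><filename>penguin-0.9.1-1.noarch.rpm</filename></package>
      </collection>
    </pkglist>
  </update>
</updates>
"""

root = ElementTree.fromstring(UPDATEINFO)
filenames = sorted(
    element.text
    for element in root.findall('update/pkglist/collection/package/filename')
)
print(filenames)  # ['penguin-0.9.1-1.noarch.rpm', 'walrus-5.21-1.noarch.rpm']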
Example #17
    def test_02_copy_publish(self):
        """Copy and RPM from the first repo to the second, and publish it.

        Execute :meth:`verify_repo_search` and :meth:`verify_repo_download`.
        """
        api.Client(self.cfg).post(
            urljoin(self.repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': self.repos[0]['id']})
        publish_repo(self.cfg, self.repos[1])
        self.verify_repo_search(self.repos[1])
        self.verify_repo_download(self.repos[1])
Example #18
    def test_matching_query(self):
        """Sync a repository with a query that matches units.

        Assert that:

        * None of the sync tasks has an error message.
        * Searching for module ``pulp_2_tests.constants.PUPPET_MODULE_2``
          yields one result.
        * The synced-in module can be downloaded.
        """
        cfg = config.get_config()
        if not selectors.bug_is_fixed(3692, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/3692')

        # Create and sync a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = {
            'feed': PUPPET_FEED_2,
            'queries': [PUPPET_QUERY_2],
        }
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        self._sync_repo(repo)

        # Publish the repository.
        publish_repo(cfg, repo)
        module = '/'.join((PUPPET_MODULE_2['author'], PUPPET_MODULE_2['name']))
        response = client.get(
            '/v3/releases',
            auth=('repository', repo['id']),
            params={'module': module},
        )
        self.assertEqual(len(response['results']), 1)

        # Download the Puppet module directly from feed url.
        latest_version = response['results'][0]['metadata']['version']
        module_file = utils.http_get(PUPPET_MODULE_URL_2 % latest_version)

        client.response_handler = api.safe_handler
        # Download the Puppet module stored by Pulp.
        file_response = client.get(response['results'][0]['file_uri'])

        # Assert the files are the same.
        with self.subTest():
            self.assertEqual(module_file, file_response.content)
        with self.subTest():
            self.assertIn(
                file_response.headers['content-type'],
                ('application/gzip', 'application/x-gzip')
            )
Example #19
    def test_01_set_up(self):
        """Create and publish a repo, and fetch and parse its ``repomd.xml``."""
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        self.repo.update(client.post(REPOSITORY_PATH, body))
        self.repo.update(client.get(self.repo['_href'], params={'details': True}))
        publish_repo(self.cfg, self.repo)
        type(self).root_element = get_repodata_repomd_xml(
            self.cfg,
            self.repo['distributors'][0],
        )
Example #20
    def test_all(self):
        """Use the ``force_full`` RPM rsync distributor option."""
        cfg = config.get_config()
        cli_client = cli.Client(cfg)
        sudo = '' if cli.is_root(cfg) else 'sudo '

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })
        sync_repo(cfg, repo)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = get_dists_by_type_id(cfg, repo)
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            publish_repo(cfg, repo, {'id': distribs[type_id]['id']})
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])

        # Remove all files from the target directory, and publish again. Verify
        # that the RPM rsync distributor didn't place any files.
        cmd = sudo + 'rm -rf /home/{}/content'.format(ssh_user)
        cli_client.run(cmd.split())
        self.verify_publish_is_skip(
            cfg,
            publish_repo(cfg, repo, {
                'id': distribs['rpm_rsync_distributor']['id']
            }).json())
        dirs = self.remote_root_files(cfg, distribs['rpm_rsync_distributor'])
        self.assertNotIn('content', dirs)

        # Publish the repo with ``force_full`` set to true. Verify that the RPM
        # rsync distributor placed files.
        if not selectors.bug_is_fixed(2202, cfg.pulp_version):
            return
        publish_repo(
            cfg, repo, {
                'id': distribs['rpm_rsync_distributor']['id'],
                'override_config': {
                    'force_full': True
                },
            })
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])
Example #21
    def test_update_on_copy(self):
        """Check if copying units into a repo updates ``last_unit_added``.

        Do the following:

        1. Create a repository with a feed and sync it.
        2. Create a second repository. Assert the second repository's
           ``last_unit_added`` attribute is null.
        3. Copy a content unit from first repository to the second. Assert the
           second repository's ``last_unit_added`` attribute is non-null.
        4. Publish the second repository. Assert its ``last_unit_added``
           attribute is non-null.
        """
        if not selectors.bug_is_fixed(2688, self.cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2688')

        # create a repo with a feed and sync it
        sync_repo(self.cfg, self.repo)
        self.repo = self.client.get(self.repo['_href'],
                                    params={'details': True})

        # create a second repository
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo2 = self.client.post(REPOSITORY_PATH, body)
        self.addCleanup(self.client.delete, repo2['_href'])
        repo2 = self.client.get(repo2['_href'], params={'details': True})
        with self.subTest(comment='after repository creation'):
            self.assertIsNone(repo2['last_unit_added'])

        # copy a content unit from the first repo to the second
        self.client.post(
            urljoin(repo2['_href'], 'actions/associate/'), {
                'source_repo_id': self.repo['id'],
                'criteria': {
                    'filters': {
                        'unit': {
                            'name': 'bear'
                        }
                    },
                    'type_ids': ['rpm'],
                },
            })
        repo2 = self.client.get(repo2['_href'], params={'details': True})
        with self.subTest(comment='after unit association'):
            self.assertIsNotNone(repo2['last_unit_added'], repo2)

        # publish the second repo
        publish_repo(self.cfg, repo2)
        repo2 = self.client.get(repo2['_href'], params={'details': True})
        with self.subTest(comment='after repository publish'):
            self.assertIsNotNone(repo2['last_unit_added'], repo2)
Example #22
    def test_all(self):
        """Execute the test case business logic."""
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        self.check_issue_2277(cfg)
        self.check_issue_2321(cfg)
        repo = self.create_repo(cfg, RPM_MIRRORLIST_MIXED, _gen_rel_url())
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)
        actual_rpm = get_unit(cfg, repo['distributors'][0], RPM).content
        target_rpm = utils.http_get(RPM_UNSIGNED_URL)
        self.assertEqual(actual_rpm, target_rpm)
Example #23
    def create_sync_publish_repo(self, body):
        """Create, sync and publish a repository.

        Also, schedule the repository for deletion.

        :param body: A dict of information to use when creating the repository.
        :return: A detailed dict of information about the repository.
        """
        repo = self.client.post(REPOSITORY_PATH, body)
        self.addCleanup(self.client.delete, repo['_href'])
        repo = self.client.get(repo['_href'], params={'details': True})
        sync_repo(self.cfg, repo)
        publish_repo(self.cfg, repo)
        return repo
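
For orientation, the ``api.Client`` calls wrapped by this helper correspond to plain HTTP requests against Pulp 2's REST API. A hedged sketch of the same create/sync/publish sequence using ``requests`` follows; the host, credentials, TLS handling and distributor details are placeholders, the endpoints are the usual ``/pulp/api/v2/...`` ones (double-check them against your Pulp documentation), and unlike the helper above this sketch does not poll the asynchronous tasks that sync and publish spawn.

import requests

BASE = 'https://pulp.example.com'   # placeholder host
AUTH = ('admin', 'admin')           # placeholder credentials

def create_sync_publish(repo_id, feed, distributor_id='yum_distributor'):
    """Create, sync and publish a yum repository via Pulp 2's REST API."""
    requests.post(
        BASE + '/pulp/api/v2/repositories/',
        json={
            'id': repo_id,
            'importer_type_id': 'yum_importer',
            'importer_config': {'feed': feed},
            'distributors': [{
                'distributor_id': distributor_id,
                'distributor_type_id': 'yum_distributor',
                'distributor_config': {'relative_url': repo_id,
                                       'http': True, 'https': True},
                'auto_publish': False,
            }],
        },
        auth=AUTH, verify=False,
    ).raise_for_status()
    repo_href = '{}/pulp/api/v2/repositories/{}/'.format(BASE, repo_id)
    # Both calls below return a call report; a real client would poll the
    # spawned tasks before considering each step complete.
    requests.post(repo_href + 'actions/sync/', json={'override_config': {}},
                  auth=AUTH, verify=False).raise_for_status()
    requests.post(repo_href + 'actions/publish/', json={'id': distributor_id},
                  auth=AUTH, verify=False).raise_for_status()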
Example #24
    def do_test(self, feed):
        """Verify ``checksum_type`` is updated on the repo metadata."""
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        client = api.Client(cfg, api.json_handler)

        # Create and sync a repository.
        body = gen_repo()
        body['importer_config']['feed'] = feed
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        sync_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        distributor = repo['distributors'][0]

        # Update checksum type to be "sha256" and publish the repository.
        client.put(distributor['_href'],
                   {'distributor_config': {
                       'checksum_type': 'sha256'
                   }})
        publish_repo(cfg, repo)
        with self.subTest(comment='primary.xml'):
            self.verify_primary_xml(cfg, distributor, 'sha256')
        with self.subTest(comment='filelists.xml'):
            self.verify_filelists_xml(cfg, distributor, 'sha256')
        with self.subTest(comment='other.xml'):
            self.verify_other_xml(cfg, distributor, 'sha256')
        if feed == DRPM_UNSIGNED_FEED_URL:
            with self.subTest(comment='prestodelta.xml'):
                self.verify_presto_delta_xml(cfg, distributor, 'sha256')

        # Update the checksum type to "sha1", and re-publish the repository.
        client.put(distributor['_href'], {
            'distributor_config': {
                'checksum_type': 'sha1',
                'force_full': True
            }
        })
        publish_repo(cfg, repo)
        with self.subTest(comment='primary.xml'):
            self.verify_primary_xml(cfg, distributor, 'sha1')
        with self.subTest(comment='filelists.xml'):
            self.verify_filelists_xml(cfg, distributor, 'sha1')
        with self.subTest(comment='other.xml'):
            self.verify_other_xml(cfg, distributor, 'sha1')
        if feed == DRPM_UNSIGNED_FEED_URL:
            with self.subTest(comment='prestodelta.xml'):
                self.verify_presto_delta_xml(cfg, distributor, 'sha1')
Example #25
    def test_all(self):
        """Publish with a yum and rsync distributor twice."""
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2666, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2666')
        if check_issue_2844(cfg):
            self.skipTest('https://pulp.plan.io/issues/2844')

        # Create a user and a repository.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })

        # Add content.
        for url in (RPM_UNSIGNED_URL, RPM2_UNSIGNED_URL):
            upload_import_unit(cfg, utils.http_get(url),
                               {'unit_type_id': 'rpm'}, repo)
        dists = get_dists_by_type_id(cfg, repo)

        # See https://pulp.plan.io/issues/2844#note-11
        time.sleep(2)

        # Publish with yum and rsync.
        for dist in 'yum_distributor', 'rpm_rsync_distributor':
            report = (publish_repo(cfg, repo, {
                'id': dists[dist]['id']
            }).json())
            publish_task = self.get_publish_task(cfg, report)
        num_processed = self.get_num_processed(publish_task)
        with self.subTest(comment='first rsync publish'):
            self.assertEqual(num_processed, 2, publish_task)

        # Publish with yum and rsync again.
        for dist in 'yum_distributor', 'rpm_rsync_distributor':
            report = (publish_repo(cfg, repo, {
                'id': dists[dist]['id']
            }).json())
            publish_task = self.get_publish_task(cfg, report)
        num_processed = self.get_num_processed(publish_task)
        with self.subTest(comment='second rsync publish'):
            self.assertEqual(num_processed, 0, publish_task)
Example #26
    def test_01_upload_publish(self):
        """Upload an RPM to the first repository, and publish it.

        Execute :meth:`verify_repo_search` and :meth:`verify_repo_download`.
        """
        repo = self.repos[0]
        upload_import_unit(
            self.cfg,
            self.rpm,
            {'unit_type_id': 'rpm'},
            repo,
        )
        publish_repo(self.cfg, repo)
        self.verify_repo_search(repo)
        self.verify_repo_download(repo)
Example #27
    def test_all(self):
        """Test puppet_install_distributor.

        Do the following:

        1. Create a puppet repository with a puppet_install_distributor
        2. Upload a puppet module
        3. Publish the repository
        4. Check if the puppet_install_distributor config was properly used
        """
        cfg = config.get_config()
        if (not selectors.bug_is_fixed(3314, cfg.pulp_version)
                and os_is_f27(cfg)):
            self.skipTest('https://pulp.plan.io/issues/3314')
        cli_client = cli.Client(cfg)

        # Create a directory and make sure Pulp can write to it.
        install_path = cli_client.run(('mktemp', '--directory')).stdout.strip()
        self.addCleanup(cli_client.run, ('rm', '-rf', install_path), sudo=True)
        cli_client.run(('chown', 'apache:apache', install_path), sudo=True)
        cli_client.run(('chcon', '-t', 'puppet_etc_t', install_path),
                       sudo=True)

        # Make sure the pulp_manage_puppet boolean is enabled
        cli_client.run(('setsebool', 'pulp_manage_puppet', 'on'), sudo=True)

        self.addCleanup(cli_client.run,
                        ('setsebool', 'pulp_manage_puppet', 'off'),
                        sudo=True)

        # Create and populate a Puppet repository.
        distributor = gen_install_distributor()
        distributor['distributor_config']['install_path'] = install_path
        body = gen_repo()
        body['distributors'] = [distributor]
        client = api.Client(cfg, api.json_handler)
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        unit = utils.http_get(PUPPET_MODULE_URL_1)
        upload_import_unit(cfg, unit, {'unit_type_id': 'puppet_module'}, repo)

        # Publish, and verify the module is present. (Dir has 700 permissions.)
        publish_repo(cfg, repo)
        proc = cli_client.run(('runuser', '--shell', '/bin/sh', '--command',
                               'ls -1 {}'.format(install_path), '-', 'apache'),
                              sudo=True)
        self.assertIn(PUPPET_MODULE_1['name'], proc.stdout.split('\n'), proc)
Example #28
    def health_check(self):
        """Execute step three of the test plan."""
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(self.cfg, repo)
        publish_repo(self.cfg, repo)
        pulp_rpm = get_unit(self.cfg, repo['distributors'][0], RPM).content

        # Does this RPM match the original RPM?
        rpm = utils.http_get(RPM_SIGNED_URL)
        self.assertEqual(rpm, pulp_rpm)
Example #29
    def _create_sync_publish_repo(self, cfg):
        """Create, sync and publish a repository.

        Also, schedule it for deletion. Return a detailed dict of information
        about the repository.
        """
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)
        return repo
Example #30
    def test_sync_publish_repo(self):
        """Test sync and publish modular RPM repository."""
        repo = self.create_sync_modular_repo()
        # Assert that `modulemd` and `modulemd_defaults` are present on the
        # repository.
        self.assertIsNotNone(repo['content_unit_counts']['modulemd'])
        self.assertIsNotNone(repo['content_unit_counts']['modulemd_defaults'])

        publish_repo(self.cfg, repo)

        get_repodata(
            self.cfg,
            repo['distributors'][0],
            'modules',
            api.safe_handler,
        )
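
The final ``get_repodata`` call only confirms that the published ``modules`` metadata can be fetched. That metadata is a multi-document YAML stream; if you wanted to inspect it, a hedged sketch using PyYAML (a third-party library not used by the tests above) could look like this, with a placeholder URL.

import gzip
import urllib.request

import yaml  # PyYAML

def list_module_names(modules_url):
    """Return the names of the 'modulemd' documents in a modules YAML file."""
    with urllib.request.urlopen(modules_url) as response:
        raw = response.read()
    if modules_url.endswith('.gz'):
        raw = gzip.decompress(raw)
    return sorted({
        doc['data']['name']
        for doc in yaml.safe_load_all(raw)
        if doc and doc.get('document') == 'modulemd'
    })

# Hypothetical usage:
# list_module_names('https://pulp.example.com/pulp/repos/zoo/repodata/modules.yaml.gz')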