Esempio n. 1
0
    def test_non_matching_query(self):
        """Sync a repository with a query that doesn't match any units.

        Assert that:

        * None of the sync tasks has an error message.
        * Searching for module :data:`pulp_smash.constants.PUPPET_MODULE_2`
          yields no results.
        """
        # Create and sync a repository.
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = {
            'feed': PUPPET_FEED_2,
            # Mangle the query ('-' -> '_') so it cannot match the module
            # that actually lives in the feed.
            'queries': [PUPPET_QUERY_2.replace('-', '_')],
        }
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        # Re-read the repository with importer/distributor details included.
        repo = client.get(repo['_href'], params={'details': True})
        self.sync_repo(repo)

        # Publish the repository.
        utils.publish_repo(self.cfg, repo)
        module = '/'.join((PUPPET_MODULE_2['author'], PUPPET_MODULE_2['name']))
        # Nothing was synced, so looking up the module must fail.
        with self.assertRaises(HTTPError):
            client.get(
                '/v3/releases',
                auth=('repository', repo['id']),
                params={'module': module},
            )
Esempio n. 2
0
    def setUpClass(cls):
        """Generate, fetch and parse a ``repomd.xml`` file.

        Do the following:

        1. Create an RPM repository with a YUM distributor and publish it.
        2. Fetch the ``repomd.xml`` file from the distributor, and parse it.
        """
        super(RepoMDTestCase, cls).setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
        if check_issue_2277(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

        # Build a repository carrying a yum distributor, then publish it.
        client = api.Client(cls.cfg, api.json_handler)
        repo_body = gen_repo()
        repo_body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, repo_body)
        repo = client.get(repo['_href'], params={'details': True})
        cls.resources.add(repo['_href'])
        utils.publish_repo(cls.cfg, repo)

        # Download repomd.xml from the published location and parse the
        # response body as XML.
        client.response_handler = xml_handler
        relative_url = repo['distributors'][0]['config']['relative_url']
        repomd_path = urljoin(
            urljoin('/pulp/repos/', relative_url),
            'repodata/repomd.xml',
        )
        cls.root_element = client.get(repomd_path)
Esempio n. 3
0
    def test_all(self):
        """Create, sync and publish an OSTree repository.

        Verify that:

        * The distributor's ``last_publish`` attribute is ``None`` after the
          sync. This demonstrates that ``auto_publish`` correctly defaults to
          ``False``.
        * The distributor's ``last_publish`` attribute is not ``None`` after
          the publish.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        # Create an OSTree repository: one feed, one branch, one distributor.
        body = gen_repo()
        body['importer_config'].update({
            'feed': OSTREE_FEED,
            'branches': [OSTREE_BRANCH],
        })
        body['distributors'].append(gen_distributor())
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Sync only. No publish has happened, so last_publish must be unset.
        utils.sync_repo(cfg, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after sync'):
            self.assertIsNone(repo['distributors'][0]['last_publish'])

        # After an explicit publish, last_publish must be set.
        utils.publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after publish'):
            self.assertIsNotNone(repo['distributors'][0]['last_publish'])
Esempio n. 4
0
 def test_all(self):
     """Sync two repositories w/identical content but differing layouts."""
     cfg = config.get_config()
     # Skip on Pulp installations affected by known blocking issues.
     if check_issue_3104(cfg):
         self.skipTest('https://pulp.plan.io/issues/3104')
     if check_issue_2798(cfg):
         self.skipTest('https://pulp.plan.io/issues/2798')
     if check_issue_2354(cfg):
         self.skipTest('https://pulp.plan.io/issues/2354')
     if (utils.os_is_f26(cfg)
             and selectors.bug_is_untestable(3036, cfg.pulp_version)):
         # Here, the calls to get_unit() cause pulp_streamer.service to die
         # without logging out anything. In Pulp #3036, certain actions
         # cause pulp_streamer.service to die while logging out a core dump.
         # Thus, this test failure might be unrelated to Pulp #3036.
         self.skipTest('https://pulp.plan.io/issues/3036')
     # One on_demand repository per feed: the two feeds carry the same
     # content laid out differently.
     repos = [
         self.create_repo(cfg, feed, 'on_demand')
         for feed in (RPM_ALT_LAYOUT_FEED_URL, RPM_UNSIGNED_FEED_URL)
     ]
     # Sync every repository, then publish every repository.
     for repo in repos:
         utils.sync_repo(cfg, repo)
     for repo in repos:
         utils.publish_repo(cfg, repo)
     # Download the same RPM from each published repository.
     rpms = []
     for repo in repos:
         with self.subTest(repo=repo):
             rpms.append(
                 get_unit(cfg, repo['distributors'][0], RPM).content)
     # Identical content should yield identical bytes from both repos.
     self.assertEqual(len(rpms), len(repos))
     self.assertEqual(rpms[0], rpms[1], repos)
Esempio n. 5
0
    def test_all(self):
        """Upload a package group to a repository twice."""
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        client = api.Client(cfg, api.json_handler)
        # Ensure orphaned content units are cleaned up afterwards.
        self.addCleanup(client.delete, ORPHANS_PATH)

        # Create a repository.
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Give the repository a package group, and publish the repository.
        package_group = {'id': utils.uuid4(), 'name': utils.uuid4()}
        _upload_import_package_group(cfg, repo, package_group)
        repo = client.get(repo['_href'], params={'details': True})
        utils.publish_repo(cfg, repo)

        # Update the repository's package group, and re-publish the repository.
        # Same group ID, new name: the second upload should replace the
        # group, not duplicate it (asserted below by len(groups) == 1).
        package_group['name'] = utils.uuid4()
        _upload_import_package_group(cfg, repo, package_group)
        utils.publish_repo(cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps'). Verify
        # the package group portion.
        root_element = get_repodata(cfg, repo['distributors'][0], 'group')
        groups = root_element.findall('group')
        self.assertEqual(len(groups), 1, ElementTree.tostring(root_element))
        for key, value in package_group.items():
            with self.subTest(key=key):
                self.assertEqual(groups[0].find(key).text, value)
Esempio n. 6
0
    def test_all(self):
        """Work with an image whose name has no namespace.

        Create, sync and publish a Docker repository whose ``UPSTREAM_NAME``
        doesn't include a namespace. A typical Docker image has a name like
        "library/busybox." When a non-namespaced image name like "busybox" is
        given, a prefix of "library" is assumed.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        # Sync via the v2 API only, from a non-namespaced upstream name.
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            'upstream_name': 'busybox',
        })
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)

        # Make Crane read the metadata. (Now!)
        cli.GlobalServiceManager(cfg).restart(('httpd', ))

        # Get and inspect /crane/repositories/v2 — only when this Pulp is at
        # least 2.14 and bug #2723 is testable on it.
        if (cfg.pulp_version >= Version('2.14')
                and selectors.bug_is_testable(2723, cfg.pulp_version)):
            client = self.make_crane_client(cfg)
            repo_id = repo['id']
            repos = client.get('/crane/repositories/v2')
            self.assertIn(repo_id, repos.keys())
            self.assertFalse(repos[repo_id]['protected'])
Esempio n. 7
0
    def test_01_set_up(self):
        """Create, sync and publish a repository.

        Specifically, do the following:

        1. Create, sync and publish a Docker repository. Let the repository's
           feed reference a v1 Docker registry.
        2. Make Crane immediately re-read the metadata files published by Pulp.
           (Restart Apache.)
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        # Enable only the v1 API, and point the importer at a v1 registry.
        body['importer_config'].update({
            'enable_v1': True,
            'enable_v2': False,
            'feed': DOCKER_V1_FEED_URL,
            'upstream_name': get_upstream_name(self.cfg),
        })
        body['distributors'] = [gen_distributor()]
        # Store the repository on the class, so sibling tests can reuse it.
        type(self).repo = client.post(REPOSITORY_PATH, body)
        type(self).repo = client.get(
            self.repo['_href'],
            params={'details': True},
        )
        utils.sync_repo(self.cfg, self.repo)
        utils.publish_repo(self.cfg, self.repo)

        # Restart Apache, which makes Crane re-read Pulp's metadata. (Now!)
        cli.GlobalServiceManager(self.cfg).restart(('httpd',))
Esempio n. 8
0
    def setUpClass(cls):
        """Generate, fetch and parse a ``repomd.xml`` file.

        Do the following:

        1. Create an RPM repository with a YUM distributor and publish it.
        2. Fetch the ``repomd.xml`` file from the distributor, and parse it.
        """
        super(RepoMDTestCase, cls).setUpClass()
        if check_issue_2277(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

        # Create and publish a repository that has a yum distributor.
        client = api.Client(cls.cfg, api.json_handler)
        create_body = gen_repo()
        create_body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, create_body)
        repo = client.get(repo['_href'], params={'details': True})
        cls.resources.add(repo['_href'])
        utils.publish_repo(cls.cfg, repo)

        # Fetch repomd.xml from under the distributor's relative URL, parsing
        # the response as XML.
        client.response_handler = xml_handler
        base = urljoin(
            '/pulp/repos/',
            repo['distributors'][0]['config']['relative_url'],
        )
        cls.root_element = client.get(urljoin(base, 'repodata/repomd.xml'))
Esempio n. 9
0
    def do_test(self, cfg, repo_registry_id):
        """Execute the test with the given ``repo_registry_id``."""
        # Create, sync and publish.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            'upstream_name': get_upstream_name(cfg),
        })
        body['distributors'] = [gen_distributor()]
        # Publish under the caller-supplied registry ID instead of the
        # default.
        body['distributors'][0]['distributor_config']['repo-registry-id'] = (
            repo_registry_id)
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)
        cli.GlobalServiceManager(cfg).restart(('httpd', ))  # restart Crane

        # Get and inspect /crane/repositories/v2. The repo must be listed
        # under the registry ID, and must not be protected.
        client = self.make_crane_client(cfg)
        repos = client.get('/crane/repositories/v2')
        self.assertIn(repo_registry_id, repos.keys())
        self.assertFalse(repos[repo_registry_id]['protected'])
Esempio n. 10
0
    def test_01_set_up(self):
        """Create, sync and publish a Docker repository.

        Specifically, do the following:

        1. Create, sync and publish a Docker repository. Let the repository's
           upstream name reference a repository that has an image with a
           manifest list and no amd64/linux build.
        2. Make Crane immediately re-read the metadata files published by Pulp.
           (Restart Apache.)
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            # DOCKER_UPSTREAM_NAME (dmage/manifest-list-test) has an image
            # without any amd64/linux build. However, it has a v1 manifest.
            # NOTE(review): the value below is 'dmage/busybox', not
            # DOCKER_UPSTREAM_NAME as the comment above suggests — confirm
            # which upstream is intended.
            'upstream_name': 'dmage/busybox',
        })
        body['distributors'] = [gen_distributor()]
        # Store the repository on the class so sibling tests can reuse it.
        type(self).repo = client.post(REPOSITORY_PATH, body)
        type(self).repo = client.get(self.repo['_href'],
                                     params={'details': True})
        utils.sync_repo(self.cfg, self.repo)
        utils.publish_repo(self.cfg, self.repo)

        # Make Crane read metadata. (Now!)
        cli.GlobalServiceManager(self.cfg).restart(('httpd', ))
Esempio n. 11
0
    def test_all(self):
        """Publish the rpm rsync distributor before the yum distributor."""
        cfg = config.get_config()
        if selectors.bug_is_untestable(2187, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2187')

        # Create a user and a repository. The rsync distributor will push
        # into the new user's home directory over SSH.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })

        # Publish with the rsync distributor. No yum publish has happened
        # yet, so the rsync publish is expected to be skipped.
        distribs = get_dists_by_type_id(cfg, repo)
        args = (cfg, repo, {'id': distribs['rpm_rsync_distributor']['id']})
        self.verify_publish_is_skip(cfg, utils.publish_repo(*args).json())

        # Verify that the rsync distributor hasn't placed files.
        sudo = '' if utils.is_root(cfg) else 'sudo '
        cmd = (sudo + 'ls -1 /home/{}'.format(ssh_user)).split()
        dirs = set(cli.Client(cfg).run(cmd).stdout.strip().split('\n'))
        self.assertNotIn('content', dirs)

        # Publish with the rsync distributor again, and verify again.
        if selectors.bug_is_testable(2722, cfg.pulp_version):
            self.verify_publish_is_skip(cfg, utils.publish_repo(*args).json())
            dirs = set(cli.Client(cfg).run(cmd).stdout.strip().split('\n'))
            self.assertNotIn('content', dirs)
Esempio n. 12
0
    def test_all(self):
        """Add a content unit to a repo in the middle of several publishes."""
        cfg = config.get_config()
        if selectors.bug_is_untestable(2532, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2532')
        # Two RPMs: one uploaded before the yum publish, one before the
        # rsync publish (see the enumerate() loop below).
        rpms = (utils.http_get(RPM_UNSIGNED_URL),
                utils.http_get(RPM2_UNSIGNED_URL))

        # Create a user and a repository.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })

        # Add content, publish w/yum, add more content, publish w/rsync.
        dists = get_dists_by_type_id(cfg, repo)
        for i, key in enumerate(('yum_distributor', 'rpm_rsync_distributor')):
            utils.upload_import_unit(cfg, rpms[i], {'unit_type_id': 'rpm'},
                                     repo)
            utils.publish_repo(cfg, repo, {'id': dists[key]['id']})
        self.verify_remote_units_path(cfg, dists['rpm_rsync_distributor'], 1)

        # Publish with yum and rsync, respectively.
        for key in 'yum_distributor', 'rpm_rsync_distributor':
            utils.publish_repo(cfg, repo, {'id': dists[key]['id']})
        self.verify_remote_units_path(cfg, dists['rpm_rsync_distributor'], 1)
Esempio n. 13
0
    def setUpClass(cls):
        """Create an RPM repository, upload package groups, and publish."""
        super(UploadPackageGroupsTestCase, cls).setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

        # Make a repository that carries a distributor.
        client = api.Client(cls.cfg, api.json_handler)
        repo_body = gen_repo()
        repo_body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, repo_body)
        cls.resources.add(repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Import a minimal and a realistic package group, recording the tasks
        # each import spawns, then publish the repository.
        cls.package_groups = {
            'minimal': _gen_minimal_group(),
            'realistic': _gen_realistic_group(),
        }
        cls.tasks = {
            key: tuple(api.poll_spawned_tasks(
                cls.cfg,
                _upload_import_package_group(cls.cfg, repo, group),
            ))
            for key, group in cls.package_groups.items()
        }
        utils.publish_repo(cls.cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps').
        cls.root_element = get_repodata(
            cls.cfg, repo['distributors'][0], 'group')
Esempio n. 14
0
    def setUpClass(cls):
        """Create an RPM repository, upload package groups, and publish."""
        super(UploadPackageGroupsTestCase, cls).setUpClass()

        # Create a repository and add a distributor to it.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Generate several package groups, import them into the repository, and
        # publish the repository.
        cls.package_groups = {
            'minimal': _gen_minimal_group(),
            'realistic': _gen_realistic_group(),
        }
        # Record the tasks spawned by each import, keyed like the groups, so
        # tests can inspect them.
        cls.tasks = {}
        for key, package_group in cls.package_groups.items():
            report = _upload_import_package_group(cls.cfg, repo, package_group)
            cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
        utils.publish_repo(cls.cfg, repo)

        # Fetch the generated repodata of type 'group' (a.k.a. 'comps')
        cls.root_element = get_repomd_xml(
            cls.cfg,
            urljoin(
                '/pulp/repos/',
                repo['distributors'][0]['config']['relative_url'],
            ),
            'group'
        )
Esempio n. 15
0
    def test_all(self):
        """Create, sync and publish an OSTree repository.

        Verify that:

        * The distributor's ``last_publish`` attribute is ``None`` after the
          sync. This demonstrates that ``auto_publish`` correctly defaults to
          ``False``.
        * The distributor's ``last_publish`` attribute is not ``None`` after
          the publish.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        # Create an OSTree repository with one distributor attached.
        body = gen_repo()
        body['importer_config'].update({
            'feed': OSTREE_FEED,
            'branches': OSTREE_BRANCHES,
        })
        body['distributors'].append(gen_distributor())
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Sync only: auto_publish defaults to False, so last_publish should
        # still be unset afterwards.
        utils.sync_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after sync'):
            self.assertIsNone(repo['distributors'][0]['last_publish'])

        # An explicit publish sets last_publish.
        utils.publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after publish'):
            self.assertIsNotNone(repo['distributors'][0]['last_publish'])
Esempio n. 16
0
    def test_publish_override_config(self):
        """Use the ``packages_directory`` publish override option.

        Create a distributor with default options, and use it to publish the
        repository. Specify the ``packages_directory`` option during the
        publish as an override option. Verify packages end up in the specified
        directory, relative to the published repository's root.
        """
        # FIX: was ``self.cfg.version`` — every other bug_is_untestable call
        # in this suite passes ``pulp_version``, so ``version`` looks like a
        # typo that would break the skip check.
        if selectors.bug_is_untestable(1976, self.cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/1976')
        # Attach a distributor with default options to the repository.
        distributor = api.Client(self.cfg).post(
            urljoin(self.repo['_href'], 'distributors/'),
            gen_distributor(),
        ).json()
        packages_dir = utils.uuid4()
        # Publish, overriding packages_directory for this publish only.
        utils.publish_repo(self.cfg, self.repo, {
            'id': distributor['id'],
            'override_config': {'packages_directory': packages_dir},
        })
        # Every package href in primary.xml must point into the override
        # directory, relative to the published repo's root.
        primary_xml = get_parse_repodata_primary_xml(self.cfg, distributor)
        package_hrefs = get_package_hrefs(primary_xml)
        self.assertGreater(len(package_hrefs), 0)
        for package_href in package_hrefs:
            with self.subTest(package_href=package_href):
                self.assertEqual(os.path.dirname(package_href), packages_dir)
Esempio n. 17
0
    def test_01_first_repo(self):
        """Create, sync content into and publish a Python repository.

        See:

        * `Pulp #135 <https://pulp.plan.io/issues/135>`_
        * `Pulp Smash #494 <https://github.com/PulpQE/pulp-smash/issues/494>`_
        """
        # Skip on Pulp < 2.13, or when issue #135 is untestable.
        if (self.cfg.pulp_version < Version('2.13')
                or selectors.bug_is_untestable(135, self.cfg.pulp_version)):
            self.skipTest('https://pulp.plan.io/issues/135')
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        # Sync only the 'shelf-reader' package from the PyPI feed.
        body['importer_config'] = {
            'feed': constants.PYTHON_PYPI_FEED_URL,
            'package_names': 'shelf-reader',
        }
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        # Record the repository so other code reading ``self.repos`` sees it.
        self.repos.append(repo)
        call_report = utils.sync_repo(self.cfg, repo)
        with self.subTest(comment='verify the sync succeeded'):
            self.verify_sync(self.cfg, call_report)
        with self.subTest(comment='verify content units are present'):
            self.verify_package_types(self.cfg, repo)
        repo = get_details(self.cfg, repo)
        utils.publish_repo(self.cfg, repo)
Esempio n. 18
0
 def test_01_add_unit(self):
     """Add a content unit to the repository. Publish the repository."""
     # Snapshot the repo before the upload, for before/after comparison.
     repo_before = self.get_repo()
     rpm = utils.http_get(RPM_UNSIGNED_URL)
     utils.upload_import_unit(
         self.cfg,
         rpm,
         {'unit_type_id': 'rpm'},
         self.repo,
     )
     utils.publish_repo(self.cfg, repo_before)
     repo_after = self.get_repo()
     with self.subTest(comment='last_unit_added'):
         if selectors.bug_is_untestable(1847, self.cfg.pulp_version):
             self.skipTest('https://pulp.plan.io/issues/1847')
         # Unset before the upload, set afterwards.
         pre = repo_before['last_unit_added']
         post = repo_after['last_unit_added']
         self.assertIsNone(pre)
         self.assertIsNotNone(post)
     with self.subTest(comment='last_unit_removed'):
         # Nothing was removed, so this stays unset in both snapshots.
         pre = repo_before['last_unit_removed']
         post = repo_after['last_unit_removed']
         self.assertIsNone(pre)
         self.assertIsNone(post)
     with self.subTest(comment='last_publish'):
         # Unset before this test's publish, set afterwards.
         pre = repo_before['distributors'][0]['last_publish']
         post = repo_after['distributors'][0]['last_publish']
         self.assertIsNone(pre)
         self.assertIsNotNone(post)
Esempio n. 19
0
    def test_all(self):
        """Re-sync a child repository with the ``remove_missing`` enabled."""
        repos = []
        cfg = config.get_config()
        if selectors.bug_is_untestable(2616, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2616')

        # Create 1st repo, sync and publish it.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repos.append(client.post(REPOSITORY_PATH, body))
        self.addCleanup(client.delete, repos[0]['_href'])
        repos[0] = _get_details(cfg, repos[0])
        utils.sync_repo(cfg, repos[0])
        utils.publish_repo(cfg, repos[0])

        # Create 2nd repo, sync. Its feed is the 1st repo's published URL,
        # which makes it a "child" of the 1st repo.
        body = gen_repo()
        body['importer_config']['feed'] = urljoin(
            cfg.get_base_url(),
            _PUBLISH_DIR +
            repos[0]['distributors'][0]['config']['relative_url'],
        )
        body['importer_config']['remove_missing'] = True
        repos.append(client.post(REPOSITORY_PATH, body))
        self.addCleanup(client.delete, repos[1]['_href'])
        repos[1] = _get_details(cfg, repos[1])
        utils.sync_repo(cfg, repos[1])

        # Remove an arbitrary number of units from 1st repo, re-publish it.
        units = _get_rpms(cfg, repos[0])
        # Pick at least one unit, possibly all of them, at random.
        marked_units = random.sample(units, random.randint(1, len(units)))
        for marked_unit in marked_units:
            criteria = {
                'filters': {
                    'unit': {
                        'name': marked_unit['metadata']['name']
                    }
                },
                'type_ids': [marked_unit['unit_type_id']],
            }
            client.post(
                urljoin(repos[0]['_href'], 'actions/unassociate/'),
                {'criteria': criteria},
            )
        utils.publish_repo(cfg, repos[0])

        # Re-sync 2nd repo. With remove_missing enabled, the sync task is
        # expected to remove exactly the units dropped from the parent.
        report = utils.sync_repo(cfg, repos[1])
        tasks = tuple(api.poll_spawned_tasks(cfg, report.json()))
        self.assertEqual(
            tasks[0]['result']['removed_count'],
            len(marked_units),
        )
Esempio n. 20
0
    def test_all(self):
        """Sync a repo whose updateinfo file has multiple pkglist sections.

        Do the following:

        1. Create and sync a repository with an importer and distributor.
           Ensure the importer's feed is set to
           :data:`pulp_smash.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL`.
        2. Publish the repository, and fetch and parse its updateinfo file.
        3. Verify the updateinfo contains the correct number of ``<pkglists>``
           sections, with the correct contents in each.
        """
        cfg = config.get_config()
        # FIX: was ``cfg.version`` — every other bug_is_untestable call in
        # this suite passes ``pulp_version``.
        # NOTE(review): the issue number (2227) and the skip URL (2277)
        # disagree; confirm which one is intended.
        if selectors.bug_is_untestable(2227, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2277')

        # Create and sync a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        repo = client.get(repo['_href'], params={'details': True})
        self.addCleanup(client.delete, repo['_href'])
        utils.sync_repo(cfg, repo['_href'])

        # Publish the repository, and fetch and parse its updateinfo file.
        self.assertEqual(len(repo['distributors']), 1, repo['distributors'])
        utils.publish_repo(cfg, repo)
        root_element = get_repomd_xml(
            cfg,
            urljoin(
                '/pulp/repos/',
                repo['distributors'][0]['config']['relative_url']
            ),
            'updateinfo'
        )

        # Verify the contents of the updateinfo file: one <update> element
        # carrying three <pkglist> children.
        debug = ElementTree.tostring(root_element)
        pkglists = root_element.find('update').findall('pkglist')
        self.assertEqual(len(pkglists), 3, debug)

        collections = [pkglist.find('collection') for pkglist in pkglists]
        names = {collection.find('name').text for collection in collections}
        self.assertEqual(names, {'1', '2', '3'}, debug)

        packages = {
            collection.find('package').find('filename').text
            for collection in collections
        }
        self.assertEqual(packages, {
            'penguin-0.9.1-1.noarch.rpm',
            'shark-0.1-1.noarch.rpm',
            'walrus-5.21-1.noarch.rpm',
        }, debug)
Esempio n. 21
0
    def test_all(self):
        """Sync a repo whose updateinfo file has multiple pkglist sections.

        Specifically, do the following:

        1. Create, sync and publish an RPM repository whose feed is set to
           :data:`pulp_smash.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL`.
        2. Fetch and parse the published repository's ``updateinfo.xml`` file.

        Verify that the ``updateinfo.xml`` file has three packages whose
        ``<filename>`` elements have the following text:

        * penguin-0.9.1-1.noarch.rpm
        * shark-0.1-1.noarch.rpm
        * walrus-5.21-1.noarch.rpm

        Note that Pulp is free to change the structure of a source repository
        at will. For example, the source repository has three ``<collection>``
        elements, the published repository can have one, two or three
        ``<collection>`` elements. Assertions are not made about these details.
        """
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        # NOTE(review): the issue number (2227) and the skip URL (2277)
        # disagree; confirm which one is intended.
        if selectors.bug_is_untestable(2227, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2277')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)

        # Fetch and parse ``updateinfo.xml``.
        updates_element = (get_repodata(cfg, repo['distributors'][0],
                                        'updateinfo'))

        # Verify the ``updateinfo.xml`` file: collect every <filename> text,
        # regardless of how the collections are structured, and compare the
        # sorted list against the expected package file names.
        debug = ElementTree.tostring(updates_element)
        filename_elements = (updates_element.findall(
            'update/pkglist/collection/package/filename'))
        filenames = [
            filename_element.text for filename_element in filename_elements
        ]
        filenames.sort()
        self.assertEqual(filenames, [
            'penguin-0.9.1-1.noarch.rpm',
            'shark-0.1-1.noarch.rpm',
            'walrus-5.21-1.noarch.rpm',
        ], debug)
Esempio n. 22
0
    def test_all(self):
        """Test whether copied files retain their original mtime.

        This test targets the following issues:

        * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
        * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

        Do the following:

        1. Create, sync and publish a repository, with ``generate_sqlite`` set
           to true.
        2. Get the ``mtime`` of the sqlite files.
        3. Upload an RPM package into the repository, and sync the repository.
        4. Get the ``mtime`` of the sqlite files again. Verify that the mtimes
           are the same.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2783, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2783')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config']['generate_sqlite'] = True
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)

        # Get the mtime of the sqlite files.
        cli_client = cli.Client(cfg, cli.echo_handler)
        cmd = '' if utils.is_root(cfg) else 'sudo '
        cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
            os.path.join(
                _PATH,
                repo['distributors'][0]['config']['relative_url'],
                'repodata',
            ))
        # BUG FIX: ``list.sort()`` sorts in place and returns None, so the
        # old ``....strip().split().sort()`` left both mtime variables set to
        # None, making the final assertEqual vacuously true. Use sorted() so
        # the actual mtime values are kept and compared.
        mtimes_pre = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())

        # Upload to the repo, and sync it.
        rpm = utils.http_get(RPM_SIGNED_URL)
        utils.upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        utils.sync_repo(cfg, repo)

        # Get the mtime of the sqlite files again. Sleep first so a rewritten
        # file would get a visibly different mtime.
        time.sleep(1)
        mtimes_post = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())
        self.assertEqual(mtimes_pre, mtimes_post)
Esempio n. 23
0
    def test_02_copy_publish(self):
        """Copy an RPM from the first repo to the second, and publish it.

        Execute :meth:`verify_repo_search` and :meth:`verify_repo_download`.
        """
        # Associate units from the first repository into the second. No
        # ``criteria`` is passed, so presumably all units are copied — TODO
        # confirm against the Pulp associate API.
        api.Client(self.cfg).post(
            urljoin(self.repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': self.repos[0]['id']})
        utils.publish_repo(self.cfg, self.repos[1])
        self.verify_repo_search(self.repos[1])
        self.verify_repo_download(self.repos[1])
Esempio n. 24
0
    def test_all(self):
        """Use the ``force_full`` RPM rsync distributor option.

        Do the following:

        1. Create a repository with yum and rsync distributors, and sync it.
        2. Publish with both distributors, and verify the rsync distributor
           placed files on the remote host.
        3. Delete the remote files and publish again; verify the rsync
           publish was skipped (fast-forward) and placed nothing.
        4. Publish with ``force_full`` set to true; verify files are placed
           again despite the fast-forward optimization.
        """
        cfg = config.get_config()
        cli_client = cli.Client(cfg)
        sudo = '' if utils.is_root(cfg) else 'sudo '

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })
        utils.sync_repo(cfg, repo)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = get_dists_by_type_id(cfg, repo)
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            utils.publish_repo(cfg, repo, {'id': distribs[type_id]['id']})
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])

        # Remove all files from the target directory, and publish again. Verify
        # that the RPM rsync distributor didn't place any files.
        cmd = sudo + 'rm -rf /home/{}/content'.format(ssh_user)
        cli_client.run(cmd.split())
        self.verify_publish_is_skip(
            cfg,
            utils.publish_repo(cfg, repo, {
                'id': distribs['rpm_rsync_distributor']['id']
            }).json())
        cmd = sudo + 'ls -1 /home/{}'.format(ssh_user)
        dirs = set(cli_client.run(cmd.split()).stdout.strip().split('\n'))
        self.assertNotIn('content', dirs)

        # Publish the repo with ``force_full`` set to true. Verify that the RPM
        # rsync distributor placed files.
        if selectors.bug_is_untestable(2202, cfg.pulp_version):
            return
        utils.publish_repo(
            cfg, repo, {
                'id': distribs['rpm_rsync_distributor']['id'],
                'override_config': {
                    'force_full': True
                },
            })
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])
Esempio n. 25
0
    def test_update_on_copy(self):
        """Check if copying units into a repo updates ``last_unit_added``.

        Do the following:

        1. Create a repository with a feed and sync it.
        2. Create a second repository. Assert the second repository's
           ``last_unit_added`` attribute is null.
        3. Copy a content unit from first repository to the second. Assert the
           second repository's ``last_unit_added`` attribute is non-null.
        4. Publish the second repository. Assert its ``last_unit_added``
           attribute is non-null.
        """
        if selectors.bug_is_untestable(2688, self.cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2688')

        # create a repo with a feed and sync it
        utils.sync_repo(self.cfg, self.repo)
        self.repo = self.client.get(self.repo['_href'],
                                    params={'details': True})

        # create a second repository
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo2 = self.client.post(REPOSITORY_PATH, body)
        self.addCleanup(self.client.delete, repo2['_href'])
        # Re-read with details so ``last_unit_added`` is present.
        repo2 = self.client.get(repo2['_href'], params={'details': True})
        with self.subTest(comment='after repository creation'):
            self.assertIsNone(repo2['last_unit_added'])

        # copy a content unit (the "bear" RPM) from the first repo to the
        # second
        self.client.post(
            urljoin(repo2['_href'], 'actions/associate/'), {
                'source_repo_id': self.repo['id'],
                'criteria': {
                    'filters': {
                        'unit': {
                            'name': 'bear'
                        }
                    },
                    'type_ids': ['rpm'],
                },
            })
        repo2 = self.client.get(repo2['_href'], params={'details': True})
        with self.subTest(comment='after unit association'):
            self.assertIsNotNone(repo2['last_unit_added'], repo2)

        # publish the second repo
        utils.publish_repo(self.cfg, repo2)
        repo2 = self.client.get(repo2['_href'], params={'details': True})
        with self.subTest(comment='after repository publish'):
            self.assertIsNotNone(repo2['last_unit_added'], repo2)
Esempio n. 26
0
 def test_all(self):
     """Execute the test case business logic.

     Create, sync and publish a repository whose feed is a mixed
     mirrorlist, then verify a published RPM matches the original.
     """
     cfg = config.get_config()
     if check_issue_3104(cfg):
         self.skipTest('https://pulp.plan.io/issues/3104')
     self.check_issue_2277(cfg)
     self.check_issue_2321(cfg)
     repo = self.create_repo(cfg, RPM_MIRRORLIST_MIXED, _gen_rel_url())
     utils.sync_repo(cfg, repo)
     utils.publish_repo(cfg, repo)
     # The published RPM must be byte-identical to the upstream one.
     actual_rpm = get_unit(cfg, repo['distributors'][0], RPM).content
     target_rpm = utils.http_get(RPM_UNSIGNED_URL)
     self.assertEqual(actual_rpm, target_rpm)
Esempio n. 27
0
    def test_01_create_root_repo(self):
        """Create, sync and publish a repository.

        The repositories created in later steps sync from this one.
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        # Stash the repo under the 'root' key so later test steps can use it
        # as their sync source.
        self.repos['root'] = client.post(REPOSITORY_PATH, body)
        self.repos['root'] = _get_details(self.cfg, self.repos['root'])
        utils.sync_repo(self.cfg, self.repos['root'])
        utils.publish_repo(self.cfg, self.repos['root'])
Esempio n. 28
0
    def test_all(self):
        """Publish with a yum and rsync distributor twice.

        Upload two RPMs into a repository, publish with the yum and rsync
        distributors, and verify the first rsync publish processes both
        units while the second (unchanged) publish processes none.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2666, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2666')
        if check_issue_2844(cfg):
            self.skipTest('https://pulp.plan.io/issues/2844')

        # Create a user and a repository.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })

        # Add content.
        for url in (RPM_UNSIGNED_URL, RPM2_UNSIGNED_URL):
            utils.upload_import_unit(cfg, utils.http_get(url),
                                     {'unit_type_id': 'rpm'}, repo)
        dists = get_dists_by_type_id(cfg, repo)

        # See https://pulp.plan.io/issues/2844#note-11
        time.sleep(2)

        # Publish with yum and rsync. After the loop, ``publish_task`` holds
        # the task of the *last* iteration, i.e. the rsync publish — that is
        # the one the assertions below examine.
        for dist in 'yum_distributor', 'rpm_rsync_distributor':
            report = (utils.publish_repo(cfg, repo, {
                'id': dists[dist]['id']
            }).json())
            publish_task = self.get_publish_task(cfg, report)
        num_processed = self.get_num_processed(publish_task)
        with self.subTest(comment='first rsync publish'):
            self.assertEqual(num_processed, 2, publish_task)

        # Publish with yum and rsync again. Nothing changed, so the second
        # rsync publish should process zero units.
        for dist in 'yum_distributor', 'rpm_rsync_distributor':
            report = (utils.publish_repo(cfg, repo, {
                'id': dists[dist]['id']
            }).json())
            publish_task = self.get_publish_task(cfg, report)
        num_processed = self.get_num_processed(publish_task)
        with self.subTest(comment='second rsync publish'):
            self.assertEqual(num_processed, 0, publish_task)
Esempio n. 29
0
    def create_sync_publish_repo(self, body):
        """Create, sync and publish a repository.

        Also, schedule the repository for deletion.

        :param body: A dict of information to use when creating the repository.
        :return: A detailed dict of information about the repository.
        """
        client = api.Client(self.cfg, api.json_handler)
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        # Re-read with details so importer/distributor info is included.
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(self.cfg, repo)
        utils.publish_repo(self.cfg, repo)
        return repo
Esempio n. 30
0
    def test_01_upload_publish(self):
        """Upload an RPM to the first repository, and publish it.

        Execute :meth:`verify_repo_search` and :meth:`verify_repo_download`.
        """
        repo = self.repos[0]
        # ``self.rpm`` is the raw RPM payload; import it as an 'rpm' unit.
        utils.upload_import_unit(
            self.cfg,
            self.rpm,
            {'unit_type_id': 'rpm'},
            repo,
        )
        utils.publish_repo(self.cfg, repo)
        self.verify_repo_search(repo)
        self.verify_repo_download(repo)
Esempio n. 31
0
    def setUpClass(cls):
        """Create RPM repository, delete a package, and publish the repository.

        More specifically, do the following:

        1. Create an RPM repository with a distributor.
        2. Sync the created repository.
        3. Remove the ``gorilla`` package
        4. Publish the repository. Fetch the ``updateinfo.xml`` file from the
           distributor (via ``repomd.xml``), and parse it.
        """
        super(ErratumPkgListCountTestCase, cls).setUpClass()

        # Create a repository.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        repo = client.get(repo['_href'], params={'details': True})
        cls.resources.add(repo['_href'])  # mark for cleanup

        # Sync the repo.
        utils.sync_repo(cls.cfg, repo['_href'])

        # Remove the gorilla package unit
        client.post(
            urljoin(repo['_href'], 'actions/unassociate/'),
            {'criteria': get_unit_unassociate_criteria(RPM_ERRATUM_RPM_NAME)},
        )

        # Publish the repository
        utils.publish_repo(cls.cfg, repo)

        # Fetch and parse updateinfo.xml (or updateinfo.xml.gz), via repomd.xml
        root_element = get_repomd_xml(
            cls.cfg,
            urljoin(
                '/pulp/repos/',
                repo['distributors'][0]['config']['relative_url'],
            ),
            'updateinfo'
        )

        # Fetch the erratum and erratum pkglist for the gorilla package.
        # Test methods assert against ``cls.erratum_pkglists``.
        updates = _get_updates_by_id(root_element)
        erratum = updates[RPM_ERRATUM_ID]
        cls.erratum_pkglists = erratum.findall('pkglist')
Esempio n. 32
0
    def health_check(self):
        """Execute step three of the test plan.

        Create, sync and publish a signed-RPM repository, then verify a
        published RPM is byte-identical to the upstream copy.
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(self.cfg, repo)
        utils.publish_repo(self.cfg, repo)
        pulp_rpm = get_unit(self.cfg, repo['distributors'][0], RPM).content

        # Does this RPM match the original RPM?
        rpm = utils.http_get(RPM_SIGNED_URL)
        self.assertEqual(rpm, pulp_rpm)
Esempio n. 33
0
    def test_all(self):
        """Publish the rpm rsync distributor before the yum distributor.

        Since no yum publish has happened yet, the rsync publish should be
        skipped and place no files on the remote host.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2187, cfg.version):
            self.skipTest('https://pulp.plan.io/issues/2187')

        # Create a user and a repository.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(cfg, {'remote': {
            'host': urlparse(cfg.base_url).netloc,
            'root': '/home/' + ssh_user,
            'ssh_identity_file': ssh_identity_file,
            'ssh_user': ssh_user,
        }})

        # Publish with the rsync distributor.
        distribs = _get_dists_by_type_id(cfg, repo['_href'])
        self.verify_publish_is_skip(cfg, utils.publish_repo(
            cfg,
            repo,
            {'id': distribs['rpm_rsync_distributor']['id']}
        ).json())

        # Verify that the rsync distributor hasn't placed files
        sudo = '' if utils.is_root(cfg) else 'sudo '
        cmd = (sudo + 'ls -1 /home/{}'.format(ssh_user)).split()
        dirs = set(cli.Client(cfg).run(cmd).stdout.strip().split('\n'))
        self.assertNotIn('content', dirs)
Esempio n. 34
0
    def health_check(self):
        """Execute step three of the test plan.

        Create, sync and publish a signed-RPM repository, then verify a
        published RPM is byte-identical to the upstream copy.
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(self.cfg, repo['_href'])
        # NOTE(review): this passes the whole repo dict to get_unit, while a
        # sibling example passes ``repo['distributors'][0]`` — presumably a
        # different pulp-smash API version; confirm against the get_unit
        # signature in use.
        pulp_rpm = get_unit(self.cfg, repo, RPM).content

        # Does this RPM match the original RPM?
        rpm = utils.http_get(RPM_SIGNED_URL)
        self.assertEqual(rpm, pulp_rpm)
Esempio n. 35
0
    def _create_sync_publish_repo(self, cfg):
        """Create, sync and publish a repository.

        Also, schedule it for deletion. Return a detailed dict of information
        about the repository.

        :param cfg: Information about the Pulp deployment being targeted.
        :return: A detailed dict of information about the repository.
        """
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        # Re-read with details so importer/distributor info is included.
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)
        return repo
Esempio n. 36
0
    def test_all(self):
        """Use the ``force_full`` RPM rsync distributor option.

        Publish with yum and rsync distributors, wipe the remote content
        directory, verify a repeat rsync publish is skipped, then verify a
        ``force_full`` publish places files again.
        """
        cfg = config.get_config()
        cli_client = cli.Client(cfg)
        sudo = '' if utils.is_root(cfg) else 'sudo '

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(cfg, {'remote': {
            'host': urlparse(cfg.base_url).netloc,
            'root': '/home/' + ssh_user,
            'ssh_identity_file': ssh_identity_file,
            'ssh_user': ssh_user,
        }})
        utils.sync_repo(cfg, repo['_href'])

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = _get_dists_by_type_id(cfg, repo['_href'])
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            utils.publish_repo(cfg, repo, {'id': distribs[type_id]['id']})
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])

        # Remove all files from the target directory, and publish again. Verify
        # that the RPM rsync distributor didn't place any files.
        cmd = sudo + 'rm -rf /home/{}/content'.format(ssh_user)
        cli_client.run(cmd.split())
        self.verify_publish_is_skip(cfg, utils.publish_repo(
            cfg,
            repo,
            {'id': distribs['rpm_rsync_distributor']['id']}
        ).json())
        cmd = sudo + 'ls -1 /home/{}'.format(ssh_user)
        dirs = set(cli_client.run(cmd.split()).stdout.strip().split('\n'))
        self.assertNotIn('content', dirs)

        # Publish the repo with ``force_full`` set to true. Verify that the RPM
        # rsync distributor placed files.
        if selectors.bug_is_untestable(2202, cfg.version):
            return
        utils.publish_repo(cfg, repo, {
            'id': distribs['rpm_rsync_distributor']['id'],
            'override_config': {'force_full': True},
        })
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])
Esempio n. 37
0
    def publish_to_dir(self, entity_href, distributor_id):
        """Create an export directory, publish to it, and change its owner.

        For details, see :class:`ExportDirMixin`.

        :param entity_href: The path to an entity such as a repository or a
            repository group.
        :param distributor_id: The ID of the distributor to use when exporting.
        :returns: The path to the export directory.
        """
        target_dir = self.create_export_dir()
        # Publish via the given distributor, overriding its export directory
        # with the freshly created one.
        publish_body = {
            'id': distributor_id,
            'override_config': {'export_dir': target_dir},
        }
        utils.publish_repo(
            self.cfg, repo={'_href': entity_href}, json=publish_body)
        self.change_export_dir_owner(target_dir)
        return target_dir
Esempio n. 38
0
    def publish_to_dir(self, entity_href, distributor_id):
        """Create an export directory, publish to it, and change its owner.

        For details, see :class:`ExportDirMixin`.

        :param entity_href: The path to an entity such as a repository or a
            repository group.
        :param distributor_id: The ID of the distributor to use when exporting.
        :returns: The path to the export directory.
        """
        export_dir = self.create_export_dir()
        # ``override_config`` points the distributor at the new directory.
        utils.publish_repo(self.cfg, repo={'_href': entity_href}, json={
            'id': distributor_id,
            'override_config': {'export_dir': export_dir},
        })
        self.change_export_dir_owner(export_dir)
        return export_dir
Esempio n. 39
0
    def test_publish_to_web(self):
        """Publish the repository to the web, and fetch the ISO file.

        The ISO file should be available over both HTTP and HTTPS. Fetch it
        from both locations, and assert that the fetch was successful.
        """
        # Publish the repository, and re-read the distributor.
        utils.publish_repo(self.cfg, self.repo, {'id': self.distributor['id']})
        client = api.Client(self.cfg)
        distributor = client.get(self.distributor['_href']).json()

        # Fetch the ISO file via HTTP and HTTPS, swapping only the URL scheme.
        url = _get_iso_url(self.cfg, self.repo, 'repos', distributor)
        for scheme in ('http', 'https'):
            url = urlunparse((scheme,) + urlparse(url)[1:])
            with self.subTest(url=url):
                self.assertEqual(client.get(url).status_code, 200)
Esempio n. 40
0
    def test_all(self):
        """Use the ``delete`` RPM rsync distributor option.

        Publish with yum and rsync distributors, disassociate all units,
        republish, and verify that an rsync publish with ``delete`` set to
        true removes the previously placed files from the remote host.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2221, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2221')
        api_client = api.Client(cfg)

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })
        utils.sync_repo(cfg, repo)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = get_dists_by_type_id(cfg, repo)
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            utils.publish_repo(cfg, repo, {'id': distribs[type_id]['id']})
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])

        # Disassociate all units from the repo, publish the repo, and verify.
        # An empty ``criteria`` matches every unit.
        api_client.post(urljoin(repo['_href'], 'actions/unassociate/'),
                        {'criteria': {}})
        utils.publish_repo(cfg, repo,
                           {'id': distribs['yum_distributor']['id']})
        self._verify_units_not_in_repo(cfg, repo['_href'])

        # Publish with the RPM rsync distributor, and verify that no RPMs are
        # in the target directory.
        api_client.post(
            urljoin(repo['_href'], 'actions/publish/'), {
                'id': distribs['rpm_rsync_distributor']['id'],
                'override_config': {
                    'delete': True
                },
            })
        self._verify_files_not_in_dir(cfg, **distribs)
Esempio n. 41
0
    def test_publish_to_web(self):
        """Publish the repository to the web, and fetch the ISO file.

        The ISO file should be available over both HTTP and HTTPS. Fetch it
        from both locations, and assert that the fetch was successful.
        """
        # Publish the repository, and re-read the distributor.
        utils.publish_repo(self.cfg, self.repo, {'id': self.distributor['id']})
        client = api.Client(self.cfg)
        distributor = client.get(self.distributor['_href']).json()

        # Fetch the ISO file via HTTP and HTTPS, swapping only the URL scheme.
        url = _get_iso_url(self.cfg, self.repo, 'repos', distributor)
        for scheme in ('http', 'https'):
            url = urlunparse((scheme,) + urlparse(url)[1:])
            with self.subTest(url=url):
                self.assertEqual(client.get(url).status_code, 200)
Esempio n. 42
0
    def setUpClass(cls):
        """Create an RPM repository, upload errata, and publish the repository.

        More specifically, do the following:

        1. Create an RPM repository with a distributor.
        2. Generate a pair of errata. Upload them to Pulp and import them into
           the repository.
        3. Publish the repository. Fetch the ``updateinfo.xml`` file from the
           distributor (via ``repomd.xml``), and parse it.
        """
        super(UpdateInfoTestCase, cls).setUpClass()
        # One erratum without a pkglist, one typical erratum; test methods
        # compare how each is rendered in updateinfo.xml.
        cls.errata = {
            'import_no_pkglist': _gen_errata_no_pkglist(),
            'import_typical': _gen_errata_typical(),
        }
        cls.tasks = {}  # {'import_no_pkglist': (…), 'import_typical': (…)}

        # Create a repository and add a yum distributor.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        repo = client.get(repo['_href'], params={'details': True})
        cls.resources.add(repo['_href'])

        # Import errata into our repository. Publish the repository.
        for key, erratum in cls.errata.items():
            report = utils.upload_import_erratum(
                cls.cfg,
                erratum,
                repo['_href'],
            )
            # Keep the spawned tasks so test methods can inspect the imports.
            cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
        utils.publish_repo(cls.cfg, repo)

        # Fetch and parse updateinfo.xml (or updateinfo.xml.gz), via repomd.xml
        cls.root_element = get_repomd_xml(
            cls.cfg,
            urljoin(
                '/pulp/repos/',
                repo['distributors'][0]['config']['relative_url'],
            ),
            'updateinfo'
        )
Esempio n. 43
0
    def test_all(self):
        """Use the ``delete`` RPM rsync distributor option.

        Publish with yum and rsync distributors, disassociate all units,
        republish, and verify that an rsync publish with ``delete`` set to
        true removes the previously placed files from the remote host.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2221, cfg.version):
            self.skipTest('https://pulp.plan.io/issues/2221')
        api_client = api.Client(cfg)

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(cfg, {'remote': {
            'host': urlparse(cfg.base_url).netloc,
            'root': '/home/' + ssh_user,
            'ssh_identity_file': ssh_identity_file,
            'ssh_user': ssh_user,
        }})
        utils.sync_repo(cfg, repo['_href'])

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = _get_dists_by_type_id(cfg, repo['_href'])
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            utils.publish_repo(cfg, repo, {'id': distribs[type_id]['id']})
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])

        # Disassociate all units from the repo, publish the repo, and verify.
        # An empty ``criteria`` matches every unit.
        api_client.post(urljoin(repo['_href'], 'actions/unassociate/'), {
            'criteria': {}
        })
        utils.publish_repo(
            cfg,
            repo,
            {'id': distribs['yum_distributor']['id']}
        )
        self._verify_units_not_in_repo(cfg, repo['_href'])

        # Publish with the RPM rsync distributor, and verify that no RPMs are
        # in the target directory.
        api_client.post(urljoin(repo['_href'], 'actions/publish/'), {
            'id': distribs['rpm_rsync_distributor']['id'],
            'override_config': {'delete': True},
        })
        self._verify_files_not_in_dir(cfg, **distribs)
Esempio n. 44
0
    def setUpClass(cls):
        """Publish a repository, change it, and publish it again.

        Publish, remove one RPM unit, and publish once more; record both
        call reports and both ``repomd.xml`` responses for the test methods.
        """
        super(ChangeRepoTestCase, cls).setUpClass()
        client = api.Client(cls.cfg)
        relative_url = cls.repo['distributors'][0]['config']['relative_url']

        # Publish, remove a unit, and publish again
        cls.call_reports.append(
            utils.publish_repo(cls.cfg, cls.repo).json()
        )
        cls.repomd_xmls.append(client.get(get_repomd_xml_path(relative_url)))
        # ``limit: 1`` removes a single RPM unit from the repository.
        client.post(
            urljoin(cls.repo['_href'], 'actions/unassociate/'),
            {'criteria': {'type_ids': ['rpm'], 'limit': 1}}
        )
        cls.call_reports.append(
            utils.publish_repo(cls.cfg, cls.repo).json()
        )
        cls.repomd_xmls.append(client.get(get_repomd_xml_path(relative_url)))
Esempio n. 45
0
    def test_default_behaviour(self):
        """Do not use the ``packages_directory`` option.

        Create a distributor with default options, and use it to publish the
        repository. Verify packages end up in the current directory, relative
        to the published repository's root. (This same directory contains the
        ``repodata`` directory, and it may be changed by setting the
        distributor's ``relative_url``.)
        """
        distributor = api.Client(self.cfg).post(
            urljoin(self.repo['_href'], 'distributors/'),
            gen_distributor(),
        ).json()
        utils.publish_repo(self.cfg, self.repo, {'id': distributor['id']})
        primary_xml = get_parse_repodata_primary_xml(self.cfg, distributor)
        package_hrefs = get_package_hrefs(primary_xml)
        self.assertGreater(len(package_hrefs), 0)
        for package_href in package_hrefs:
            with self.subTest(package_href=package_href):
                # An empty dirname means the package sits in the repo root.
                self.assertEqual(os.path.dirname(package_href), '')
Esempio n. 46
0
 def setUpClass(cls):
     """Publish a repository twice.

     Record both call reports and both ``repomd.xml`` responses so the
     test methods can compare the two publishes.
     """
     super(PubTwiceTestCase, cls).setUpClass()
     client = api.Client(cls.cfg)
     distributor = cls.repo['distributors'][0]
     for _ in range(2):
         cls.call_reports.append(
             utils.publish_repo(cls.cfg, cls.repo).json()
         )
         cls.repomd_xmls.append(client.get(
             get_repomd_xml_path(distributor['config']['relative_url'])
         ))
Esempio n. 47
0
    def test_01_force_full_false(self):
        """Publish the repository and set ``force_full`` to false.

        A full publish should occur.
        """
        call_report = utils.publish_repo(self.cfg, self.repo, {
            'id': self.repo['distributors'][0]['id'],
            'override_config': {'force_full': False}
        }).json()
        last_task = next(api.poll_spawned_tasks(self.cfg, call_report))
        task_steps = last_task['result']['details']
        # A full publish processes at least one RPM in the 'rpms' step.
        step = self.get_step(task_steps, 'rpms')
        self.assertGreater(step['num_processed'], 0, step)
Esempio n. 48
0
 def setUpClass(cls):
     """Publish a repository twice, with an override config both times.

     Both publishes use the same randomly generated ``relative_url``
     override; record the call reports and ``repomd.xml`` responses.
     """
     super(PubTwiceWithOverrideTestCase, cls).setUpClass()
     client = api.Client(cls.cfg)
     relative_url = utils.uuid4() + '/'
     for _ in range(2):
         cls.call_reports.append(utils.publish_repo(cls.cfg, cls.repo, {
             'id': cls.repo['distributors'][0]['id'],
             'override_config': {'relative_url': relative_url},
         }).json())
         cls.repomd_xmls.append(client.get(
             get_repomd_xml_path(relative_url)
         ))
Esempio n. 49
0
    def setUpClass(cls):
        """Create, sync and publish a repository. Fetch its ``comps.xml``."""
        super(SyncRepoTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo.
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])  # mark for cleanup
        repo = client.get(repo['_href'], params={'details': True})

        # Sync and publish the repo.
        utils.sync_repo(cls.cfg, repo['_href'])
        utils.publish_repo(cls.cfg, repo)

        # Fetch and parse comps.xml ('group' metadata) via repomd.xml.
        # Test methods assert against both the element tree and its string
        # serialization.
        dist = repo['distributors'][0]
        dist_url = urljoin('/pulp/repos/', dist['config']['relative_url'])
        cls.root_element = get_repomd_xml(cls.cfg, dist_url, 'group')
        cls.xml_as_str = ElementTree.tostring(cls.root_element)
Esempio n. 50
0
    def setUpClass(cls):
        """Publish a repository, remove a unit, and publish it again.

        Specifically, do the following:

        1. Create a repository with a feed and distributor.
        2. Sync and publish the repository.
        3. Select a content unit at random. Remove it from the repository.
        4. Publish the repository.

        The removed unit's ID is stored on ``cls.unit_id``, and the final
        repository state on ``cls.repo``, for the test methods to inspect.
        """
        super(RemoveAndRepublishTestCase, cls).setUpClass()

        # Create a repository with a feed and distributor.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        repo = client.get(repo['_href'], params={'details': True})
        cls.resources.add(repo['_href'])  # mark for deletion

        # Sync and publish the repository.
        utils.sync_repo(cls.cfg, repo['_href'])
        utils.publish_repo(cls.cfg, repo)

        # List RPM content units in the repository. Pick one and remove it.
        # NOTE: There are two versions of the "walrus" RPM, and this works even
        # when that name is picked.
        unit = random.choice(_search_units(cls.cfg, repo['_href'], ('rpm',)))
        cls.unit_id = _get_unit_id(unit)
        _remove_unit(cls.cfg, repo['_href'], unit)

        # Re-publish the repository. sleep() for test_compare_timestamps.
        # Re-read the repository so the test methods have fresh data.
        time.sleep(2)
        utils.publish_repo(cls.cfg, repo)
        cls.repo = client.get(repo['_href'], params={'details': True})
Esempio n. 51
0
    def setUpClass(cls):
        """Upload an erratum to a repo, publish, and download the erratum.

        Do the following:

        1. Create an RPM repository with a distributor.
        2. Sync content into the repository, and upload an erratum to it.
        3. Publish the repository.
        4. Fetch and parse the repository's ``updateinfo.xml`` file.
        """
        super(UploadErratumTestCase, cls).setUpClass()
        # Either open issue breaks the behaviour under test; skip if present.
        if check_issue_2387(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/2387')
        if check_issue_2277(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/2277')
        cls.erratum = gen_erratum()

        # Create an RPM repository with a feed and a distributor.
        api_client = api.Client(cls.cfg, api.json_handler)
        repo_body = gen_repo()
        repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo_body['distributors'] = [gen_distributor()]
        repo = api_client.post(REPOSITORY_PATH, repo_body)
        cls.resources.add(repo['_href'])

        # Sync content into the repository, and give it an erratum.
        utils.sync_repo(cls.cfg, repo['_href'])
        utils.upload_import_erratum(cls.cfg, cls.erratum, repo['_href'])
        repo = api_client.get(repo['_href'], params={'details': True})

        # Publish the repository, then fetch and parse updateinfo.xml.
        utils.publish_repo(cls.cfg, repo)
        relative_url = repo['distributors'][0]['config']['relative_url']
        cls.updateinfo = get_repomd_xml(
            cls.cfg,
            urljoin('/pulp/repos/', relative_url),
            'updateinfo',
        )
Esempio n. 52
0
    def test_02_force_full_omit(self):
        """Publish the repository without passing ``force_full``.

        A fast-forward publish should occur, meaning the "rpms" publish
        step processes zero units. This test targets `Pulp #1966`_.

        .. _Pulp #1966: https://pulp.plan.io/issues/1966
        """
        # Only consult the bug tracker on Pulp >= 2.9 (same short-circuit
        # behaviour as `version >= 2.9 and bug_is_untestable(...)`).
        if self.cfg.version >= Version('2.9'):
            if selectors.bug_is_untestable(1966, self.cfg.version):
                self.skipTest('https://pulp.plan.io/issues/1966')
        report = utils.publish_repo(self.cfg, self.repo).json()
        task = next(api.poll_spawned_tasks(self.cfg, report))
        rpm_step = self.get_step(task['result']['details'], 'rpms')
        self.assertEqual(rpm_step['num_processed'], 0, rpm_step)
Esempio n. 53
0
    def test_all(self):
        """Exercise the ``remote_units_path`` option.

        Create a repository whose importer sets a non-default
        ``remote_units_path``, sync it, publish it with the yum and rpm
        rsync distributors while overriding ``remote_units_path`` a second
        time, and verify files end up under the overridden path.
        """
        cfg = config.get_config()
        # We already know Pulp can deal with 2-segment paths, due to the
        # default remote_units_path of 'content/units'. Exercise a
        # 3-segment and a 1-segment path instead.
        paths = (
            os.path.join(*[utils.uuid4() for _ in range(3)]),
            os.path.join(*[utils.uuid4() for _ in range(1)]),
        )

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo = self.make_repo(cfg, {
            'remote': {
                'host': urlparse(cfg.base_url).netloc,
                'root': '/home/' + ssh_user,
                'ssh_identity_file': ssh_identity_file,
                'ssh_user': ssh_user,
            },
            'remote_units_path': paths[0],
        })
        utils.sync_repo(cfg, repo['_href'])

        # Publish the repo with the yum and rpm rsync distributors,
        # respectively. Verify that files have been correctly placed.
        # NOTE: the distributors are fetched once here; a second, redundant
        # pre-sync fetch whose result was immediately overwritten has been
        # removed.
        distribs = _get_dists_by_type_id(cfg, repo['_href'])
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            utils.publish_repo(cfg, repo, {
                'id': distribs[type_id]['id'],
                'config': {'remote_units_path': paths[1]},
            })
        distribs['rpm_rsync_distributor']['remote_units_path'] = paths[1]
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])
Esempio n. 54
0
    def test_all(self):
        """Publish a repository with the repoview feature on and off.

        Verify that publishing with ``repoview: True`` makes the repo's
        base URL redirect to ``repoview/index.html``, and that a plain
        publish before and after does not redirect.
        """
        cfg = config.get_config()
        if cfg.version < Version('2.9'):
            self.skipTest('https://pulp.plan.io/issues/189')

        # Create a repo, and add content
        client = api.Client(cfg)
        body = gen_repo()
        body['distributors'] = [gen_distributor()]
        repo = client.post(constants.REPOSITORY_PATH, body).json()
        self.addCleanup(client.delete, repo['_href'])
        rpm = utils.http_get(constants.RPM_UNSIGNED_URL)
        utils.upload_import_unit(cfg, rpm, 'rpm', repo['_href'])

        # Get info about the repo distributor
        repo = client.get(repo['_href'], params={'details': True}).json()
        pub_path = urljoin(
            '/pulp/repos/',
            repo['distributors'][0]['config']['relative_url']
        )

        # Publish the repo. An empty response.history means no redirect
        # occurred when fetching the published path.
        utils.publish_repo(cfg, repo)
        response = client.get(pub_path)
        with self.subTest(comment='first publish'):
            self.assertEqual(len(response.history), 0, response.history)

        # Publish the repo a second time, with repoview enabled. The base
        # URL should now redirect once, to repoview/index.html.
        utils.publish_repo(cfg, repo, {
            'id': repo['distributors'][0]['id'],
            'override_config': {'generate_sqlite': True, 'repoview': True},
        })
        response = client.get(pub_path)
        with self.subTest(comment='second publish'):
            self.assertEqual(len(response.history), 1, response.history)
            self.assertEqual(
                response.request.url,
                urljoin(response.history[0].request.url, 'repoview/index.html')
            )

        # Publish the repo a third time, without repoview: the redirect
        # should be gone again.
        if selectors.bug_is_untestable(2349, cfg.version):
            self.skipTest('https://pulp.plan.io/issues/2349')
        utils.publish_repo(cfg, repo)
        response = client.get(pub_path)
        with self.subTest(comment='third publish'):
            self.assertEqual(len(response.history), 0, response.history)
Esempio n. 55
0
    def test_03_force_full_true(self):
        """Publish the repository with ``force_full`` enabled.

        A full publish should occur, so the "rpms" publish step should
        report at least one processed unit. The "force" publish feature
        was introduced in Pulp 2.9, and as such, this test will skip when
        run against an older version of Pulp. See `Pulp #1938`_.

        .. _Pulp #1938: https://pulp.plan.io/issues/1938
        """
        if self.cfg.version < Version('2.9'):
            self.skipTest(
                'This test requires Pulp 2.9. See: '
                'https://pulp.plan.io/issues/1938'
            )
        report = utils.publish_repo(self.cfg, self.repo, {
            'id': self.repo['distributors'][0]['id'],
            'override_config': {'force_full': True},
        }).json()
        # Wait for the spawned publish task, then inspect its step details.
        task = next(api.poll_spawned_tasks(self.cfg, report))
        rpm_step = self.get_step(task['result']['details'], 'rpms')
        self.assertGreater(rpm_step['num_processed'], 0, rpm_step)
Esempio n. 56
0
    def test_all(self):
        """Ensure fast-forward publishes use files referenced by repomd.xml.

        Plant a tampered ("dummy") primary.xml between publishes and
        verify that neither a regular publish nor an incremental
        fast-forward publish picks up the tampered content.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(1088, cfg.version):
            self.skipTest('https://pulp.plan.io/issues/1088')
        repo = self._create_sync_repo(cfg)
        old_phrase = 'A dummy package of'
        new_phrase = utils.uuid4()

        # Publish the repository, and verify its […]-primary.xml file.
        utils.publish_repo(cfg, repo)
        primary_xml = self._read_primary_xml(cfg, repo)
        self.assertIn(old_phrase, primary_xml)
        self.assertNotIn(new_phrase, primary_xml)

        # Create a dummy-primary.xml. Trigger a regular publish by first
        # unassociating a unit, so the publish is not a no-op.
        self._create_dummy_primary_xml(cfg, repo, old_phrase, new_phrase)
        api.Client(cfg).post(urljoin(repo['_href'], 'actions/unassociate/'), {
            'criteria': {
                'filters': {'unit': {'name': 'bear'}},
                'type_ids': ['rpm'],
            }
        })
        utils.publish_repo(cfg, repo)
        primary_xml = self._read_primary_xml(cfg, repo)
        # The tampered phrase must not leak into the published metadata.
        self.assertIn(old_phrase, primary_xml)
        self.assertNotIn(new_phrase, primary_xml)

        # Create a dummy-primary.xml. Trigger an incremental fast-forward pub.
        # Fast-forward publish described here: https://pulp.plan.io/issues/2113
        self._create_dummy_primary_xml(cfg, repo, old_phrase, new_phrase)
        utils.sync_repo(cfg, repo['_href'])
        utils.publish_repo(cfg, repo)
        primary_xml = self._read_primary_xml(cfg, repo)
        self.assertIn(old_phrase, primary_xml)
        self.assertNotIn(new_phrase, primary_xml)