Example #1
    @classmethod
    def setUpClass(cls):
        """Create a schedule to publish the repository.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 30 seconds
        """
        super().setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Schedule a publish to run every 30 seconds
        distributor = gen_distributor()
        distributor_url = urljoin(repo['_href'], 'distributors/')
        client.post(distributor_url, distributor)
        scheduling_url = urljoin(
            distributor_url,
            '{}/schedules/publish/'.format(distributor['distributor_id']),
        )
        cls.response = client.post(scheduling_url, {'schedule': 'PT30S'})
        cls.attrs = cls.response.json()
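
The ``schedule`` value is an ISO 8601 duration, so ``PT30S`` fires every 30 seconds. A companion test could then check the recorded response and attributes; a minimal sketch, not from the source, assuming the create call returns HTTP 201 and echoes the ``schedule`` field back:

    def test_create_response(self):
        """Hypothetical follow-up checks; not part of the original test."""
        self.assertEqual(self.response.status_code, 201)  # assumed status
        self.assertEqual(self.attrs['schedule'], 'PT30S')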
Example #2
    def test_01_set_up(self):
        """Create, sync and publish a Docker repository.

        Specifically, do the following:

        1. Create, sync and publish a Docker repository. Let the repository's
           upstream name reference a repository that has an image with a
           manifest list and no amd64/linux build.
        2. Make Crane immediately re-read the metadata files published by Pulp.
           (Restart Apache.)
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            # DOCKER_UPSTREAM_NAME (dmage/manifest-list-test) has an image
            # without any amd64/linux build. However, it has a v1 manifest.
            'upstream_name': 'dmage/busybox',
        })
        body['distributors'] = [gen_distributor()]
        type(self).repo = client.post(REPOSITORY_PATH, body)
        type(self).repo = client.get(self.repo['_href'],
                                     params={'details': True})
        sync_repo(self.cfg, self.repo)
        publish_repo(self.cfg, self.repo)

        # Make Crane read metadata. (Now!)
        cli.GlobalServiceManager(self.cfg).restart(('httpd', ))
Example #3
    def test_all(self):
        """Verify whether uploaded module.yaml is reflected in the pulp repo."""
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17'):
            raise unittest.SkipTest(
                'This test requires Pulp 2.17 or newer.')
        client = api.Client(cfg, api.json_handler)
        # Create a normal Repo without any data.
        body = gen_repo(importer_config={'feed': RPM_UNSIGNED_FEED_URL},
                        distributors=[gen_distributor()])
        repo = client.post(REPOSITORY_PATH, body)
        repo = client.get(repo['_href'], params={'details': True})
        self.addCleanup(client.delete, repo['_href'])
        sync_repo(cfg, repo)

        # download modules.yaml and upload it to pulp_repo
        unit = self._get_module_yaml_file(RPM_WITH_MODULES_FEED_URL)
        upload_import_unit(cfg, unit, {
            'unit_key': {},
            'unit_type_id': 'modulemd',
        }, repo)
        repo = client.get(repo['_href'], params={'details': True})
        # Assert that `modulemd` and `modulemd_defaults` are present on the
        # repository.
        self.assertIsNotNone(repo['content_unit_counts']['modulemd'])
        self.assertIsNotNone(repo['content_unit_counts']['modulemd_defaults'])
Example #4
    def test_all(self):
        """Verify whether package manager can read module list from a Pulp repo."""
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17'):
            raise unittest.SkipTest(
                'This test requires Pulp 2.17 or newer.')
        if not os_support_modularity(cfg):
            raise unittest.SkipTest(
                'This test requires an OS that supports modularity.')
        client = api.Client(cfg, api.json_handler)
        body = gen_repo(importer_config={'feed': RPM_WITH_MODULES_FEED_URL},
                        distributors=[gen_distributor()])

        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        repo_path = gen_yum_config_file(
            cfg,
            baseurl=urljoin(
                cfg.get_base_url(),
                urljoin('pulp/repos/',
                        repo['distributors'][0]['config']['relative_url'])),
            name=repo['_href'],
            repositoryid=repo['id'])
        cli_client = cli.Client(cfg)
        self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
        lines = cli_client.run(('dnf', 'module', 'list', '--all'),
                               sudo=True).stdout.splitlines()
        for key, value in MODULE_FIXTURES_PACKAGES.items():
            with self.subTest(package=key):
                module = [line for line in lines if key in line]
                self.assertEqual(len(module), value, module)
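
``MODULE_FIXTURES_PACKAGES`` is not shown in this snippet. From the way it is used above, it maps a module name to the number of matching lines expected in the ``dnf module list`` output; a hypothetical value, for illustration only:

    # Hypothetical shape -- the real constant comes from pulp_2_tests:
    MODULE_FIXTURES_PACKAGES = {'walrus': 2}  # module name -> expected matches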
Example #5
    @classmethod
    def setUpClass(cls):
        """Create a schedule to publish a repo, verify the ``total_run_count``.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 2 minutes
        4. Wait for 130 seconds and read the schedule to get the number of
           "publish" runs
        """
        super().setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Schedule a publish to run every 2 minutes
        distributor = gen_distributor()
        client.post(urljoin(repo['_href'], 'distributors/'), distributor)
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        schedule_path = urljoin(repo['_href'], scheduling_url)
        schedule = client.post(schedule_path, {'schedule': 'PT2M'})

        # Wait for publish to run
        time.sleep(130)

        # Read the schedule
        cls.response = client.get(schedule['_href'])
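
The schedule fires every two minutes and ``setUpClass`` sleeps for 130 seconds, so exactly one publish should have run by the time the schedule is re-read. A minimal companion test, assuming the schedule resource exposes a ``total_run_count`` field (a sketch, not the source):

    def test_total_run_count(self):
        """Assert the scheduled publish ran exactly once during the wait."""
        self.assertEqual(self.response['total_run_count'], 1)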
Example #6
    def test_all(self):
        """Create, sync and publish an OSTree repository.

        Verify that:

        * The distributor's ``last_publish`` attribute is ``None`` after the
          sync. This demonstrates that ``auto_publish`` correctly defaults to
          ``False``.
        * The distributor's ``last_publish`` attribute is not ``None`` after
          the publish.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        # Create a repository.
        body = gen_repo()
        body['importer_config']['feed'] = OSTREE_FEED
        body['importer_config']['branches'] = OSTREE_BRANCHES
        body['distributors'].append(gen_distributor())
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Sync the repository.
        sync_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after sync'):
            self.assertIsNone(repo['distributors'][0]['last_publish'])

        # Publish the repository.
        publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after publish'):
            self.assertIsNotNone(repo['distributors'][0]['last_publish'])
Example #7
    def test_broken_symlinks(self):
        """Assert that the rsync yum metadata is not a symlink."""
        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(self.cfg)
        ssh_identity_file = self.write_private_key(self.cfg, priv_key)
        repo = self.make_repo(
            self.cfg, {
                'remote': {
                    'host': urlparse(self.cfg.get_base_url()).hostname,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            }, RPM_YUM_METADATA_FILE)
        sync_repo(self.cfg, repo)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = get_dists_by_type_id(self.cfg, repo)
        self.maybe_disable_selinux(self.cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            publish_repo(self.cfg, repo, {'id': distribs[type_id]['id']})
        path = os.path.join(
            distribs['rpm_rsync_distributor']['config']['remote']['root'],
            distribs['yum_distributor']['config']['relative_url'], 'repodata')

        # Assert that the productid was not saved as symlink
        productid_symlink = self.find_productid(True, path)
        self.assertEqual(len(productid_symlink), 0, productid_symlink)

        # Assert that the productid was saved as a file
        productid_file = self.find_productid(False, path)
        self.assertEqual(len(productid_file), 1, productid_file)
Example #8
    def test_all(self):
        """Create and sync a puppet repository with no feed."""
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2628, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2628')

        # Create a repository.
        client = api.Client(cfg, api.json_handler)
        repo = client.post(REPOSITORY_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        # Sync the repository. An error *should* occur. We just want the error
        # to be sane.
        with self.assertRaises(exceptions.TaskReportError) as err:
            sync_repo(cfg, repo)
        with self.subTest(comment='check task "error" field'):
            self.assertIsNotNone(err.exception.task['error'])
            self.assertNotEqual(
                err.exception.task['error']['description'],
                "'NoneType' object has no attribute 'endswith'"
            )
            self.assertNotEqual(err.exception.task['error']['code'], 'PLP0000')
        with self.subTest(comment='check task "exception" field'):
            self.assertIsNone(err.exception.task['exception'])
        with self.subTest(comment='check task "traceback" field'):
            self.assertIsNone(err.exception.task['traceback'])
Example #9
 def test_update_tag_another_repo(self):
     """Check if tagging fail for a manifest from another repo."""
     other = create_docker_repo(self.cfg, 'library/swarm')
     self.addCleanup(api.Client(self.cfg).delete, other['_href'])
     sync_repo(self.cfg, other)
     other = api.Client(self.cfg, api.json_handler).get(
         other['_href'], params={'details': True})
     other_manifest = random.choice(search_units(
         self.cfg, other, {'type_ids': ['docker_manifest']}))
     tag_name = utils.uuid4()
     with self.assertRaises(TaskReportError) as context:
         import_upload(self.cfg, self.repo, {
             'unit_type_id': 'docker_tag',
             'unit_key': {
                 'repo_id': self.repo['id'],
                 'name': tag_name,
             },
             'unit_metadata': {
                 'name': tag_name,
                 'digest': other_manifest['metadata']['digest'],
             },
         })
     self.assertEqual(
         'Manifest with digest {} could not be found in repository {}.'
         .format(other_manifest['metadata']['digest'], self.repo['id']),
         context.exception.task['error']['description']
     )
     self.assertEqual(len(self._get_tags()), len(self.tags))
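
``_get_tags()`` is defined elsewhere in this test class. Judging from how it is used here and in Example #21, it plausibly wraps a unit search; a sketch under that assumption:

 def _get_tags(self):
     """Hypothetical helper: list the repository's docker_tag units."""
     return search_units(
         self.cfg, self.repo, {'type_ids': ['docker_tag']})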
Example #10
 def test_all(self):
     """Execute the test case business logic."""
     cfg = config.get_config()
     self.check_issue_2363(cfg)
     repo = self.create_repo(cfg, RPM_MIRRORLIST_BAD, _gen_rel_url())
     with self.assertRaises(TaskReportError):
         sync_repo(cfg, repo)
Example #11
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test cases
    may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.9'):
        raise unittest.SkipTest('This module requires Pulp 2.9 or greater.')
    if check_issue_2277(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

    # Create and sync a repository.
    client = api.Client(cfg, api.json_handler)
    _CLEANUP.append((client.delete, [ORPHANS_PATH], {}))
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    _REPO.clear()
    _REPO.update(client.post(REPOSITORY_PATH, body))
    _CLEANUP.append((client.delete, [_REPO['_href']], {}))
    try:
        sync_repo(cfg, _REPO)
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        tearDownModule()
        raise
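
``setUpModule()`` queues its clean-up steps in ``_CLEANUP`` so that ``tearDownModule()`` can undo them even after a partial failure. A minimal sketch of that counterpart, assuming each entry is a ``(func, args, kwargs)`` triple as built above:

def tearDownModule():  # pylint:disable=invalid-name
    """Run the queued clean-up steps in LIFO order. (A sketch.)"""
    while _CLEANUP:
        func, args, kwargs = _CLEANUP.pop()
        func(*args, **kwargs)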
Example #12
    @classmethod
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super().setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
        if not selectors.bug_is_fixed(4120, cls.cfg.pulp_version):
            raise unittest.SkipTest('https://pulp.plan.io/issues/4120')

        # Ensure `locally_stored_units` is 0 before we start.
        reset_squid(cls.cfg)
        reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        cls.rpm = get_unit(cls.cfg, cls.repo['distributors'][0], RPM)
        cls.same_rpm = get_unit(cls.cfg, cls.repo['distributors'][0], RPM)
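
With the ``on_demand`` policy the RPM is fetched through Squid on first access, so the two downloads above should be byte-identical, with the second served from the cache. A minimal companion check, assuming ``get_unit`` returns a ``requests.Response`` as its use in Example #20 suggests:

    def test_rpm_contents(self):
        """Assert both downloads returned the same bits. (A sketch.)"""
        self.assertEqual(self.rpm.content, self.same_rpm.content)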
Example #13
    def test_all(self):
        """Search contents of a richnweak repository matching package name.

        This test targets `Pulp #3929`_ and `Pulp Smash #901`_. The
        `repository content`_ documentation describes the CLI content syntax.

        .. _Pulp #3929:  https://pulp.plan.io/issues/3929
        .. _Pulp Smash #901: https://github.com/PulpQE/pulp-smash/issues/901
        .. _repository content:
            https://docs.pulpproject.org/en/latest/user-guide/admin-client/repositories.html#content-search

        Asserts the required fields are present.
        """
        cfg = config.get_config()
        if cfg.pulp_version < Version('2.17.1'):
            raise unittest.SkipTest('This test requires Pulp 2.17.1 or newer.')
        api_client = api.Client(cfg, api.json_handler)
        body = gen_repo(
            importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
        )
        repo = api_client.post(REPOSITORY_PATH, body)
        self.addCleanup(api_client.delete, repo['_href'])
        sync_repo(cfg, repo)
        repo = api_client.get(repo['_href'], params={'details': True})

        result = cli.Client(cfg).run(
            'pulp-admin rpm repo content rpm --repo-id {} '
            '--match name=Cobbler'
            .format(repo['id']).split()
        )
        required_fields = ('Recommends:', 'Requires:', 'Provides:')
        for field in required_fields:
            with self.subTest(field=field):
                self.assertEqual(result.stdout.count(field), 1, result)
Example #14
    @classmethod
    def setUpClass(cls):
        """Create an RPM repository and issue a task to download the repo.

        Do the following:

        1. Reset Pulp.
        2. Create a repository with the "on demand" download policy.
        3. Sync the repository.
        4. Trigger a repository download.
        5. Corrupt a file in the repository.
        6. Trigger a repository download, without unit verification.
        7. Trigger a repository download, with unit verification.
        """
        cls.cfg = config.get_config()
        if (not selectors.bug_is_fixed(1905, cls.cfg.pulp_version)
                and os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Ensure Pulp is empty of units otherwise we might just associate pre-
        # existing units.
        reset_pulp(cls.cfg)

        # Create and sync a repository.
        api_client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo(importer_config={
            'feed': RPM_UNSIGNED_FEED_URL,
            'download_policy': 'on_demand'
        })
        cls.repo = api_client.post(REPOSITORY_PATH, body)
        sync_repo(cls.cfg, cls.repo)

        # Trigger a repository download. Read the repo before and after.
        download_path = urljoin(cls.repo['_href'], 'actions/download/')
        params = {'details': True}
        cls.repo_pre_download = api_client.get(cls.repo['_href'],
                                               params=params)
        api_client.post(download_path, {'verify_all_units': False})
        cls.repo_post_download = api_client.get(cls.repo['_href'],
                                                params=params)

        # Corrupt an RPM. The file is there, but the checksum isn't right.
        rpm_abs_path = cls.get_rpm_abs_path()
        cli_client = cli.Client(cls.cfg)
        checksum_cmd = ('sha256sum ' + rpm_abs_path).split()
        cls.sha_pre_corruption = cli_client.run(checksum_cmd,
                                                sudo=True).stdout.strip()
        cli_client.run(('rm ' + rpm_abs_path).split(), sudo=True)
        cli_client.run(('touch ' + rpm_abs_path).split(), sudo=True)
        cli_client.run(('chown apache:apache ' + rpm_abs_path).split(),
                       sudo=True)
        cls.sha_post_corruption = cli_client.run(checksum_cmd,
                                                 sudo=True).stdout.strip()

        # Trigger repository downloads that don't and do checksum files, resp.
        api_client.post(download_path, {'verify_all_units': False})
        cls.unverified_file_sha = cli_client.run(checksum_cmd,
                                                 sudo=True).stdout.strip()
        api_client.post(download_path, {'verify_all_units': True})
        cls.verified_file_sha = cli_client.run(checksum_cmd,
                                               sudo=True).stdout.strip()
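
Companion tests can then compare the recorded checksums: corrupting the file must change its checksum, a download without verification must leave the corrupted file in place, and a verifying download must restore the original. A minimal sketch, not part of the source:

    def test_checksums(self):
        """Hypothetical assertions over the checksums recorded above."""
        self.assertNotEqual(self.sha_pre_corruption, self.sha_post_corruption)
        self.assertEqual(self.sha_post_corruption, self.unverified_file_sha)
        self.assertEqual(self.sha_pre_corruption, self.verified_file_sha)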
Example #15
    def do_test(self, recursive, recursive_conservative):
        """Copy of units for a repository with rich/weak dependencies."""
        repos = []
        body = gen_repo(importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
                        distributors=[gen_distributor()])
        repos.append(self.client.post(REPOSITORY_PATH, body))
        self.addCleanup(self.client.delete, repos[0]['_href'])
        sync_repo(self.cfg, repos[0])
        repos.append(self.client.post(REPOSITORY_PATH, gen_repo()))
        self.addCleanup(self.client.delete, repos[1]['_href'])

        # Pulp 2.18.1 introduced a new flag `recursive_conservative`.
        # If true, units are copied together with their
        # dependencies, unless those are already satisfied by the content in
        # the target repository.
        override_config = {'recursive': recursive}
        if self.cfg.pulp_version >= Version('2.18.1'):
            override_config.update(
                {'recursive_conservative': recursive_conservative})
        self.client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'), {
                'source_repo_id': repos[0]['id'],
                'override_config': override_config,
                'criteria': {
                    'filters': {
                        'unit': {
                            'name': RPM2_RICH_WEAK_DATA['name']
                        }
                    },
                    'type_ids': ['rpm'],
                },
            })
        return self.client.get(repos[1]['_href'], params={'details': True})
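
``do_test()`` returns the target repository with details, so the actual test methods only differ in the flags they pass. A hypothetical caller, for illustration:

    def test_recursive_nonconservative(self):
        """Hypothetical caller: copy the unit together with its dependencies."""
        repo = self.do_test(recursive=True, recursive_conservative=False)
        self.assertIn('rpm', repo['content_unit_counts'])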
Example #16
 def test_all(self):
     """Check if Pulp only associate missing repo content."""
     cfg = config.get_config()
     if cfg.pulp_version < Version('2.11'):
         self.skipTest(
             'Selective association is available on Pulp 2.11+. See Pulp '
             '#2457 for more information.'
         )
     client = api.Client(cfg, api.json_handler)
     body = gen_repo()
     body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
     repo = client.post(REPOSITORY_PATH, body)
     self.addCleanup(client.delete, repo['_href'])
     sync_repo(cfg, repo)
     rpm_units = _get_units_by_type(search_units(cfg, repo), 'rpm')
     # Select up to a quarter of the available units to remove.
     to_remove = random.sample(
         rpm_units, random.randrange(int(RPM_UNSIGNED_FEED_COUNT / 4)))
     for unit in to_remove:
         _remove_unit(cfg, repo, unit)
     report = client.post(urljoin(repo['_href'], 'actions/sync/'))
     tasks = tuple(api.poll_spawned_tasks(cfg, report))
     self.assertEqual(len(tasks), 1, tasks)
     self.assertEqual(
         tasks[0]['result']['added_count'], len(to_remove), to_remove)
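
``_get_units_by_type()`` and ``_remove_unit()`` are helpers this snippet does not show. From its call site, the first plausibly filters a ``search_units()`` result by unit type; a sketch under that assumption:

def _get_units_by_type(units, type_id):
    """Hypothetical helper: keep only the units of the given type."""
    return [unit for unit in units if unit['unit_type_id'] == type_id]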
Example #17
 def test_all(self):
     """Package manager can consume RPM with rich/weak dependencies from Pulp."""
     cfg = config.get_config()
     if cfg.pulp_version < Version('2.17'):
         raise unittest.SkipTest('This test requires Pulp 2.17 or newer.')
     if not rpm_rich_weak_dependencies(cfg):
         raise unittest.SkipTest('This test requires RPM 4.12 or newer.')
     client = api.Client(cfg, api.json_handler)
     body = gen_repo(
         importer_config={'feed': RPM_RICH_WEAK_FEED_URL},
         distributors=[gen_distributor()]
     )
     repo = client.post(REPOSITORY_PATH, body)
     self.addCleanup(client.delete, repo['_href'])
     repo = client.get(repo['_href'], params={'details': True})
     sync_repo(cfg, repo)
     publish_repo(cfg, repo)
     repo_path = gen_yum_config_file(
         cfg,
         baseurl=urljoin(cfg.get_base_url(), urljoin(
             'pulp/repos/',
             repo['distributors'][0]['config']['relative_url']
         )),
         name=repo['_href'],
         repositoryid=repo['id']
     )
     cli_client = cli.Client(cfg)
     self.addCleanup(cli_client.run, ('rm', repo_path), sudo=True)
     rpm_name = 'Cobbler'
     pkg_mgr = cli.PackageManager(cfg)
     pkg_mgr.install(rpm_name)
     self.addCleanup(pkg_mgr.uninstall, rpm_name)
     rpm = cli_client.run(('rpm', '-q', rpm_name)).stdout.strip().split('-')
     self.assertEqual(rpm_name, rpm[0])
Example #18
    def test_invalid_file_feed(self):
        """Create and sync a ISO repo from an invalid file feed.

        Assert that the sync fails with the information that some units were
        not available.
        """
        if self.cfg.pulp_version < Version('2.11'):
            self.skipTest(
                'Pulp reports 404 for ISO repos only on 2.11 or greater.')
        if not selectors.bug_is_fixed(3899, self.cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/3899')
        pulp_manifest = self.parse_pulp_manifest(FILE_MIXED_FEED_URL)
        missing = [
            row['name'] for row in pulp_manifest
            if row['name'].startswith('missing')
        ]
        client = api.Client(self.cfg, api.json_handler)
        repo = client.post(REPOSITORY_PATH, _gen_iso_repo(FILE_MIXED_FEED_URL))
        self.addCleanup(client.delete, repo['_href'])
        with self.assertRaises(exceptions.TaskReportError) as context:
            sync_repo(self.cfg, repo)
        task = context.exception.task
        self.assertIsNotNone(task['error'])
        # The description is a string representation of a Python list of
        # dicts. Adjust the string so it can be parsed as JSON instead of
        # using eval; having it as a Python object makes inspection easier.
        description = json.loads(
            task['error']['description']
            .replace("u'", "'")
            .replace("'", '"')
        )
        for info in description:
            with self.subTest(name=info['name']):
                self.assertEqual(info['error']['response_code'], 404)
                self.assertEqual(info['error']['response_msg'], 'Not Found')
                self.assertIn(info['name'], missing)
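
``parse_pulp_manifest()`` is defined elsewhere in the test class. An ISO feed publishes a ``PULP_MANIFEST`` file of comma-separated ``name,checksum,size`` rows, so the helper might look like the sketch below (assuming ``import csv``, pulp_smash's ``utils.http_get``, and a feed URL that ends in a slash):

    def parse_pulp_manifest(self, feed_url):
        """Hypothetical helper: fetch and parse a feed's PULP_MANIFEST."""
        manifest = utils.http_get(urljoin(feed_url, 'PULP_MANIFEST'))
        return list(csv.DictReader(
            manifest.decode('utf-8').splitlines(),
            fieldnames=('name', 'checksum', 'size'),
        ))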
Example #19
    def test_broken_symlinks(self):
        """Test broken symlinks."""
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo(
            importer_config={'feed': RPM_YUM_METADATA_FILE},
            distributors=[gen_distributor(auto_publish=True)]
        )
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        sync_repo(self.cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})

        # Assert that there is a yum_repo_metadata file present in the repo.
        self.assertEqual(
            repo['content_unit_counts']['yum_repo_metadata_file'],
            1,
            repo
        )

        path = os.path.join(
            '/var/lib/pulp/published/yum/https/repos/',
            repo['distributors'][0]['config']['relative_url'],
            'repodata'
        )

        # Assert that the productid was not saved as symlink
        productid_symlink = self.find_productid(True, path)
        self.assertEqual(len(productid_symlink), 0, productid_symlink)

        # Assert that the productid was saved as a file
        productid_file = self.find_productid(False, path)
        self.assertEqual(len(productid_file), 1, productid_file)
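
``find_productid()`` is not shown in this snippet. Given its ``(symlink, path)`` arguments and list return value, it plausibly shells out to ``find``; a sketch under that assumption:

    def find_productid(self, symlink, path):
        """Hypothetical helper: list productid entries of the given kind."""
        file_type = 'l' if symlink else 'f'
        cmd = ('find', path, '-type', file_type, '-name', 'productid*')
        return cli.Client(self.cfg).run(cmd, sudo=True).stdout.splitlines()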
Example #20
 def test_all(self):
     """Sync two repositories w/identical content but differing layouts."""
     cfg = config.get_config()
     if check_issue_3104(cfg):
         self.skipTest('https://pulp.plan.io/issues/3104')
     if check_issue_2798(cfg):
         self.skipTest('https://pulp.plan.io/issues/2798')
     if check_issue_2354(cfg):
         self.skipTest('https://pulp.plan.io/issues/2354')
     if (os_is_f26(cfg)
             and not selectors.bug_is_fixed(3036, cfg.pulp_version)):
         # Here, the calls to get_unit() cause pulp_streamer.service to die
         # without logging anything. In Pulp #3036, certain actions cause
         # pulp_streamer.service to die while dumping core. Thus, this test
         # failure might be unrelated to Pulp #3036.
         self.skipTest('https://pulp.plan.io/issues/3036')
     repos = [
         self.create_repo(cfg, feed, 'on_demand')
         for feed in (RPM_ALT_LAYOUT_FEED_URL, RPM_UNSIGNED_FEED_URL)
     ]
     for repo in repos:
         sync_repo(cfg, repo)
     for repo in repos:
         publish_repo(cfg, repo)
     rpms = []
     for repo in repos:
         with self.subTest(repo=repo):
             rpms.append(
                 get_unit(cfg, repo['distributors'][0], RPM).content)
     self.assertEqual(len(rpms), len(repos))
     self.assertEqual(rpms[0], rpms[1], repos)
Example #21
 def setUp(self):
     """Create and sync a docker repository."""
     super().setUp()
     self.repo = create_docker_repo(self.cfg, get_upstream_name(self.cfg))
     self.addCleanup(api.Client(self.cfg).delete, self.repo['_href'])
     sync_repo(self.cfg, self.repo)
     self.repo = api.Client(self.cfg, api.json_handler).get(
         self.repo['_href'], params={'details': True})
     self.tags = self._get_tags()
Example #22
    def test_all(self):
        """Test whether copied files retain their original mtime.

        This test targets the following issues:

        * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
        * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

        Do the following:

        1. Create, sync and publish a repository, with ``generate_sqlite`` set
           to true.
        2. Get the ``mtime`` of the sqlite files.
        3. Upload an RPM package into the repository, and sync the repository.
        4. Get the ``mtime`` of the sqlite files again. Verify that the mtimes
           are the same.
        """
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2783, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2783')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config']['generate_sqlite'] = True
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)

        # Get the mtime of the sqlite files.
        cli_client = cli.Client(cfg, cli.echo_handler)
        cmd = '' if cli.is_root(cfg) else 'sudo '
        cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
            os.path.join(
                _PATH,
                repo['distributors'][0]['config']['relative_url'],
                'repodata',
            ))
        # machine.session is used here to keep SSH session open
        mtimes_pre = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())

        # Upload to the repo, and sync it.
        rpm = utils.http_get(RPM_SIGNED_URL)
        upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        sync_repo(cfg, repo)

        # Get the mtime of the sqlite files again.
        time.sleep(1)
        # machine.session is used here to keep SSH session open
        mtimes_post = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())
        self.assertEqual(mtimes_pre, mtimes_post)
Example #23
    @classmethod
    def setUpClass(cls):
        """Create several schedules.

        Each schedule is created to test a different failure scenario.
        """
        super().setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        sync_repo(cls.cfg, repo)

        # Add a distributor.
        distributor = gen_distributor()
        client.post(urljoin(repo['_href'], 'distributors/'), distributor)
        client.response_handler = api.echo_handler
        cls.bodies = (
            {
                'schedule': None
            },  # 400
            {
                'unknown': 'parameter',
                'schedule': 'PT30S'
            },  # 400
            ['Incorrect data type'],  # 400
            {
                'missing_required_keys': 'schedule'
            },  # 400
            {
                'schedule': 'PT30S'
            },  # tests incorrect distributor in url, 404
            {
                'schedule': 'PT30S'
            },  # tests incorrect repo in url, 404
        )
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        bad_distributor_url = '/'.join(
            ['distributors',
             utils.uuid4(), 'schedules/publish/'])
        bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
        cls.paths = (urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], bad_distributor_url),
                     urljoin(bad_repo_path, scheduling_url))
        cls.status_codes = (400, 400, 400, 400, 404, 404)
        cls.responses = [
            client.post(path, req_body)
            for path, req_body in zip(cls.paths, cls.bodies)
        ]
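
Because the client is switched to ``api.echo_handler``, the scheduling calls return raw responses instead of raising on HTTP errors, so the test methods can compare each response against its expected status code. A minimal companion sketch, not from the source:

    def test_status_codes(self):
        """Assert each scheduling call failed with the expected code."""
        for body, response, code in zip(
                self.bodies, self.responses, self.status_codes):
            with self.subTest(body=body):
                self.assertEqual(response.status_code, code)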
Example #24
 def create_sync_repo(self, feed):
     """Create and sync a repository given a feed."""
     body = gen_repo(importer_config={'feed': feed},
                     distributors=[gen_distributor()])
     # Use on_demand since it's the default policy used by Satellite.
     body['importer_config']['download_policy'] = 'on_demand'
     repo = self.client.post(REPOSITORY_PATH, body)
     self.addCleanup(self.client.delete, repo['_href'])
     sync_repo(self.cfg, repo)
     return self.client.get(repo['_href'], params={'details': True})
Example #25
 @classmethod
 def setUpClass(cls):
     """Create class-wide variables."""
     cls.cfg = config.get_config()
     cls.client = api.Client(cls.cfg, api.json_handler)
     body = gen_repo(importer_config={'feed': RPM_UNSIGNED_FEED_URL},
                     distributors=[gen_distributor()])
     cls.repo = cls.client.post(REPOSITORY_PATH, body)
     cls.repo = cls.client.get(cls.repo['_href'], params={'details': True})
     sync_repo(cls.cfg, cls.repo)
     cls.errata = _gen_errata()
Example #26
    def test_all(self):
        """Test whether ``httpd`` dispatches a task while the broker is down.

        This test targets the following issues:

        * `Pulp Smash #650 <https://github.com/PulpQE/pulp-smash/issues/650>`_
        * `Pulp #2770 <https://pulp.plan.io/issues/2770>`_

        This test does the following:

        1. Create a repository.
        2. Stop the AMQP broker. (Also, schedule it to be re-started later!)
        3. Sync the repository, ignore any errors that are returned when doing
           so, and assert that no tasks are left in the ``waiting`` state.
        """
        cfg = config.get_config()
        if not selectors.bug_is_fixed(2770, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2770')

        # Create a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Stop the AMQP broker.
        broker = [get_broker(cfg)]
        svc_mgr = cli.GlobalServiceManager(cfg)
        svc_mgr.stop(broker)
        self.addCleanup(svc_mgr.start, broker)

        # Sync the repo, and assert no tasks are left in the waiting state.
        try:
            sync_repo(cfg, repo)
        except HTTPError:
            pass
        tasks = client.post(
            urljoin(TASKS_PATH, 'search/'), {
                'criteria': {
                    'fields': [
                        'finish_time',
                        'start_time',
                        'state',
                        'tags',
                        'task_id',
                    ],
                    'filters': {
                        'state': {
                            '$in': ['waiting']
                        }
                    },
                }
            })
        self.assertEqual(len(tasks), 0, tasks)
Example #27
    def test_all(self):
        """Sync a repo whose updateinfo file has multiple pkglist sections.

        Specifically, do the following:

        1. Create, sync and publish an RPM repository whose feed is set to
           ``pulp_2_tests.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL``.
        2. Fetch and parse the published repository's ``updateinfo.xml`` file.

        Verify that the ``updateinfo.xml`` file has three packages whose
        ``<filename>`` elements have the following text:

        * penguin-0.9.1-1.noarch.rpm
        * shark-0.1-1.noarch.rpm
        * walrus-5.21-1.noarch.rpm

        Note that Pulp is free to change the structure of a source repository
        at will. For example, even though the source repository has three
        ``<collection>`` elements, the published repository can have one, two
        or three ``<collection>`` elements. Assertions are not made about
        these details.
        """
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        if not selectors.bug_is_fixed(2277, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2277')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        sync_repo(cfg, repo)
        publish_repo(cfg, repo)

        # Fetch and parse ``updateinfo.xml``.
        updates_element = get_repodata(
            cfg, repo['distributors'][0], 'updateinfo')

        # Verify the ``updateinfo.xml`` file.
        debug = ElementTree.tostring(updates_element)
        filename_elements = updates_element.findall(
            'update/pkglist/collection/package/filename')
        filenames = [
            filename_element.text for filename_element in filename_elements
        ]
        filenames.sort()
        self.assertEqual(filenames, [
            'penguin-0.9.1-1.noarch.rpm',
            'shark-0.1-1.noarch.rpm',
            'walrus-5.21-1.noarch.rpm',
        ], debug)
Example #28
 @classmethod
 def setUpClass(cls):
     """Create and sync a repository."""
     super().setUpClass()
     client = api.Client(cls.cfg, api.json_handler)
     body = gen_repo()
     body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
     body['distributors'] = [gen_distributor()]
     repo = client.post(REPOSITORY_PATH, body)
     cls.resources.add(repo['_href'])
     sync_repo(cls.cfg, repo)
     cls.repo = client.get(repo['_href'], params={'details': True})
Example #29
    def _create_sync_repo(self, feed_url):
        """Create a repository with the given feed and sync it.

        Return the repository's href.
        """
        self.addCleanup(self.client.delete, ORPHANS_PATH)
        body = gen_repo()
        body['importer_config']['feed'] = feed_url
        repo = self.client.post(REPOSITORY_PATH, body)
        self.addCleanup(self.client.delete, repo['_href'])
        sync_repo(self.cfg, repo)
        return repo['_href']
Example #30
 @classmethod
 def setUpClass(cls):
     """Create and sync a repository."""
     if inspect.getmro(cls)[0] == BaseSearchTestCase:
         raise unittest.SkipTest('Abstract base class.')
     super().setUpClass()
     if check_issue_2620(cls.cfg):
         raise unittest.SkipTest('https://pulp.plan.io/issues/2620')
     body = gen_repo()
     body['importer_config']['feed'] = cls.get_feed_url()
     cls.repo = api.Client(cls.cfg).post(REPOSITORY_PATH, body).json()
     cls.resources.add(cls.repo['_href'])
     sync_repo(cls.cfg, cls.repo)
Example #31
 def test_post(self):
     """Assert the function makes an HTTP POST request."""
     with mock.patch.object(api, 'Client') as client:
         response = sync_repo(mock.Mock(), {'_href': 'http://example.com'})
     self.assertIs(response, client.return_value.post.return_value)