Example #1
    def setUpClass(cls):
        """Intentionally fail at creating several sync schedules for a repo.

        Each schedule tests a different failure scenario.
        """
        super(CreateFailureTestCase, cls).setUpClass()
        href, importer_type_id = cls.create_repo()

        # We'll need these below.
        scheduling_path = _SCHEDULE_PATH.format(importer_type_id)
        scheduling_path_bad = _SCHEDULE_PATH.format(utils.uuid4())
        bad_repo_path = "{}/{}/".format(REPOSITORY_PATH, utils.uuid4())

        # POST each body to its path. Save the responses and expected status codes.
        client = api.Client(cls.cfg)
        client.response_handler = api.echo_handler
        paths = (
            urljoin(href, scheduling_path),
            urljoin(href, scheduling_path),
            urljoin(href, scheduling_path),
            urljoin(href, scheduling_path),
            urljoin(href, scheduling_path_bad),
            urljoin(bad_repo_path, scheduling_path),
        )
        cls.bodies = (
            {"schedule": None},  # 400
            {"schedule": "PT30S", "unknown": "parameter"},  # 400
            ["Incorrect data type"],  # 400
            {"missing_required_keys": "schedule"},  # 400
            _SCHEDULE,  # tests incorrect importer in url, 404
            _SCHEDULE,  # tests incorrect repo in url, 404
        )
        cls.responses = tuple((client.post(path, body) for path, body in zip(paths, cls.bodies)))
        cls.status_codes = (400, 400, 400, 400, 404, 404)
Example #2
    def setUpClass(cls):
        """Create three repositories and read, update and delete them."""
        super(ReadUpdateDeleteTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)
        cls.repos = tuple((
            client.post(REPOSITORY_PATH, {'id': utils.uuid4()})
            for _ in range(3)
        ))
        cls.responses = {}
        client.response_handler = api.safe_handler

        # Read the first repo
        path = cls.repos[0]['_href']
        cls.responses['read'] = client.get(path)
        for key in {'importers', 'distributors', 'details'}:
            cls.responses['read_' + key] = client.get(path, params={key: True})

        # Update the second repo
        path = cls.repos[1]['_href']
        cls.update_body = {'delta': {
            key: utils.uuid4() for key in {'description', 'display_name'}
        }}
        cls.responses['update'] = client.put(path, cls.update_body)

        # Delete the third.
        cls.responses['delete'] = client.delete(cls.repos[2]['_href'])
Example #3
 def test_02_read_invalid_date(self):
     """Read a task by an invalid date."""
     with self.assertRaises(HTTPError):
         self.filter_tasks({
             'finished_at': utils.uuid4(),
             'started_at': utils.uuid4()
         })
Example #4
    def setUpClass(cls):
        """Create three users and read, update and delete them respectively."""
        super(ReadUpdateDeleteTestCase, cls).setUpClass()

        # Create three users and save their attributes.
        client = api.Client(cls.cfg, response_handler=api.json_handler)
        hrefs = [
            client.post(USER_PATH, {'login': utils.uuid4()})['_href']
            for _ in range(3)
        ]

        # Read, update and delete the users, and save the raw responses.
        client.response_handler = api.safe_handler
        cls.update_body = {'delta': {
            'name': utils.uuid4(),
            'password': utils.uuid4(),
            'roles': ['super-users'],
        }}
        cls.responses = {}
        cls.responses['read'] = client.get(hrefs[0])
        cls.responses['update'] = client.put(hrefs[1], cls.update_body)
        cls.responses['delete'] = client.delete(hrefs[2])

        # Read, update and delete the deleted user, and save the raw responses.
        client.response_handler = api.echo_handler
        cls.responses['read deleted'] = client.get(hrefs[2])
        cls.responses['update deleted'] = client.put(hrefs[2], {})
        cls.responses['delete deleted'] = client.delete(hrefs[2])

        # Mark resources to be deleted.
        cls.resources = {hrefs[0], hrefs[1]}
Example #5
    def setUpClass(cls):
        """Create a user and add it to the 'super-users' role.

        Search for:

        * Nothing at all.
        * All users having only the super-users role.
        * All users having no roles.
        * A user by their login.
        * A non-existent user by their login.
        """
        super(SearchTestCase, cls).setUpClass()

        # Create a super-user.
        client = api.Client(cls.cfg, response_handler=api.json_handler)
        cls.user = client.post(USER_PATH, {'login': utils.uuid4()})
        client.put(cls.user['_href'], {'delta': {'roles': ['super-users']}})
        cls.user = client.get(cls.user['_href'])

        # Formulate and execute searches, and save raw responses.
        client.response_handler = api.safe_handler
        cls.searches = tuple((
            {'criteria': {}},
            {'criteria': {'filters': {'roles': ['super-users']}}},
            {'criteria': {'filters': {'roles': []}}},
            {'criteria': {'filters': {'login': cls.user['login']}}},
            {'criteria': {'filters': {'login': utils.uuid4()}}},
        ))
        cls.responses = tuple((
            client.post(USER_PATH + 'search/', search)
            for search in cls.searches
        ))
Example #6
    def setUpClass(cls):
        """Create a repository and add an importer and distributor to it.

        Do the following:

        1. Create a repository.
        2. Read the repository's importers and distributors.
        3. Add an importer and distributor to the repo.
        4. Re-read the repository's importers and distributors.
        """
        super(AddImporterDistributorTestCase, cls).setUpClass()
        if cls.cfg.version >= Version("2.10") and selectors.bug_is_untestable(2082, cls.cfg.version):
            raise SkipTest("https://pulp.plan.io/issues/2082")

        # Steps 1 and 2.
        client = api.Client(cls.cfg, api.json_handler)
        href = client.post(REPOSITORY_PATH, {"id": utils.uuid4()})["_href"]
        cls.resources.add(href)
        cls.pre_imp = client.get(urljoin(href, "importers/"))
        cls.pre_dist = client.get(urljoin(href, "distributors/"))

        # Steps 3 and 4.
        client.response_handler = api.safe_handler
        cls.add_imp = client.post(urljoin(href, "importers/"), {"importer_type_id": "iso_importer"})
        cls.add_dist = client.post(
            urljoin(href, "distributors/"),
            {"distributor_config": {}, "distributor_id": utils.uuid4(), "distributor_type_id": "iso_distributor"},
        )
        client.response_handler = api.json_handler
        cls.post_imp = client.get(urljoin(href, "importers/"))
        cls.post_dist = client.get(urljoin(href, "distributors/"))
Example #7
def _gen_distributor():
    """Return a semi-random dict for use in creating a YUM distributor."""
    return {
        "auto_publish": False,
        "distributor_id": utils.uuid4(),
        "distributor_type_id": "yum_distributor",
        "distributor_config": {"http": True, "https": True, "relative_url": utils.uuid4() + "/"},
    }
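A minimal usage sketch for the helper above, assuming the same pulp_smash environment as the surrounding examples; the repository created here and the ``REPOSITORY_PATH`` import are illustrative assumptions, mirroring the pattern in Examples #6 and #21.
from urllib.parse import urljoin

from pulp_smash import api, config, utils
from pulp_smash.constants import REPOSITORY_PATH

# Hypothetical sketch: create a repository, then attach the generated
# YUM distributor to it.
cfg = config.get_config()
client = api.Client(cfg, api.json_handler)
repo = client.post(REPOSITORY_PATH, {"id": utils.uuid4()})
distributor = client.post(urljoin(repo["_href"], "distributors/"), _gen_distributor())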
Example #8
 def test_success(self):
     """Assert the method returns a path when a config file is found."""
     with mock.patch.object(xdg.BaseDirectory, 'load_config_paths') as lcp:
         lcp.return_value = ('an_iterable', 'of_xdg', 'config_paths')
         with mock.patch.object(os.path, 'isfile') as isfile:
             isfile.return_value = True
             # pylint:disable=protected-access
             config._get_config_file_path(utils.uuid4(), utils.uuid4())
     self.assertGreater(isfile.call_count, 0)
Example #9
    def setUpClass(cls):
        """Create three repositories and read, update and delete them."""
        super(ReadUpdateDeleteTestCase, cls).setUpClass()
        cls.bodies = {
            'read': {
                'distributors': [_DISTRIBUTOR],
                'id': utils.uuid4(),
                'importer_config': {},
                'importer_type_id': 'iso_importer',
                'notes': {'this': 'one'},
            },
            'update': {  # like read, minus notes…
                'description': utils.uuid4(),  # plus this
                'display_name': utils.uuid4(),  # and this
                'distributors': [_DISTRIBUTOR],
                'id': utils.uuid4(),
                'importer_config': {},
                'importer_type_id': 'iso_importer',
            },
            'delete': {  # like read…
                'description': utils.uuid4(),  # plus this
                'display_name': utils.uuid4(),  # and this
                'distributors': [_DISTRIBUTOR],
                'id': utils.uuid4(),
                'importer_config': {},
                'importer_type_id': 'iso_importer',
                'notes': {utils.uuid4(): utils.uuid4()},
            },
        }
        cls.update_body = {'delta': {
            key: utils.uuid4() for key in ('description', 'display_name')
        }}
        cls.responses = {}

        # Create repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = {
            key: client.post(REPOSITORY_PATH, body)
            for key, body in cls.bodies.items()
        }
        for key in {'read', 'update'}:
            cls.resources.add(repos[key]['_href'])

        # Read, update and delete the repositories.
        client.response_handler = api.safe_handler
        cls.responses['read'] = client.get(repos['read']['_href'])
        for key in {'importers', 'distributors', 'details'}:
            cls.responses['read_' + key] = client.get(
                repos['read']['_href'],
                params={key: True},
            )
        cls.responses['update'] = client.put(
            repos['update']['_href'],
            cls.update_body,
        )
        cls.responses['delete'] = client.delete(repos['delete']['_href'])
Example #10
 def test_failures(self):
     """Assert the  method raises an exception when no config is found."""
     with mock.patch.object(xdg.BaseDirectory, 'load_config_paths') as lcp:
         lcp.return_value = ('an_iterable', 'of_xdg', 'config_paths')
         with mock.patch.object(os.path, 'isfile') as isfile:
             isfile.return_value = False
             with self.assertRaises(exceptions.ConfigFileNotFoundError):
                 # pylint:disable=protected-access
                 config._get_config_file_path(utils.uuid4(), utils.uuid4())
     self.assertGreater(isfile.call_count, 0)
Example #11
def _gen_distributor():
    """Return a semi-random dict for use in creating a YUM distributor."""
    return {
        'auto_publish': False,
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'yum_distributor',
        'distributor_config': {
            'http': True,
            'https': True,
            'relative_url': utils.uuid4() + '/',
        },
    }
Example #12
 def setUpClass(cls):
     """Create an ISO RPM repo with an importer and distributor."""
     super(CreateTestCase, cls).setUpClass()
     cls.body = {
         'description': utils.uuid4(),
         'display_name': utils.uuid4(),
         'distributors': [_DISTRIBUTOR],
         'id': utils.uuid4(),
         'importer_config': {},
         'importer_type_id': 'iso_importer',
         'notes': {utils.uuid4(): utils.uuid4()},
     }
     cls.response = api.Client(cls.cfg).post(REPOSITORY_PATH, cls.body)
Example #13
 def setUpClass(cls):
     """Create an ISO RPM repo with an importer and distributor."""
     super(CreateTestCase, cls).setUpClass()
     cls.body = {
         "description": utils.uuid4(),
         "display_name": utils.uuid4(),
         "distributors": [_DISTRIBUTOR],
         "id": utils.uuid4(),
         "importer_config": {},
         "importer_type_id": "iso_importer",
         "notes": {utils.uuid4(): utils.uuid4()},
     }
     cls.response = api.Client(cls.cfg).post(REPOSITORY_PATH, cls.body)
Example #14
    def setUpClass(cls):
        """Create several schedules.

        Each schedule is created to test a different failure scenario.
        """
        super(CreateFailureTestCase, cls).setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Add a distributor
        distributor = gen_distributor()
        client.post(
            urljoin(repo['_href'], 'distributors/'),
            distributor
        )
        client.response_handler = api.echo_handler
        cls.bodies = (
            {'schedule': None},  # 400
            {'unknown': 'parameter', 'schedule': 'PT30S'},  # 400
            ['Incorrect data type'],  # 400
            {'missing_required_keys': 'schedule'},  # 400
            {'schedule': 'PT30S'},  # tests incorrect distributor in url, 404
            {'schedule': 'PT30S'},  # tests incorrect repo in url, 404
        )
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        bad_distributor_url = '/'.join([
            'distributors', utils.uuid4(), 'schedules/publish/'
        ])
        bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
        cls.paths = (
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], bad_distributor_url),
            urljoin(bad_repo_path, scheduling_url)
        )
        cls.status_codes = (400, 400, 400, 400, 404, 404)
        cls.responses = [
            client.post(path, req_body) for path, req_body in zip(
                cls.paths, cls.bodies)
        ]
Example #15
    def setUpClass(cls):
        """Create a value for the rsync distrib's ``remote`` config section.

        Using the same config for each of the test methods allows the test
        methods to behave more similarly.
        """
        cls.cfg = config.get_config()
        ssh_user = utils.uuid4()[:12]
        cls.remote = {
            'host': 'example.com',
            'root': '/home/' + ssh_user,
            'ssh_identity_file': '/' + utils.uuid4(),
            'ssh_user': ssh_user,
        }
        cls._remote = cls.remote.copy()
Example #16
def _gen_attrs():
    """Generate attributes for populating a ``ServerConfig``.

    Example usage: ``ServerConfig(**_gen_attrs())``.

    :returns: A dict. It populates all attributes in a ``ServerConfig``.
    """
    attrs = {
        key: utils.uuid4() for key in ('base_url', 'cli_transport', 'verify')
    }
    attrs['auth'] = [utils.uuid4() for _ in range(2)]
    attrs['version'] = '.'.join(
        type('')(random.randint(1, 150)) for _ in range(4)
    )
    return attrs
Example #17
    def make_repo(self, cfg, remote):
        """Create a repository with an importer and pair of distributors.

        Create an RPM repository with:

        * A yum importer with a valid feed.
        * A yum distributor.
        * An RPM rsync distributor referencing the yum distributor.

        In addition, schedule the repository for deletion.

        :param pulp_smash.config.ServerConfig cfg: Information about the Pulp
            server being targeted.
        :param remote: A dict for the RPM rsync distributor's ``remote``
            section.
        :returns: The repository's href, as a string.
        """
        api_client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'].append({
            'distributor_id': utils.uuid4(),
            'distributor_type_id': 'rpm_rsync_distributor',
            'distributor_config': {
                'predistributor_id': body['distributors'][0]['distributor_id'],
                'remote': remote,
            }
        })
        repo_href = api_client.post(REPOSITORY_PATH, body)['_href']
        self.addCleanup(api_client.delete, repo_href)
        return repo_href
Example #18
    def test_force_sync(self):
        """Test whether one can force Pulp to perform a full sync."""
        cfg = config.get_config()
        if selectors.bug_is_untestable(1982, cfg.version):
            self.skipTest("https://pulp.plan.io/issues/1982")

        # Create and sync a repository.
        client = cli.Client(cfg)
        repo_id = utils.uuid4()
        client.run("pulp-admin rpm repo create --repo-id {} --feed {}".format(repo_id, RPM_SIGNED_FEED_URL).split())
        self.addCleanup(client.run, "pulp-admin rpm repo delete --repo-id {}".format(repo_id).split())
        sync_repo(cfg, repo_id)

        # Delete a random RPM
        rpms = self._list_rpms(cfg)
        client.run("{} rm -rf {}".format("sudo" if not is_root(cfg) else "", random.choice(rpms)).split())
        with self.subTest(comment="Verify the RPM was removed."):
            self.assertEqual(len(self._list_rpms(cfg)), len(rpms) - 1)

        # Sync the repository *without* force_sync.
        sync_repo(cfg, repo_id)
        with self.subTest(comment="Verify the RPM has not been restored."):
            self.assertEqual(len(self._list_rpms(cfg)), len(rpms) - 1)

        # Sync the repository again, this time with force_sync.
        sync_repo(cfg, repo_id, force_sync=True)
        with self.subTest(comment="Verify the RPM has been restored."):
            self.assertEqual(len(self._list_rpms(cfg)), len(rpms))
Example #19
    def make_repo(self, cfg, dist_cfg_updates):
        """Create a repository with an importer and pair of distributors.

        Create an RPM repository with:

        * A yum importer with a valid feed.
        * A yum distributor.
        * An RPM rsync distributor referencing the yum distributor.

        In addition, schedule the repository for deletion.

        :param pulp_smash.config.ServerConfig cfg: Information about the Pulp
            server being targeted.
        :param dist_cfg_updates: A dict to be merged into the RPM rsync
            distributor's ``distributor_config`` dict. At a minimum, this
            argument should have a value of ``{'remote': {…}}``.
        :returns: A detailed dict of information about the repo.
        """
        api_client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'].append({
            'distributor_id': utils.uuid4(),
            'distributor_type_id': 'rpm_rsync_distributor',
            'distributor_config': {
                'predistributor_id': body['distributors'][0]['distributor_id'],
            }
        })
        body['distributors'][1]['distributor_config'].update(dist_cfg_updates)
        repo = api_client.post(REPOSITORY_PATH, body)
        self.addCleanup(api_client.delete, repo['_href'])
        return api_client.get(repo['_href'], params={'details': True})
Example #20
 def test_01_create_task(self):
     """Create a task."""
     repo = self.client.post(REPO_PATH, gen_repo())
     self.addCleanup(self.client.delete, repo['_href'])
     attrs = {'description': utils.uuid4()}
     response = self.client.patch(repo['_href'], attrs)
     self.task.update(self.client.get(response['task']))
Example #21
    def setUpClass(cls):
        """Create distributors with legal and illegal relative paths."""
        super(CreateDistributorsTestCase, cls).setUpClass()
        cls.responses = []

        relative_paths = [_gen_rel_path(), _gen_rel_path(), _gen_rel_path(3)]
        relative_paths.append(relative_paths[0])
        relative_paths.append(relative_paths[0] + '/' + utils.uuid4())
        relative_paths.append('/' + relative_paths[0])

        # Create two repositories
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])  # mark for deletion

        # Create a distributor for the first repository
        client.response_handler = api.echo_handler
        path = urljoin(repos[0]['_href'], 'distributors/')
        body = _gen_distributor(relative_paths[0])
        cls.responses.append(client.post(path, body))

        # Create distributors for the second repository
        path = urljoin(repos[1]['_href'], 'distributors/')
        for relative_path in relative_paths[1:]:
            body = _gen_distributor(relative_path)
            cls.responses.append(client.post(path, body))
Example #22
    def test_sync_downloaded_content(self):
        """Create two repositories with the same feed, and sync them serially.

        More specifically, this test creates two puppet repositories with
        identical feeds, syncs them serially, and verifies that both have equal
        non-zero content unit counts.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(1937, cfg.version):
            self.skipTest('https://pulp.plan.io/issues/1937')
        utils.pulp_admin_login(cfg)

        # Create two repos, schedule them for deletion, and sync them.
        client = cli.Client(cfg)
        repo_ids = [utils.uuid4() for _ in range(2)]
        for repo_id in repo_ids:
            client.run((
                'pulp-admin puppet repo create '
                '--repo-id {} --feed {} --queries {}'
            ).format(repo_id, PUPPET_FEED, PUPPET_QUERY).split())
            self.addCleanup(client.run, (
                'pulp-admin puppet repo delete --repo-id {}'
            ).format(repo_id).split())
            client.run((
                'pulp-admin puppet repo sync run --repo-id {}'
            ).format(repo_id).split())

        # Verify the number of puppet modules in each repository.
        unit_counts = [
            get_num_units_in_repo(cfg, repo_id) for repo_id in repo_ids
        ]
        for i, unit_count in enumerate(unit_counts):
            with self.subTest(i=i):
                self.assertGreater(unit_count, 0)
        self.assertEqual(unit_counts[0], unit_counts[1])
Example #23
    def test_publish_override_config(self):
        """Use the ``packages_directory`` publish override option.

        Create a distributor with default options, and use it to publish the
        repository. Specify the ``packages_directory`` option during the
        publish as an override option. Verify packages end up in the specified
        directory, relative to the published repository's root.
        """
        if selectors.bug_is_untestable(1976, self.cfg.version):
            self.skipTest('https://pulp.plan.io/issues/1976')
        client = api.Client(self.cfg, api.json_handler)
        distributor = client.post(
            urljoin(self.repo_href, 'distributors/'),
            gen_distributor(),
        )
        packages_dir = utils.uuid4()
        client.post(urljoin(self.repo_href, 'actions/publish/'), {
            'id': distributor['id'],
            'override_config': {'packages_directory': packages_dir},
        })
        primary_xml = get_parse_repodata_primary_xml(self.cfg, distributor)
        package_hrefs = get_package_hrefs(primary_xml)
        self.assertGreater(len(package_hrefs), 0)
        for package_href in package_hrefs:
            with self.subTest(package_href=package_href):
                self.assertEqual(os.path.dirname(package_href), packages_dir)
Example #24
def _gen_realistic_group():
    """Return a realistic, typical group unit.

    Most supported fields are filled in on this unit, and there are a few
    translated strings.
    """
    return {
        'id': utils.uuid4(),
        'name': 'Additional Development',
        'translated_name': {'es': 'Desarrollo adicional', 'zh_CN': '附加开发'},
        'description': (
            'Additional development headers and libraries for building '
            'open-source applications'
        ),
        'translated_description': {
            'es': (
                'Encabezados adicionales y bibliotecas para compilar '
                'aplicaciones de código abierto.'
            ),
            'zh_CN': '用于构建开源应用程序的附加开发标头及程序可。',
        },
        'default': True,
        'user_visible': True,
        'display_order': 55,
        'mandatory_package_names': ['PyQt4-devel', 'SDL-devel'],
        'default_package_names': ['perl-devel', 'polkit-devel'],
        'optional_package_names': ['binutils-devel', 'python-devel'],
        'conditional_package_names': [
            ('perl-Test-Pod', 'perl-devel'),
            ('python-setuptools', 'python-devel')
        ],
    }
Example #25
 def setUpClass(cls):
     """Create a content source."""
     super(RefreshAndDeleteContentSourcesTestCase, cls).setUpClass()
     cls.cfg = config.get_config()
     if cls.cfg.version < Version('2.8.6'):
         raise unittest.SkipTest('This test requires at least 2.8.6')
     pulp_admin_login(cls.cfg)
     cls.client = cli.Client(cls.cfg)
     cls.content_source_id = uuid4()
     content_source_path = generate_content_source(
         cls.cfg,
         cls.content_source_id,
         enabled='1',
         type='yum',
         base_url=RPM_SIGNED_FEED_URL,
     )
     sudo = '' if is_root(cls.cfg) else 'sudo '
     cls.responses = [
         cls.client.run(
             'pulp-admin content sources refresh'.split()
         ),
         _get_content_source_ids(cls.cfg),
         cls.client.run(
             'pulp-admin content sources refresh --source-id {}'
             .format(cls.content_source_id).split()
         ),
     ]
     cls.client.run(
         '{}rm -f {}'.format(sudo, content_source_path).split())
     cls.responses.append(_get_content_source_ids(cls.cfg))
Example #26
def gen_repo():
    """Return a semi-random dict that used for creating a Docker repo."""
    return {
        'id': utils.uuid4(), 'importer_config': {},
        'importer_type_id': 'docker_importer',
        'notes': {'_repo-type': 'docker-repo'},
    }
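A hedged sketch of the intended call site, mirroring Example #30; the ``REPOSITORY_PATH`` import is an assumption.
from pulp_smash import api, config
from pulp_smash.constants import REPOSITORY_PATH

# Create a Docker repository from the semi-random body and record its href
# (gen_repo() above relies on `from pulp_smash import utils` at module level).
cfg = config.get_config()
client = api.Client(cfg, api.json_handler)
repo = client.post(REPOSITORY_PATH, gen_repo())
repo_href = repo['_href']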
Example #27
def _gen_minimal_group():
    """Return a group unit which is as empty as possible.

    This unit omits every non-mandatory field (which, in practice, means that
    it includes only an 'id').
    """
    return {'id': utils.uuid4()}
Example #28
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip tests. Create and sync an RPM repository.

    Skip tests in this module if the RPM plugin is not installed on the target
    Pulp server. Then create an RPM repository with a feed and sync it. Test
    cases may copy data from this repository but should **not** change it.
    """
    set_up_module()
    cfg = config.get_config()
    client = cli.Client(config.get_config())

    # log in, then create repository
    utils.pulp_admin_login(cfg)
    global _REPO_ID  # pylint:disable=global-statement
    _REPO_ID = utils.uuid4()
    client.run(
        'pulp-admin rpm repo create --repo-id {} --feed {}'
        .format(_REPO_ID, constants.RPM_FEED_URL).split()
    )

    # If setUpModule() fails, tearDownModule() isn't run. In addition, we can't
    # use addCleanup(), as it's an instance method. If this set-up procedure
    # grows, consider implementing a stack of tear-down steps instead.
    try:
        client.run(
            'pulp-admin rpm repo sync run --repo-id {}'
            .format(_REPO_ID).split()
        )
    except subprocess.CalledProcessError:
        client.run(
            'pulp-admin rpm repo delete --repo-id {}'.format(_REPO_ID).split()
        )
        raise
Example #29
def gen_repo():
    """Return a semi-random dict for use in creating a Python repository."""
    return {
        'id': utils.uuid4(),
        'importer_config': {},
        'importer_type_id': 'python_importer',
    }
Example #30
    def setUpClass(cls):
        """Bind a consumer to a distributor.

        Do the following:

        1. Add a consumer.
        2. Add a repository.
        3. Add a distributor to the repository.
        4. Bind the consumer to the distributor.
        """
        super(BindConsumerTestCase, cls).setUpClass()

        # Steps 1–3
        client = api.Client(cls.cfg, api.json_handler)
        cls.consumer = client.post(CONSUMER_PATH, {'id': utils.uuid4()})
        repository = client.post(REPOSITORY_PATH, gen_repo())
        distributor = client.post(
            urljoin(repository['_href'], 'distributors/'),
            gen_distributor()
        )
        cls.resources.add(repository['_href'])

        # Step 4
        client.response_handler = api.safe_handler
        path = urljoin(CONSUMER_PATH, cls.consumer['consumer']['id'] + '/')
        path = urljoin(path, 'bindings/')
        cls.request = {
            'binding_config': {'B': 21},
            'distributor_id': distributor['id'],
            'notify_agent': False,
            'repo_id': distributor['repo_id'],
        }
        cls.response = client.post(path, cls.request)
Example #31
 def test_negative_create_using_spaces(self):
     """Test that spaces can not be part of ``base_path``."""
     self.try_create_distribution(base_path=utils.uuid4().replace("-", " "))
     self.try_update_distribution(base_path=utils.uuid4().replace("-", " "))
Example #32
# coding=utf-8
"""Tests that copy units from one repository to another."""
import os
import subprocess
import unittest
from io import StringIO
from urllib.parse import urljoin

from packaging.version import Version

from pulp_smash import cli, config, constants, selectors, utils
from pulp_smash.tests.rpm.cli.utils import count_langpacks
from pulp_smash.tests.rpm.utils import check_issue_2277, set_up_module
from pulp_smash.utils import is_root

_REPO_ID = utils.uuid4()
"""The ID of the repository created by ``setUpModule``."""


def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip tests. Create and sync an RPM repository.

    Skip tests in this module if the RPM plugin is not installed on the target
    Pulp server. Then create an RPM repository with a feed and sync it. Test
    cases may copy data from this repository but should **not** change it.
    """
    set_up_module()
    cfg = config.get_config()
    client = cli.Client(config.get_config())

    # log in, then create repository
Example #33
    def setUpClass(cls):
        """Upload puppet module to a repo, copy it to another, publish and download.

        Create two puppet repositories, both without feeds. Upload a module to
        the first repository. Copy its content to the second repository. Add
        distributors to the repositories, publish repositories and download
        modules back from them.
        """
        super().setUpClass()
        reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        cls.responses = {}
        cls.modules = []  # Raw puppet modules.

        # Download a puppet module and create two repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])
        client.response_handler = api.safe_handler
        cls.modules.append(utils.http_get(PUPPET_MODULE_URL_1))

        # Begin an upload request, upload a puppet module, move the puppet
        # module into a repository, and end the upload request.
        cls.responses['malloc'] = client.post(CONTENT_UPLOAD_PATH)
        cls.responses['upload'] = client.put(
            urljoin(cls.responses['malloc'].json()['_href'], '0/'),
            data=cls.modules[0],
        )
        cls.responses['import'] = client.post(
            urljoin(repos[0]['_href'], 'actions/import_upload/'),
            {
                'unit_key': {},
                'unit_type_id': 'puppet_module',
                'upload_id': cls.responses['malloc'].json()['upload_id'],
            },
        )
        cls.responses['free'] = client.delete(
            cls.responses['malloc'].json()['_href']
        )

        # Copy content from the first puppet repository to the second.
        cls.responses['copy'] = client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': repos[0]['id']}
        )

        # Add a distributor to each repository. Publish each repository.
        for key in {'distribute', 'publish'}:
            cls.responses[key] = []
        for repo in repos:
            cls.responses['distribute'].append(client.post(
                urljoin(repo['_href'], 'distributors/'),
                {
                    'auto_publish': False,
                    'distributor_id': utils.uuid4(),
                    'distributor_type_id': 'puppet_distributor',
                    'distributor_config': {
                        'serve_http': True,
                        'serve_https': True,
                        'relative_url': '/' + utils.uuid4(),
                    },
                }
            ))
            cls.responses['publish'].append(client.post(
                urljoin(repo['_href'], 'actions/publish/'),
                {'id': cls.responses['distribute'][-1].json()['id']},
            ))

        # Query both distributors using all three query forms.
        cls.responses['puppet releases'] = []
        author_name = PUPPET_MODULE_1['author'] + '/' + PUPPET_MODULE_1['name']
        for repo in repos:
            if not selectors.bug_is_fixed(1440, cls.cfg.pulp_version):
                continue
            cls.responses['puppet releases'].append(client.get(
                '/api/v1/releases.json',
                params={'module': author_name},
                auth=('.', repo['id']),
            ))
            cls.responses['puppet releases'].append(client.get(
                '/pulp_puppet/forge/repository/{}/api/v1/releases.json'
                .format(repo['id']),
                params={'module': author_name},
            ))
            if cls.cfg.pulp_version < Version('2.8'):
                continue
            cls.responses['puppet releases'].append(client.get(
                '/v3/releases',
                params={'module': author_name},
                auth=('repository', repo['id']),
            ))

        # Download each unit referenced by the queries above.
        for response in cls.responses['puppet releases']:
            body = response.json()
            if set(body.keys()) == {'pagination', 'results'}:  # Puppet >= 3.6
                path = body['results'][0]['file_uri']
            else:
                path = body[author_name][0]['file']
            cls.modules.append(client.get(path).content)

        # Search for all units in each of the two repositories.
        cls.responses['repo units'] = [
            search_units(cls.cfg, repo, {}, api.safe_handler)
            for repo in repos
        ]
Example #34
 def test_var_set(self):
     """Set the environment variable."""
     os_environ = {'PULP_SMASH_CONFIG_FILE': utils.uuid4()}
     with mock.patch.dict(os.environ, os_environ, clear=True):
         config_file = config.PulpSmashConfig._get_config_file()  # pylint:disable=protected-access
     self.assertEqual(config_file, os_environ['PULP_SMASH_CONFIG_FILE'])
Example #35
 def update_body():
     """Return a dict for creating a repository."""
     return {'delta': {'display_name': utils.uuid4()}}
Example #36
def gen_distribution(**kwargs):
    """Return a semi-random dict for use in creating a Distribution."""
    data = {"base_path": utils.uuid4(), "name": utils.uuid4()}
    data.update(kwargs)
    return data
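Because the helper merges ``kwargs`` over its defaults, callers can pin individual fields while keeping the rest semi-random; a small sketch, where the literal base path is only an illustration.
# Keep the random name but fix the base path.
body = gen_distribution(base_path="my-base-path")
assert body["base_path"] == "my-base-path"
assert "name" in body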
Example #37
 def test_filter_invalid_content(self):
     """Filter repository version by invalid content."""
     with self.assertRaises(HTTPError):
         get_versions(self.repo, {"content": utils.uuid4()})
Example #38
 def setUp(self):
     """Provide a server config and a repository ID."""
     self.cfg = config.get_config()
     self.repo_id = utils.uuid4()
     utils.pulp_admin_login(self.cfg)
Example #39
def gen_repo(**kwargs):
    """Return a semi-random dict for use in creating a Repository."""
    data = {"name": utils.uuid4()}
    data.update(kwargs)
    return data
Example #40
def gen_publisher(**kwargs):
    """Return a semi-random dict for use in creating an Publisher."""
    data = {"name": utils.uuid4()}
    data.update(kwargs)
    return data
Example #41
 def test_begin_slash(self):
     """Test that a slash cannot be at the beginning of ``base_path``."""
     self.try_create_distribution(base_path='/' + utils.uuid4())
     self.try_update_distribution(base_path='/' + utils.uuid4())
Example #42
 def test_end_slash(self):
     """Test that a slash cannot be at the end of ``base_path``."""
     self.try_create_distribution(base_path=utils.uuid4() + '/')
     self.try_update_distribution(base_path=utils.uuid4() + '/')
Example #43
 def setUpClass(cls):
     """Create a repository."""
     cls.client = cli.Client(config.get_config())
     cls.repo_id = utils.uuid4()
     cls.client.run('pulp-admin rpm repo create --repo-id {}'.format(
         cls.repo_id).split())
Example #44
 def test_spaces(self):
     """Test that spaces cannot be part of ``base_path``."""
     self.try_create_distribution(base_path=utils.uuid4().replace('-', ' '))
     self.try_update_distribution(base_path=utils.uuid4().replace('-', ' '))
Example #45
 def test_negative_searches(self):
     """Search for the repository with a non-matching repository ID."""
     for command in self.gen_commands(utils.uuid4()):
         with self.subTest(command=command):
             result = self.client.run(command.split())
             self.assertEqual(result.stdout.count('Id:'), 0, result)
Example #46
def gen_consumer():
    """Create a semi-random dict for use in creating a consumer."""
    return {'id': utils.uuid4()}
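A brief usage sketch, following the consumer creation in Example #30; the ``CONSUMER_PATH`` import is an assumption.
from pulp_smash import api, config
from pulp_smash.constants import CONSUMER_PATH

# Register a consumer with a semi-random ID (gen_consumer() above relies on
# `from pulp_smash import utils` at module level).
cfg = config.get_config()
client = api.Client(cfg, api.json_handler)
consumer = client.post(CONSUMER_PATH, gen_consumer())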
Example #47
 def create_body():
     """Return a dict, with a feed, for creating a repository."""
     body = CrudTestCase.create_body()
     body['importer_config'] = {'feed': 'http://' + utils.uuid4()}
     return body
Example #48
 def setUpClass(cls):
     """Provide a server config and a repository ID."""
     cls.cfg = config.get_config()
     cls.repo_id = utils.uuid4()
     utils.pulp_admin_login(cls.cfg)
Example #49
def _gen_rel_path(segments=2):
    """Return a semi-random relative path."""
    return '/'.join((utils.uuid4() for _ in range(segments)))
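A quick sketch of the helper's output shape, as exercised in Example #21, which calls it both with the default and with three segments.
# Two UUID segments joined by a slash by default; three when requested.
# (Assumes `from pulp_smash import utils` at module level, as the helper does.)
default_path = _gen_rel_path()
longer_path = _gen_rel_path(3)
assert default_path.count('/') == 1
assert longer_path.count('/') == 2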
Example #50
 def setUp(self):
     """Generate kwargs that can be used to instantiate a completed proc."""
     self.kwargs = {
         key: utils.uuid4()
         for key in {'args', 'returncode', 'stdout', 'stderr'}
     }
Example #51
    def test_positive(self):
        """Verify content is made available when appropriate.

        Specifically, do the following:

        1. Create a consumer.
        2. Bind the consumer to the repository created in :meth:`setUpClass`.
        3. Create a consumer profile where:

           * two packages are installed,
           * both packages' versions are lower than what's offered by the
             repository,
           * one of the corresponding packages in the repository has an
             applicable erratum, and
           * the other corresponding package in the repository doesn't have an
             applicable erratum.

        4. Regenerate applicability for the consumer.
        5. Fetch applicability for the consumer. Verify that both packages are
           listed as eligible for an upgrade.
        """
        # Create a consumer.
        client = api.Client(self.cfg, api.json_handler)
        consumer_id = utils.uuid4()
        consumer = client.post(CONSUMERS_PATH, {'id': consumer_id})
        self.addCleanup(client.delete, consumer['consumer']['_href'])

        # Bind the consumer.
        client.post(urljoin(CONSUMERS_PATH, consumer_id + '/bindings/'), {
            'distributor_id': self.repo['distributors'][0]['id'],
            'notify_agent': False,
            'repo_id': self.repo['id'],
        })

        # Create a consumer profile.
        rpm_with_erratum_metadata = RPM_WITH_ERRATUM_METADATA.copy()
        rpm_with_erratum_metadata['version'] = '4.0'
        rpm_without_erratum_metadata = RPM_WITHOUT_ERRATUM_METADATA.copy()
        rpm_without_erratum_metadata['version'] = '0.0.1'
        client.post(urljoin(CONSUMERS_PATH, consumer_id + '/profiles/'), {
            'content_type': 'rpm',
            'profile': [
                rpm_with_erratum_metadata,
                rpm_without_erratum_metadata,
            ]
        })

        # Regenerate applicability.
        client.post(CONSUMERS_ACTIONS_CONTENT_REGENERATE_APPLICABILITY_PATH, {
            'consumer_criteria': {'filters': {'id': {'$in': [consumer_id]}}}
        })

        # Fetch applicability.
        applicability = client.post(CONSUMERS_CONTENT_APPLICABILITY_PATH, {
            'content_types': ['rpm'],
            'criteria': {'filters': {'id': {'$in': [consumer_id]}}},
        })
        validate(applicability, CONTENT_APPLICABILITY_REPORT_SCHEMA)
        with self.subTest(comment='verify RPMs listed in report'):
            self.assertEqual(len(applicability[0]['applicability']['rpm']), 2)
        with self.subTest(comment='verify consumers listed in report'):
            self.assertEqual(applicability[0]['consumers'], [consumer_id])
Example #52
def gen_distributor():
    """Return a semi-random dict for use in creating a Python distributor."""
    return {
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'python_distributor',
    }
Example #53
 def setUp(self):
     """Set variables used by each test case."""
     self.cfg = config.get_config()
     self.repo = {'_href': urljoin(REPOSITORY_PATH, utils.uuid4())}
Example #54
def test_rbac_distribution(
    gen_user, rpm_repository_api, rpm_rpmremote_api, rpm_publication_api, rpm_distribution_api
):
    """Test RPM distribution CRUD."""
    user_creator = gen_user(
        model_roles=[
            "rpm.rpmdistribution_creator",
            "rpm.rpmpublication_owner",
            "rpm.rpmremote_owner",
            "rpm.rpmrepository_owner",
        ]
    )
    user_viewer = gen_user(
        model_roles=[
            "rpm.viewer",
            "rpm.rpmpublication_owner",
            "rpm.rpmremote_owner",
            "rpm.rpmrepository_owner",
        ]
    )
    user_no = gen_user(
        model_roles=[
            "rpm.rpmpublication_owner",
            "rpm.rpmremote_owner",
            "rpm.rpmrepository_owner",
        ]
    )

    distribution = None
    remote_data = gen_rpm_remote(RPM_UNSIGNED_FIXTURE_URL)
    remote = rpm_rpmremote_api.create(remote_data)
    repo = rpm_repository_api.create(gen_repo())
    sync_url = RpmRepositorySyncURL(remote=remote.pulp_href)
    sync_res = rpm_repository_api.sync(repo.pulp_href, sync_url)
    monitor_task(sync_res.task)
    publish_data = RpmRpmPublication(repository=repo.pulp_href)
    publish_response = rpm_publication_api.create(publish_data)
    created_resources = monitor_task(publish_response.task).created_resources
    publication = rpm_publication_api.read(created_resources[0])

    # Create
    dist_data = RpmRpmDistribution(
        name=uuid4(), publication=publication.pulp_href, base_path=uuid4()
    )
    with user_no, pytest.raises(ApiException) as exc:
        rpm_distribution_api.create(dist_data)
    assert exc.value.status == 403

    with user_viewer, pytest.raises(ApiException) as exc:
        rpm_distribution_api.create(dist_data)
    assert exc.value.status == 403

    with user_creator:
        res = rpm_distribution_api.create(dist_data)
        distribution = rpm_distribution_api.read(monitor_task(res.task).created_resources[0])
        assert rpm_distribution_api.list().count == 1

    # Update
    dist_data_to_update = rpm_distribution_api.read(distribution.pulp_href)
    new_name = uuid4()
    dist_data_to_update.name = new_name

    with user_no, pytest.raises(ApiException) as exc:
        rpm_distribution_api.update(distribution.pulp_href, dist_data_to_update)
    assert exc.value.status == 404

    with user_viewer, pytest.raises(ApiException) as exc:
        rpm_distribution_api.update(distribution.pulp_href, dist_data_to_update)
    assert exc.value.status == 403

    with user_creator:
        res = rpm_distribution_api.update(distribution.pulp_href, dist_data_to_update)
        monitor_task(res.task)
        assert rpm_distribution_api.list().count == 1
        assert new_name in rpm_distribution_api.list().results[0].name

    # Remove
    with user_no, pytest.raises(ApiException) as exc:
        rpm_distribution_api.delete(distribution.pulp_href)
    assert exc.value.status == 404

    with user_viewer, pytest.raises(ApiException) as exc:
        rpm_distribution_api.delete(distribution.pulp_href)
    assert exc.value.status == 403

    with user_creator:
        rpm_distribution_api.delete(distribution.pulp_href)
        rpm_publication_api.delete(publication.pulp_href)

        res = rpm_repository_api.delete(repo.pulp_href)
        monitor_task(res.task)

        res = rpm_rpmremote_api.delete(remote.pulp_href)
        monitor_task(res.task)

        assert rpm_distribution_api.list().count == 0
        assert rpm_repository_api.list().count == 0
        assert rpm_rpmremote_api.list().count == 0
Example #55
def gen_repo_group(**kwargs):
    """Return a semi-random dict for use in creating a RPM repository group."""
    data = {'id': utils.uuid4()}
    data.update(kwargs)
    return data
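As with the other ``gen_*`` helpers here, keyword arguments override the defaults; a minimal sketch, where the literal ID is only an illustration.
# Pin the group ID instead of using a random UUID.
body = gen_repo_group(id='my-repo-group')
assert body['id'] == 'my-repo-group'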
Example #56
 def test_02_read_invalid_worker(self):
     """Read a task using an invalid worker name."""
     with self.assertRaises(HTTPError):
         self.filter_tasks({"worker": utils.uuid4()})
Example #57
 def test_publish(self):
     """Publish a non-existent repository."""
     with self.assertRaises(HTTPError):
         publish_repo(self.cfg, self.repo, {'id': utils.uuid4()})
Example #58
 def test_type(self):
     """Assert the method returns a unicode string."""
     self.assertIsInstance(utils.uuid4(), type(''))
Example #59
 def test_var_set(self):
     """Set the environment variable."""
     os_environ = {"PULP_SMASH_CONFIG_FILE": utils.uuid4()}
     with mock.patch.dict(os.environ, os_environ, clear=True):
         config_file = config.PulpSmashConfig._get_config_file()
     self.assertEqual(config_file, os_environ["PULP_SMASH_CONFIG_FILE"])
Example #60
def test_rbac_acs(gen_user, rpm_acs_api, rpm_rpmremote_api):
    """Test RPM ACS CRUD."""
    user_creator = gen_user(
        model_roles=[
            "rpm.rpmalternatecontentsource_creator",
            "rpm.rpmremote_owner",
        ]
    )
    user_viewer = gen_user(
        model_roles=[
            "rpm.viewer",
            "rpm.rpmremote_owner",
        ]
    )
    user_no = gen_user(
        model_roles=[
            "rpm.rpmremote_owner",
        ]
    )

    acs = None
    remote_data = gen_rpm_remote(policy="on_demand")
    remote = rpm_rpmremote_api.create(remote_data)

    acs_data = {
        "name": uuid4(),
        "remote": remote.pulp_href,
    }

    # Create
    with user_no, pytest.raises(ApiException) as exc:
        rpm_acs_api.create(acs_data)
    assert exc.value.status == 403

    with user_viewer, pytest.raises(ApiException) as exc:
        rpm_acs_api.create(acs_data)
    assert exc.value.status == 403

    with user_creator:
        acs = rpm_acs_api.create(acs_data)
        assert rpm_acs_api.list().count == 1

    # Update & Read
    with user_no, pytest.raises(ApiException) as exc:
        rpm_acs_api.read(acs.pulp_href)
    assert exc.value.status == 404

    with user_viewer, pytest.raises(ApiException) as exc:
        acs_to_update = rpm_acs_api.read(acs.pulp_href)
        acs_to_update.paths[0] = "files/"
        rpm_acs_api.update(acs_to_update.pulp_href, acs_to_update)
    assert exc.value.status == 403

    with user_creator:
        acs_to_update = rpm_acs_api.read(acs.pulp_href)
        acs_to_update.paths[0] = "files/"
        response = rpm_acs_api.update(acs_to_update.pulp_href, acs_to_update)
        monitor_task(response.task)
        assert rpm_acs_api.list().count == 1
        assert "files/" in rpm_acs_api.read(acs.pulp_href).paths

    # Remove
    with user_no, pytest.raises(ApiException) as exc:
        rpm_acs_api.delete(acs.pulp_href)
    assert exc.value.status == 404

    with user_viewer, pytest.raises(ApiException) as exc:
        rpm_acs_api.delete(acs.pulp_href)
    assert exc.value.status == 403

    with user_creator:
        rpm_acs_api.delete(acs.pulp_href)
        rpm_rpmremote_api.delete(remote.pulp_href)
        assert rpm_acs_api.list().count == 0