Example #1
    def setUpClass(cls):
        """Create an RPM repository, sync it, and remove some units from it.

        After creating and syncing an RPM repository, we walk through the unit
        type IDs listed in
        :data:`pulp_smash.tests.rpm.api_v2.test_unassociate.RemoveUnitsTestCase.TYPE_IDS`
        and remove one unit of each kind from the repository. We verify Pulp's
        behaviour by recording repository contents before and after removal.
        """
        super(RemoveUnitsTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Remove one unit of each type.
        cls.units_before = _search_units(cls.cfg, repo['_href'], cls.TYPE_IDS)
        cls.units_removed = []
        for type_id in cls.TYPE_IDS:
            unit = random.choice(_get_units_by_type(cls.units_before, type_id))
            cls.units_removed.append(unit)
            _remove_unit(cls.cfg, repo['_href'], unit)
        cls.units_after = _search_units(cls.cfg, repo['_href'], cls.TYPE_IDS)
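The private helpers called here are not shown. A minimal sketch of what they might look like, assuming Pulp 2's repository unit-search and unassociate APIs and the module's existing ``api`` and ``urljoin`` imports; treat the bodies as reconstructions, not the originals:

def _search_units(cfg, repo_href, type_ids):
    """Search for content units of the given types in a repository."""
    # Assumption: POST to <repo>/search/units/ with a criteria document,
    # per Pulp 2's unit association search API.
    client = api.Client(cfg, api.json_handler)
    return client.post(
        urljoin(repo_href, 'search/units/'),
        {'criteria': {'type_ids': list(type_ids)}},
    )


def _get_units_by_type(units, type_id):
    """Return the subset of search results having the given type ID."""
    return [unit for unit in units if unit['unit_type_id'] == type_id]


def _remove_unit(cfg, repo_href, unit):
    """Unassociate a single content unit from a repository."""
    # Assumption: filtering on the association's unit_id uniquely
    # identifies the chosen unit.
    client = api.Client(cfg, api.json_handler)
    client.post(urljoin(repo_href, 'actions/unassociate/'), {
        'criteria': {
            'type_ids': [unit['unit_type_id']],
            'filters': {'association': {'unit_id': unit['unit_id']}},
        },
    })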
Example #2
    def setUpClass(cls):
        """Create an RPM repository, sync it, and remove some units from it.

        After creating and syncing an RPM repository, we walk through the unit
        type IDs listed in
        :data:`pulp_smash.tests.rpm.api_v2.test_unassociate.RemoveUnitsTestCase.TYPE_IDS`
        and remove one unit of each kind from the repository. We verify Pulp's
        behaviour by recording repository contents before and after removal.
        """
        super(RemoveUnitsTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Remove one unit of each type.
        cls.units_before = _search_units(cls.cfg, repo['_href'], cls.TYPE_IDS)
        cls.units_removed = []
        for type_id in cls.TYPE_IDS:
            unit = random.choice(_get_units_by_type(cls.units_before, type_id))
            cls.units_removed.append(unit)
            _remove_unit(cls.cfg, repo['_href'], unit)
        cls.units_after = _search_units(cls.cfg, repo['_href'], cls.TYPE_IDS)
Example #3
    def test_all(self):
        """Create and sync a puppet repository with no feed."""
        cfg = config.get_config()
        if selectors.bug_is_untestable(2628, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2628')

        # Create a repository.
        client = api.Client(cfg, api.json_handler)
        repo = client.post(REPOSITORY_PATH, gen_repo())
        self.addCleanup(client.delete, repo['_href'])

        # Sync the repository. An error *should* occur. We just want the error
        # to be sane.
        with self.assertRaises(exceptions.TaskReportError) as err:
            utils.sync_repo(cfg, repo)
        with self.subTest(comment='check task "error" field'):
            self.assertIsNotNone(err.exception.task['error'])
            self.assertNotEqual(
                err.exception.task['error']['description'],
                "'NoneType' object has no attribute 'endswith'")
            self.assertNotEqual(err.exception.task['error']['code'], 'PLP0000')
        with self.subTest(comment='check task "exception" field'):
            self.assertIsNone(err.exception.task['exception'])
        with self.subTest(comment='check task "traceback" field'):
            self.assertIsNone(err.exception.task['traceback'])
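Many of these snippets call ``gen_repo()`` and ``gen_distributor()`` without showing them; each plugin's test module has its own variant. An RPM-flavoured sketch with illustrative field values (the values are assumptions, not the originals):

def gen_repo():
    """Return a semi-random dict for use in creating an RPM repository."""
    return {
        'id': utils.uuid4(),
        'importer_type_id': 'yum_importer',
        'importer_config': {},
        'notes': {'_repo-type': 'rpm-repo'},
        'distributors': [],
    }


def gen_distributor():
    """Return a semi-random dict for use in creating a yum distributor."""
    return {
        'auto_publish': False,
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'yum_distributor',
        'distributor_config': {
            'http': True,
            'https': True,
            'relative_url': utils.uuid4() + '/',
        },
    }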
Example #4
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test cases
    may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.version < Version("2.9"):
        raise unittest2.SkipTest("This module requires Pulp 2.9 or greater.")

    # Create and sync a repository. If this set-up procedure grows, consider
    # implementing a stack of tear-down actions.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body["importer_config"]["feed"] = RPM_FEED_URL
    global _REPO  # pylint:disable=global-statement
    _REPO = client.post(REPOSITORY_PATH, body)
    try:
        utils.sync_repo(cfg, _REPO["_href"])
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        client.delete(_REPO["_href"])
        raise
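The matching ``tearDownModule`` is implied but not shown. A minimal sketch, assuming the module-level ``_REPO`` created above:

def tearDownModule():  # pylint:disable=invalid-name
    """Delete the repository created by ``setUpModule``."""
    api.Client(config.get_config()).delete(_REPO['_href'])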
Example #5
    def setUpClass(cls):
        """Create, sync and delete an RPM repository.

        Doing this provides orphans that the remaining test methods can make
        use of. If this method fails, it's possible that other repositories
        exist with references to the same content units.
        """
        super(OrphansTestCase, cls).setUpClass()

        # Create orphans.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        try:
            utils.sync_repo(cls.cfg, repo['_href'])
        finally:
            client.delete(repo['_href'])

        # Verify that orphans are present. Support for langpack content units
        # was added in Pulp 2.9.
        orphans = client.get(ORPHANS_PATH)
        expected_count = 39
        if cls.cfg.version >= Version('2.9'):
            expected_count += 1
        actual_count = _count_orphans(orphans)
        if expected_count != actual_count:
            # We can't use fail(), as it's an instance method.
            raise AssertionError(
                'Test case setup failed. We attempted to create {} orphans, '
                'but actually created {}. Orphans: {}'.format(
                    expected_count, actual_count, orphans))
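``_count_orphans`` is not shown. A plausible one-liner, assuming the orphans report maps each content type to a summary dict with a ``count`` field, per Pulp 2's orphan API:

def _count_orphans(orphans):
    """Sum the orphan counts across all content unit types."""
    return sum(summary['count'] for summary in orphans.values())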
Example #6
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test cases
    may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.9'):
        raise unittest.SkipTest('This module requires Pulp 2.9 or greater.')
    if check_issue_2277(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

    # Create and sync a repository.
    client = api.Client(cfg, api.json_handler)
    _CLEANUP.append((client.delete, [ORPHANS_PATH], {}))
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    _REPO.clear()
    _REPO.update(client.post(REPOSITORY_PATH, body))
    _CLEANUP.append((client.delete, [_REPO['_href']], {}))
    try:
        utils.sync_repo(cfg, _REPO)
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        tearDownModule()
        raise
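This variant queues clean-up actions on a module-level ``_CLEANUP`` list. The ``tearDownModule`` it invokes might look like this sketch, running the queued actions in LIFO order:

def tearDownModule():  # pylint:disable=invalid-name
    """Execute the queued clean-up actions, most recent first."""
    while _CLEANUP:
        func, args, kwargs = _CLEANUP.pop()
        func(*args, **kwargs)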
Example #7
    def setUpClass(cls):
        """Create a schedule to publish a repo, verify the ``total_run_count``.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 2 minutes
        4. Wait for 130 seconds and read the schedule to get the number of
           "publish" runs
        """
        super(ScheduledPublishTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body["importer_config"]["feed"] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo["_href"])
        utils.sync_repo(cls.cfg, repo["_href"])

        # Schedule a publish to run every 2 minutes
        distributor = gen_distributor()
        client.post(urljoin(repo["_href"], "distributors/"), distributor)
        scheduling_url = "/".join([
            "distributors", distributor["distributor_id"], "schedules/publish/"
        ])
        schedule_path = urljoin(repo["_href"], scheduling_url)
        schedule = client.post(schedule_path, {"schedule": "PT2M"})

        # Wait for publish to run
        time.sleep(130)

        # Read the schedule
        cls.response = client.get(schedule["_href"])
Example #8
    def setUpClass(cls):
        """Create a schedule to publish a repo, verify the ``total_run_count``.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 2 minutes
        4. Wait for 130 seconds and read the schedule to get the number of
           "publish" runs
        """
        super(ScheduledPublishTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo)

        # Schedule a publish to run every 2 minutes
        distributor = gen_distributor()
        client.post(urljoin(repo['_href'], 'distributors/'), distributor)
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        schedule_path = urljoin(repo['_href'], scheduling_url)
        schedule = client.post(schedule_path, {'schedule': 'PT2M'})

        # Wait for publish to run
        time.sleep(130)

        # Read the schedule
        cls.response = client.get(schedule['_href'])
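A test method consuming ``cls.response`` would presumably assert on ``total_run_count``. A sketch: with a two-minute interval and a 130-second wait, at least one publish run should have completed, though the exact expected count depends on when the first run fires.

    def test_total_run_count(self):
        """Assert the publish schedule ran at least once during the wait."""
        self.assertGreaterEqual(self.response['total_run_count'], 1)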
Example #9
 def test_update_tag_another_repo(self):
     """Check if tagging fail for a manifest from another repo."""
     other = create_docker_repo(self.cfg, 'library/swarm')
     self.addCleanup(api.Client(self.cfg).delete, other['_href'])
     utils.sync_repo(self.cfg, other)
     other = api.Client(self.cfg,
                        api.json_handler).get(other['_href'],
                                              params={'details': True})
     other_manifest = random.choice(
         utils.search_units(self.cfg, other,
                            {'type_ids': ['docker_manifest']}))
     tag_name = utils.uuid4()
     with self.assertRaises(TaskReportError) as context:
         import_upload(
             self.cfg, self.repo, {
                 'unit_type_id': 'docker_tag',
                 'unit_key': {
                     'repo_id': self.repo['id'],
                     'name': tag_name,
                 },
                 'unit_metadata': {
                     'name': tag_name,
                     'digest': other_manifest['metadata']['digest'],
                 },
             })
     self.assertEqual(
         'Manifest with digest {} could not be found in repository {}.'.
         format(other_manifest['metadata']['digest'], self.repo['id']),
         context.exception.task['error']['description'])
     self.assertEqual(len(self._get_tags()), len(self.tags))
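``_get_tags`` is not shown; given the ``search_units`` call above, it is presumably a thin wrapper along these lines:

 def _get_tags(self):
     """Return the docker_tag units currently in the test repository."""
     return utils.search_units(
         self.cfg, self.repo, {'type_ids': ['docker_tag']})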
Example #10
    def test_all(self):
        """Create, sync and publish an OSTree repository.

        Verify that:

        * The distributor's ``last_publish`` attribute is ``None`` after the
          sync. This demonstrates that ``auto_publish`` correctly defaults to
          ``False``.
        * The distributor's ``last_publish`` attribute is not ``None`` after
          the publish.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        # Create a repository.
        body = gen_repo()
        body['importer_config']['feed'] = OSTREE_FEED
        body['importer_config']['branches'] = [OSTREE_BRANCH]
        body['distributors'].append(gen_distributor())
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Sync the repository.
        utils.sync_repo(cfg, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after sync'):
            self.assertIsNone(repo['distributors'][0]['last_publish'])

        # Publish the repository.
        utils.publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after publish'):
            self.assertIsNotNone(repo['distributors'][0]['last_publish'])
Example #11
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super(OnDemandTestCase, cls).setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo)

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        cls.rpm = get_unit(cls.cfg, cls.repo['distributors'][0], RPM)
        cls.same_rpm = get_unit(cls.cfg, cls.repo['distributors'][0], RPM)
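``_create_repo`` is shared by several of these on-demand test cases. A sketch under assumptions (the real helper's feed constant and distributor settings are not shown):

def _create_repo(cfg, download_policy):
    """Create an RPM repo with the given download policy and a distributor."""
    body = gen_repo()
    body['importer_config']['download_policy'] = download_policy
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL  # assumed feed
    distributor = gen_distributor()
    distributor['auto_publish'] = True  # assumption
    distributor['distributor_config']['relative_url'] = body['id']
    body['distributors'] = [distributor]
    return api.Client(cfg, api.json_handler).post(REPOSITORY_PATH, body)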
Example #12
 def test_all(self):
     """Execute the test case business logic."""
     cfg = config.get_config()
     self.check_issue_2363(cfg)
     repo_href = self.create_repo(cfg, RPM_MIRRORLIST_BAD, _gen_rel_url())
     with self.assertRaises(TaskReportError):
         utils.sync_repo(cfg, repo_href)
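``_gen_rel_url`` is not shown; it presumably just generates a random relative URL for the distributor, along these lines:

def _gen_rel_url():
    """Return a random two-segment relative URL, such as 'a1b2/c3d4/'."""
    return '{}/{}/'.format(utils.uuid4(), utils.uuid4())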
Example #13
    def test_02_create_immediate_child(self):
        """Create a child repository with the "immediate" download policy.

        Sync the child repository, and verify it has the same contents as the
        root repository.
        """
        # We disable SSL validation for a practical reason: each HTTPS feed
        # must have a certificate to work, which is burdensome to do here.
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = urljoin(
            self.cfg.get_base_url(),
            _PUBLISH_DIR +
            self.repos['root']['distributors'][0]['config']['relative_url'],
        )
        body['importer_config']['remove_missing'] = True
        body['importer_config']['ssl_validation'] = False
        self.repos['immediate'] = client.post(REPOSITORY_PATH, body)
        self.repos['immediate'] = (_get_details(self.cfg,
                                                self.repos['immediate']))
        utils.sync_repo(self.cfg, self.repos['immediate'])

        # Verify the two repositories have the same contents.
        root_ids = _get_rpm_ids(_get_rpms(self.cfg, self.repos['root']))
        immediate_ids = _get_rpm_ids(
            _get_rpms(self.cfg, self.repos['immediate']))
        self.assertEqual(root_ids, immediate_ids)
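``_get_details``, ``_get_rpms`` and ``_get_rpm_ids`` recur across these child-repository tests. Plausible reconstructions; the unit-ID comparison works because content units are global in Pulp 2, so equal ID sets imply equal content:

def _get_details(cfg, repo):
    """Read the repository with the ``details`` query parameter set."""
    client = api.Client(cfg, api.json_handler)
    return client.get(repo['_href'], params={'details': True})


def _get_rpms(cfg, repo):
    """Return the RPM content units in the given repository."""
    return utils.search_units(cfg, repo, {'type_ids': ['rpm']})


def _get_rpm_ids(search_units):
    """Return the set of content unit IDs from RPM search results."""
    return {unit['unit_id'] for unit in search_units}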
Example #14
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super(OnDemandTestCase, cls).setUpClass()

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
        cls.same_rpm = client.get(path)
Example #15
    def setUpClass(cls):
        """Create a schedule to publish the repository.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 30 seconds
        """
        super(CreateSuccessTestCase, cls).setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Schedule a publish to run every 30 seconds
        distributor = gen_distributor()
        distributor_url = urljoin(repo['_href'], 'distributors/')
        client.post(
            distributor_url,
            distributor
        )
        scheduling_url = urljoin(
            distributor_url,
            '{}/schedules/publish/'.format(distributor['distributor_id']),
        )
        cls.response = client.post(
            scheduling_url,
            {'schedule': 'PT30S'}
        )
        cls.attrs = cls.response.json()
Example #16
    def do_test(self, cfg, repo_registry_id):
        """Execute the test with the given ``repo_registry_id``."""
        # Create, sync and publish.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            'upstream_name': get_upstream_name(cfg),
        })
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config']['repo-registry-id'] = (
            repo_registry_id)
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)
        cli.GlobalServiceManager(cfg).restart(('httpd', ))  # restart Crane

        # Get and inspect /crane/repositories/v2.
        client = self.make_crane_client(cfg)
        repos = client.get('/crane/repositories/v2')
        self.assertIn(repo_registry_id, repos.keys())
        self.assertFalse(repos[repo_registry_id]['protected'])
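``get_upstream_name`` appears throughout the Docker snippets. A minimal sketch, assuming it simply returns the name of a small public image (the real helper may instead consult the Pulp Smash configuration):

def get_upstream_name(cfg):
    """Return the name of the Docker repository to sync from."""
    return 'library/busybox'  # assumption: a small, well-known image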
Example #17
    def test_02_create_on_demand_child(self):
        """Create a child repository with the "on_demand" download policy.

        Also, let the repository's "remove_missing" option be true. Then, sync
        the child repository, and verify it has the same contents as the root
        repository.
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = urljoin(
            self.cfg.get_base_url(),
            _PUBLISH_DIR +
            self.repos['root']['distributors'][0]['config']['relative_url'],
        )
        body['importer_config']['download_policy'] = 'on_demand'
        body['importer_config']['remove_missing'] = True
        body['importer_config']['ssl_validation'] = False
        self.repos['on demand'] = client.post(REPOSITORY_PATH, body)
        self.repos['on demand'] = (_get_details(self.cfg,
                                                self.repos['on demand']))
        utils.sync_repo(self.cfg, self.repos['on demand'])

        # Verify the two repositories have the same contents.
        root_ids = _get_rpm_ids(_get_rpms(self.cfg, self.repos['root']))
        on_demand_ids = _get_rpm_ids(
            _get_rpms(self.cfg, self.repos['on demand']))
        self.assertEqual(root_ids, on_demand_ids)
Example #18
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test cases
    may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.version < Version('2.9'):
        raise unittest.SkipTest('This module requires Pulp 2.9 or greater.')
    if check_issue_2277(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

    # Create and sync a repository. If this set-up procedure grows, consider
    # implementing a stack of tear-down actions.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    global _REPO  # pylint:disable=global-statement
    _REPO = client.post(REPOSITORY_PATH, body)
    try:
        utils.sync_repo(cfg, _REPO['_href'])
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        client.delete(_REPO['_href'])
        raise
Example #19
    def test_all(self):
        """Create, sync and publish an OSTree repository.

        Verify that:

        * The distributor's ``last_publish`` attribute is ``None`` after the
          sync. This demonstrates that ``auto_publish`` correctly defaults to
          ``False``.
        * The distributor's ``last_publish`` attribute is not ``None`` after
          the publish.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)

        # Create a repository.
        body = gen_repo()
        body['importer_config']['feed'] = OSTREE_FEED
        body['importer_config']['branches'] = OSTREE_BRANCHES
        body['distributors'].append(gen_distributor())
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])

        # Sync the repository.
        utils.sync_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after sync'):
            self.assertIsNone(repo['distributors'][0]['last_publish'])

        # Publish the repository.
        utils.publish_repo(cfg, repo)
        repo = client.get(repo['_href'], params={'details': True})
        with self.subTest(comment='verify last_publish after publish'):
            self.assertIsNotNone(repo['distributors'][0]['last_publish'])
Example #20
    def setUpClass(cls):
        """Upload an erratum to a repo, publish, and download the erratum.

        Do the following:

        1. Create an RPM repository with a distributor.
        2. Upload an erratum to the repository.
        3. Publish the repository.
        4. Fetch the repository's ``updateinfo.xml`` file.
        """
        super(UploadErratumTestCase, cls).setUpClass()
        cls.erratum = gen_erratum()

        # Create an RPM repository with a feed and distributor.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])

        # Sync content into the repository, and give it an erratum.
        utils.sync_repo(cls.cfg, repo['_href'])
        utils.upload_import_erratum(cls.cfg, cls.erratum, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})

        # Publish the repository, and fetch and parse updateinfo.xml
        distributor = repo['distributors'][0]
        client.post(
            urljoin(repo['_href'], 'actions/publish/'),
            {'id': distributor['id']},
        )
        path = urljoin('/pulp/repos/', distributor['config']['relative_url'])
        cls.updateinfo = get_repomd_xml(cls.cfg, path, 'updateinfo')
Example #21
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super(OnDemandTestCase, cls).setUpClass()

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
        cls.same_rpm = client.get(path)
Example #22
    def setUpClass(cls):
        """Create a schedule to publish the repository.

        Do the following:

        1. Create a repository with a valid feed
        2. Sync it
        3. Schedule publish to run every 30 seconds
        """
        super(CreateSuccessTestCase, cls).setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo)

        # Schedule a publish to run every 30 seconds
        distributor = gen_distributor()
        distributor_url = urljoin(repo['_href'], 'distributors/')
        client.post(distributor_url, distributor)
        scheduling_url = urljoin(
            distributor_url,
            '{}/schedules/publish/'.format(distributor['distributor_id']),
        )
        cls.response = client.post(scheduling_url, {'schedule': 'PT30S'})
        cls.attrs = cls.response.json()
Example #23
    def setUpClass(cls):
        """Create, sync and publish a repository. Fetch its ``comps.xml``."""
        super(SyncRepoTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo.
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])

        # Sync and publish the repo.
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cls.cfg, repo['_href'])
        client.post(
            urljoin(repo['_href'], 'actions/publish/'),
            {'id': repo['distributors'][0]['id']},
        )
        repo = client.get(repo['_href'], params={'details': True})

        # Fetch and parse comps.xml.
        dist = repo['distributors'][0]
        dist_url = urljoin('/pulp/repos/', dist['config']['relative_url'])
        cls.root_element = get_repomd_xml(cls.cfg, dist_url, 'group')
        cls.xml_as_str = ElementTree.tostring(cls.root_element)
Example #24
def setUpModule():  # pylint:disable=invalid-name
    """Possibly skip the tests in this module. Create and sync an RPM repo.

    Skip this module of tests if Pulp is older than version 2.9. (See `Pulp
    #1724`_.) Then create an RPM repository with a feed and sync it. Test cases
    may copy data from this repository but should **not** change it.

    .. _Pulp #1724: https://pulp.plan.io/issues/1724
    """
    set_up_module()
    cfg = config.get_config()
    if cfg.version < Version('2.9'):
        raise unittest.SkipTest('This module requires Pulp 2.9 or greater.')
    if check_issue_2277(cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/2277')

    # Create and sync a repository.
    client = api.Client(cfg, api.json_handler)
    _CLEANUP.append((client.delete, [ORPHANS_PATH], {}))
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    _REPO.clear()
    _REPO.update(client.post(REPOSITORY_PATH, body))
    _CLEANUP.append((client.delete, [_REPO['_href']], {}))
    try:
        utils.sync_repo(cfg, _REPO['_href'])
    except (exceptions.CallReportError, exceptions.TaskReportError,
            exceptions.TaskTimedOutError):
        tearDownModule()
        raise
Example #25
    def test_01_set_up(self):
        """Create, sync and publish a repository.

        Specifically, do the following:

        1. Create, sync and publish a Docker repository. Let the repository's
           feed reference a v1 Docker registry.
        2. Make Crane immediately re-read the metadata files published by Pulp.
           (Restart Apache.)
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': True,
            'enable_v2': False,
            'feed': DOCKER_V1_FEED_URL,
            'upstream_name': get_upstream_name(self.cfg),
        })
        body['distributors'] = [gen_distributor()]
        type(self).repo = client.post(REPOSITORY_PATH, body)
        type(self).repo = client.get(self.repo['_href'],
                                     params={'details': True})
        utils.sync_repo(self.cfg, self.repo)
        utils.publish_repo(self.cfg, self.repo)

        # Make Crane re-read metadata. (Now!)
        cli.GlobalServiceManager(self.cfg).restart(('httpd', ))
Example #26
 def test_all(self):
     """Execute the test case business logic."""
     cfg = config.get_config()
     self.check_issue_2363(cfg)
     repo_href = self.create_repo(cfg, RPM_MIRRORLIST_BAD, _gen_rel_url())
     with self.assertRaises(TaskReportError):
         utils.sync_repo(cfg, repo_href)
Example #27
    def test_all(self):
        """Work with an image whose name has no namespace.

        Create, sync and publish a Docker repository whose ``UPSTREAM_NAME``
        doesn't include a namespace. A typical Docker image has a name like
        "library/busybox." When a non-namespaced image name like "busybox" is
        given, a prefix of "library" is assumed.
        """
        cfg = config.get_config()
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            'upstream_name': 'busybox',
        })
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)

        # Make Crane read the metadata. (Now!)
        cli.GlobalServiceManager(cfg).restart(('httpd', ))

        # Get and inspect /crane/repositories/v2.
        if (cfg.pulp_version >= Version('2.14')
                and selectors.bug_is_testable(2723, cfg.pulp_version)):
            client = self.make_crane_client(cfg)
            repo_id = repo['id']
            repos = client.get('/crane/repositories/v2')
            self.assertIn(repo_id, repos.keys())
            self.assertFalse(repos[repo_id]['protected'])
Example #28
    def setUpClass(cls):
        """Create, sync and publish a repository. Fetch its ``comps.xml``."""
        super(SyncRepoTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo.
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])

        # Sync and publish the repo.
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cls.cfg, repo['_href'])
        client.post(
            urljoin(repo['_href'], 'actions/publish/'),
            {'id': repo['distributors'][0]['id']},
        )
        repo = client.get(repo['_href'], params={'details': True})

        # Fetch and parse comps.xml.
        dist = repo['distributors'][0]
        dist_url = urljoin('/pulp/repos/', dist['config']['relative_url'])
        cls.root_element = get_repomd_xml(cls.cfg, dist_url, 'group')
        cls.xml_as_str = ElementTree.tostring(cls.root_element)
Example #29
    def test_01_set_up(self):
        """Create, sync and publish a Docker repository.

        Specifically, do the following:

        1. Create, sync and publish a Docker repository. Let the repository's
           upstream name reference a repository that has an image with a
           manifest list and no amd64/linux build.
        2. Make Crane immediately re-read the metadata files published by Pulp.
           (Restart Apache.)
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'].update({
            'enable_v1': False,
            'enable_v2': True,
            'feed': DOCKER_V2_FEED_URL,
            # DOCKER_UPSTREAM_NAME (dmage/manifest-list-test) has an image
            # without any amd64/linux build. However, it has a v1 manifest.
            'upstream_name': 'dmage/busybox',
        })
        body['distributors'] = [gen_distributor()]
        type(self).repo = client.post(REPOSITORY_PATH, body)
        type(self).repo = client.get(self.repo['_href'],
                                     params={'details': True})
        utils.sync_repo(self.cfg, self.repo)
        utils.publish_repo(self.cfg, self.repo)

        # Make Crane read metadata. (Now!)
        cli.GlobalServiceManager(self.cfg).restart(('httpd', ))
Example #30
 def test_all(self):
     """Sync two repositories w/identical content but differing layouts."""
     cfg = config.get_config()
     if check_issue_3104(cfg):
         self.skipTest('https://pulp.plan.io/issues/3104')
     if check_issue_2798(cfg):
         self.skipTest('https://pulp.plan.io/issues/2798')
     if check_issue_2354(cfg):
         self.skipTest('https://pulp.plan.io/issues/2354')
     if (utils.os_is_f26(cfg)
             and selectors.bug_is_untestable(3036, cfg.pulp_version)):
          # Here, the calls to get_unit() cause pulp_streamer.service to die
          # without logging anything. In Pulp #3036, certain actions cause
          # pulp_streamer.service to die while logging a core dump. Thus,
          # this test failure might be unrelated to Pulp #3036.
         self.skipTest('https://pulp.plan.io/issues/3036')
     repos = [
         self.create_repo(cfg, feed, 'on_demand')
         for feed in (RPM_ALT_LAYOUT_FEED_URL, RPM_UNSIGNED_FEED_URL)
     ]
     for repo in repos:
         utils.sync_repo(cfg, repo)
     for repo in repos:
         utils.publish_repo(cfg, repo)
     rpms = []
     for repo in repos:
         with self.subTest(repo=repo):
             rpms.append(
                 get_unit(cfg, repo['distributors'][0], RPM).content)
     self.assertEqual(len(rpms), len(repos))
     self.assertEqual(rpms[0], rpms[1], repos)
Example #31
    def setUpClass(cls):
        """Create, sync and delete an RPM repository.

        Doing this provides orphans that the remaining test methods can make
        use of. If this method fails, it's possible that other repositories
        exist with references to the same content units.
        """
        super(OrphansTestCase, cls).setUpClass()

        # Create orphans.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        try:
            utils.sync_repo(cls.cfg, repo['_href'])
        finally:
            client.delete(repo['_href'])

        # Verify that orphans are present. Support for langpack content units
        # was added in Pulp 2.9.
        orphans = client.get(ORPHANS_PATH)
        expected_count = 39
        if cls.cfg.version >= Version('2.9'):
            expected_count += 1
        actual_count = _count_orphans(orphans)
        if expected_count != actual_count:
            # We can't use fail(), as it's an instance method.
            raise AssertionError(
                'Test case setup failed. We attempted to create {} orphans, '
                'but actually created {}. Orphans: {}'
                .format(expected_count, actual_count, orphans)
            )
Example #32
    def test_all(self):
        """Sync a repo whose updateinfo file has multiple pkglist sections.

        Do the following:

        1. Create and sync a repository with an importer and distributor.
           Ensure the importer's feed is set to
           :data:`pulp_smash.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL`.
        2. Publish the repository, and fetch and parse its updateinfo file.
        3. Verify the updateinfo contains the correct number of ``<pkglists>``
           sections, with the correct contents in each.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2227, cfg.version):
            self.skipTest('https://pulp.plan.io/issues/2227')

        # Create and sync a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        utils.sync_repo(cfg, repo['_href'])

        # Publish the repository, and fetch and parse its updateinfo file.
        repo = client.get(repo['_href'], params={'details': True})
        self.assertEqual(len(repo['distributors']), 1, repo['distributors'])
        distributor = repo['distributors'][0]
        client.post(
            urljoin(repo['_href'], 'actions/publish/'),
            {'id': distributor['id']},
        )
        root_element = get_repomd_xml(
            cfg,
            urljoin('/pulp/repos/', distributor['config']['relative_url']),
            'updateinfo'
        )

        # Verify the contents of the updateinfo file.
        debug = ElementTree.tostring(root_element)
        pkglists = root_element.find('update').findall('pkglist')
        self.assertEqual(len(pkglists), 3, debug)

        collections = [pkglist.find('collection') for pkglist in pkglists]
        names = {collection.find('name').text for collection in collections}
        self.assertEqual(names, {'1', '2', '3'}, debug)

        packages = {
            collection.find('package').find('filename').text
            for collection in collections
        }
        self.assertEqual(packages, {
            'penguin-0.9.1-1.noarch.rpm',
            'shark-0.1-1.noarch.rpm',
            'walrus-5.21-1.noarch.rpm',
        }, debug)
Example #33
 def setUpClass(cls):
     """Create a repository with a feed and sync it."""
     super(PackagesDirectoryTestCase, cls).setUpClass()
     client = api.Client(cls.cfg, api.json_handler)
     body = gen_repo()
     body['importer_config']['feed'] = RPM_FEED_URL
     cls.repo_href = client.post(REPOSITORY_PATH, body)['_href']
     cls.resources.add(cls.repo_href)
     utils.sync_repo(cls.cfg, cls.repo_href)
Example #34
    def test_all(self):
        """Re-sync a child repository with the ``remove_missing`` enabled."""
        repos = []
        cfg = config.get_config()
        if selectors.bug_is_untestable(2616, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2616')

        # Create 1st repo, sync and publish it.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repos.append(client.post(REPOSITORY_PATH, body))
        self.addCleanup(client.delete, repos[0]['_href'])
        repos[0] = _get_details(cfg, repos[0])
        utils.sync_repo(cfg, repos[0])
        utils.publish_repo(cfg, repos[0])

        # Create 2nd repo, sync.
        body = gen_repo()
        body['importer_config']['feed'] = urljoin(
            cfg.get_base_url(),
            _PUBLISH_DIR +
            repos[0]['distributors'][0]['config']['relative_url'],
        )
        body['importer_config']['remove_missing'] = True
        repos.append(client.post(REPOSITORY_PATH, body))
        self.addCleanup(client.delete, repos[1]['_href'])
        repos[1] = _get_details(cfg, repos[1])
        utils.sync_repo(cfg, repos[1])

        # Remove an arbitrary number of units from 1st repo, re-publish it.
        units = _get_rpms(cfg, repos[0])
        marked_units = random.sample(units, random.randint(1, len(units)))
        for marked_unit in marked_units:
            criteria = {
                'filters': {
                    'unit': {
                        'name': marked_unit['metadata']['name']
                    }
                },
                'type_ids': [marked_unit['unit_type_id']],
            }
            client.post(
                urljoin(repos[0]['_href'], 'actions/unassociate/'),
                {'criteria': criteria},
            )
        utils.publish_repo(cfg, repos[0])

        # Re-sync 2nd repo.
        report = utils.sync_repo(cfg, repos[1])
        tasks = tuple(api.poll_spawned_tasks(cfg, report.json()))
        self.assertEqual(
            tasks[0]['result']['removed_count'],
            len(marked_units),
        )
Example #35
    def setUpClass(cls):
        """Create several schedules.

        Each schedule is created to test a different failure scenario.
        """
        super(CreateFailureTestCase, cls).setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo)

        # Add a distributor
        distributor = gen_distributor()
        client.post(urljoin(repo['_href'], 'distributors/'), distributor)
        client.response_handler = api.echo_handler
        cls.bodies = (
            {
                'schedule': None
            },  # 400
            {
                'unknown': 'parameter',
                'schedule': 'PT30S'
            },  # 400
            ['Incorrect data type'],  # 400
            {
                'missing_required_keys': 'schedule'
            },  # 400
            {
                'schedule': 'PT30S'
            },  # tests incorrect distributor in url, 404
            {
                'schedule': 'PT30S'
            },  # tests incorrect repo in url, 404
        )
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        bad_distributor_url = '/'.join(
            ['distributors',
             utils.uuid4(), 'schedules/publish/'])
        bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
        cls.paths = (urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], scheduling_url),
                     urljoin(repo['_href'], bad_distributor_url),
                     urljoin(bad_repo_path, scheduling_url))
        cls.status_codes = (400, 400, 400, 400, 404, 404)
        cls.responses = [
            client.post(path, req_body)
            for path, req_body in zip(cls.paths, cls.bodies)
        ]
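The parallel ``cls.bodies``, ``cls.paths`` and ``cls.status_codes`` tuples suggest a table-driven test method along these lines (a sketch; ``api.echo_handler`` returns raw ``requests.Response`` objects, so ``status_code`` is available):

    def test_status_codes(self):
        """Assert each scheduling request got the expected status code."""
        for response, status_code in zip(self.responses, self.status_codes):
            with self.subTest(status_code=status_code):
                self.assertEqual(response.status_code, status_code)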
Example #36
    def test_04_sync_immediate_child(self):
        """Sync the "immediate" repository.

        Verify it has the same contents as the root repository.
        """
        utils.sync_repo(self.cfg, self.repos['immediate'])
        root_ids = _get_rpm_ids(_get_rpms(self.cfg, self.repos['root']))
        immediate_ids = _get_rpm_ids(
            _get_rpms(self.cfg, self.repos['immediate']))
        self.assertEqual(root_ids, immediate_ids)
Example #37
    def test_04_sync_on_demand_child(self):
        """Sync the "on demand" repository.

        Verify it has the same contents as the root repository.
        """
        utils.sync_repo(self.cfg, self.repos['on demand'])
        root_ids = _get_rpm_ids(_get_rpms(self.cfg, self.repos['root']))
        on_demand_ids = _get_rpm_ids(
            _get_rpms(self.cfg, self.repos['on demand']))
        self.assertEqual(root_ids, on_demand_ids)
Example #38
 def setUpClass(cls):
     """Create and sync a repository."""
     if inspect.getmro(cls)[0] == BaseSearchTestCase:
         raise unittest.SkipTest('Abstract base class.')
     super(BaseSearchTestCase, cls).setUpClass()
     body = gen_repo()
     body['importer_config']['feed'] = cls.get_feed_url()
     cls.repo = api.Client(cls.cfg).post(REPOSITORY_PATH, body).json()
     cls.resources.add(cls.repo['_href'])
     utils.sync_repo(cls.cfg, cls.repo['_href'])
Example #39
 def setUp(self):
     """Create and sync a docker repository."""
     super().setUp()
     self.repo = create_docker_repo(self.cfg, get_upstream_name(self.cfg))
     self.addCleanup(api.Client(self.cfg).delete, self.repo['_href'])
     utils.sync_repo(self.cfg, self.repo)
     self.repo = api.Client(self.cfg,
                            api.json_handler).get(self.repo['_href'],
                                                  params={'details': True})
     self.tags = self._get_tags()
Example #40
 def setUpClass(cls):
     """Create and sync a repository."""
     if inspect.getmro(cls)[0] == BaseSearchTestCase:
         raise unittest.SkipTest("Abstract base class.")
     super(BaseSearchTestCase, cls).setUpClass()
     body = gen_repo()
     body["importer_config"]["feed"] = cls.get_feed_url()
     cls.repo = api.Client(cls.cfg).post(REPOSITORY_PATH, body).json()
     cls.resources.add(cls.repo["_href"])
     utils.sync_repo(cls.cfg, cls.repo["_href"])
Example #41
    def test_all(self):
        """Use the ``force_full`` RPM rsync distributor option."""
        cfg = config.get_config()
        api_client = api.Client(cfg)
        cli_client = cli.Client(cfg)
        sudo = '' if utils.is_root(cfg) else 'sudo '

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo_href = self.make_repo(
            cfg, {
                'remote': {
                    'host': urlparse(cfg.base_url).netloc,
                    'root': '/home/' + ssh_user,
                    'ssh_identity_file': ssh_identity_file,
                    'ssh_user': ssh_user,
                }
            })
        utils.sync_repo(cfg, repo_href)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = _get_dists_by_type_id(cfg, repo_href)
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            api_client.post(urljoin(repo_href, 'actions/publish/'),
                            {'id': distribs[type_id]['id']})
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])

        # Remove all files from the target directory, and publish again. Verify
        # that the RPM rsync distributor didn't place any files.
        cmd = sudo + 'rm -rf /home/{}/content'.format(ssh_user)
        cli_client.run(cmd.split())
        self.verify_publish_is_skip(
            cfg,
            api_client.post(urljoin(repo_href, 'actions/publish/'), {
                'id': distribs['rpm_rsync_distributor']['id']
            }).json())
        cmd = sudo + 'ls -1 /home/{}'.format(ssh_user)
        dirs = set(cli_client.run(cmd.split()).stdout.strip().split('\n'))
        self.assertNotIn('content', dirs)

        # Publish the repo with ``force_full`` set to true. Verify that the RPM
        # rsync distributor placed files.
        if selectors.bug_is_untestable(2202, cfg.version):
            return
        api_client.post(
            urljoin(repo_href, 'actions/publish/'), {
                'id': distribs['rpm_rsync_distributor']['id'],
                'override_config': {
                    'force_full': True
                },
            })
        self.verify_remote_units_path(cfg, distribs['rpm_rsync_distributor'])
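``_get_dists_by_type_id`` is not shown. A plausible reconstruction, assuming a GET on the repository's ``distributors/`` collection:

def _get_dists_by_type_id(cfg, repo_href):
    """Return the repository's distributors, keyed by distributor_type_id."""
    dists = api.Client(cfg, api.json_handler).get(
        urljoin(repo_href, 'distributors/'))
    return {dist['distributor_type_id']: dist for dist in dists}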
Example #42
    def test_all(self):
        """Sync a repo whose updateinfo file has multiple pkglist sections.

        Specifically, do the following:

        1. Create, sync and publish an RPM repository whose feed is set to
           :data:`pulp_smash.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL`.
        2. Fetch and parse the published repository's ``updateinfo.xml`` file.

        Verify that the ``updateinfo.xml`` file has three packages whose
        ``<filename>`` elements have the following text:

        * penguin-0.9.1-1.noarch.rpm
        * shark-0.1-1.noarch.rpm
        * walrus-5.21-1.noarch.rpm

        Note that Pulp is free to change the structure of a source repository
        at will. For example, while the source repository has three
        ``<collection>`` elements, the published repository may have one, two
        or three ``<collection>`` elements. Assertions are not made about
        these details.
        """
        cfg = config.get_config()
        if check_issue_3104(cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        if selectors.bug_is_untestable(2227, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2227')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)

        # Fetch and parse ``updateinfo.xml``.
        updates_element = (get_repodata(cfg, repo['distributors'][0],
                                        'updateinfo'))

        # Verify the ``updateinfo.xml`` file.
        debug = ElementTree.tostring(updates_element)
        filename_elements = (updates_element.findall(
            'update/pkglist/collection/package/filename'))
        filenames = [
            filename_element.text for filename_element in filename_elements
        ]
        filenames.sort()
        self.assertEqual(filenames, [
            'penguin-0.9.1-1.noarch.rpm',
            'shark-0.1-1.noarch.rpm',
            'walrus-5.21-1.noarch.rpm',
        ], debug)
Example #43
    def test_all(self):
        """Test whether copied files retain their original mtime.

        This test targets the following issues:

        * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
        * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

        Do the following:

        1. Create, sync and publish a repository, with ``generate_sqlite`` set
           to true.
        2. Get the ``mtime`` of the sqlite files.
        3. Upload an RPM package into the repository, and sync the repository.
        4. Get the ``mtime`` of the sqlite files again. Verify that the mtimes
           are the same.
        """
        cfg = config.get_config()
        if selectors.bug_is_untestable(2783, cfg.pulp_version):
            self.skipTest('https://pulp.plan.io/issues/2783')

        # Create, sync and publish a repository.
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        body['distributors'][0]['distributor_config']['generate_sqlite'] = True
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(cfg, repo)
        utils.publish_repo(cfg, repo)

        # Get the mtime of the sqlite files.
        cli_client = cli.Client(cfg, cli.echo_handler)
        cmd = '' if utils.is_root(cfg) else 'sudo '
        cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
            os.path.join(
                _PATH,
                repo['distributors'][0]['config']['relative_url'],
                'repodata',
            ))
        mtimes_pre = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())

        # Upload to the repo, and sync it.
        rpm = utils.http_get(RPM_SIGNED_URL)
        utils.upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
        utils.sync_repo(cfg, repo)

        # Get the mtime of the sqlite files again.
        time.sleep(1)
        mtimes_post = sorted(
            cli_client.machine.session().run(cmd)[1].strip().split())
        self.assertEqual(mtimes_pre, mtimes_post)
Example #44
 def setUpClass(cls):
     """Create a repository with a feed and sync it."""
     super(PackagesDirectoryTestCase, cls).setUpClass()
     if check_issue_2277(cls.cfg):
         raise unittest.SkipTest('https://pulp.plan.io/issues/2277')
     client = api.Client(cls.cfg, api.json_handler)
     body = gen_repo()
     body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
     cls.repo_href = client.post(REPOSITORY_PATH, body)['_href']
     cls.resources.add(cls.repo_href)
     utils.sync_repo(cls.cfg, cls.repo_href)
Example #45
 def test_all(self):
     """Execute the test case business logic."""
     cfg = config.get_config()
     self.check_issue_2277(cfg)
     self.check_issue_2321(cfg)
     repo_href = self.create_repo(cfg, RPM_MIRRORLIST_MIXED, _gen_rel_url())
     utils.sync_repo(cfg, repo_href)
     self.publish_repo(cfg, repo_href)
     actual_rpm = self.get_unit(cfg, repo_href, RPM)
     target_rpm = utils.http_get(RPM_UNSIGNED_URL)
     self.assertEqual(actual_rpm, target_rpm)
Example #46
 def setUpClass(cls):
     """Create and sync a repository."""
     super(ForceFullTestCase, cls).setUpClass()
     client = api.Client(cls.cfg, api.json_handler)
     body = gen_repo()
     body['importer_config']['feed'] = RPM_FEED_URL
     body['distributors'] = [gen_distributor()]
     repo = client.post(REPOSITORY_PATH, body)
     cls.resources.add(repo['_href'])
     utils.sync_repo(cls.cfg, repo['_href'])
     cls.repo = client.get(repo['_href'], params={'details': True})
Example #47
    def setUpClass(cls):  # pylint:disable=arguments-differ
        """Create two repositories, the first being the feed of the second.

        Provide a server config and a set of resources to delete. The
        following steps are executed:

        1. Create repository foo with feed, sync and publish it.
        2. Create repository bar with foo as a feed with
           ``retain_old_count=0``.
        3. Run sync of repo foo.
        4. Get information on both repositories.
        """
        super(RetainOldCountTestCase, cls).setUpClass()
        if selectors.bug_is_untestable(2277, cls.cfg.version):
            raise unittest.SkipTest('https://pulp.plan.io/issues/2277')
        client = api.Client(cls.cfg)
        cls.responses = {}
        hrefs = []  # repository hrefs

        # Create and sync the first repository.
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        hrefs.append(client.post(REPOSITORY_PATH, body).json()['_href'])
        cls.responses['first sync'] = utils.sync_repo(cls.cfg, hrefs[0])

        # Add distributor and publish
        cls.responses['distribute'] = client.post(
            urljoin(hrefs[0], 'distributors/'),
            gen_distributor(),
        )
        cls.responses['publish'] = client.post(
            urljoin(hrefs[0], 'actions/publish/'),
            {'id': cls.responses['distribute'].json()['id']},
        )

        # Create and sync the second repository. Ensure it fetches content from
        # the first, and that the `retain_old_count` option is set correctly.
        # We disable SSL validation for a practical reason: each HTTPS feed
        # must have a certificate to work, which is burdensome to do here.
        body = gen_repo()
        body['importer_config']['feed'] = urljoin(
            cls.cfg.base_url,
            _PUBLISH_DIR +
            cls.responses['distribute'].json()['config']['relative_url'],
        )
        body['importer_config']['retain_old_count'] = 0  # see docstring
        body['importer_config']['ssl_validation'] = False
        hrefs.append(client.post(REPOSITORY_PATH, body).json()['_href'])
        cls.responses['second sync'] = utils.sync_repo(cls.cfg, hrefs[1])

        # Read the repositories and mark them for deletion.
        cls.repos = [client.get(href).json() for href in hrefs]
        cls.resources.update(set(hrefs))
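With ``retain_old_count=0``, the second repository should drop all but the
newest version of each package. A hypothetical assertion over the data
gathered above (``content_unit_counts`` is the unit-count field a Pulp 2
repository GET returns; the exact test method is an assumption):

    def test_retain_old_count(self):
        """Sketch: repo bar should hold no more RPMs than repo foo."""
        counts = [
            repo['content_unit_counts'].get('rpm', 0) for repo in self.repos
        ]
        self.assertLessEqual(counts[1], counts[0])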
    def _create_sync_repo(self, feed_url):
        """Create a repository with the given feed and sync it.

        Return the repository's href.
        """
        self.addCleanup(self.client.delete, ORPHANS_PATH)
        body = gen_repo()
        body['importer_config']['feed'] = feed_url
        repo_href = self.client.post(REPOSITORY_PATH, body)['_href']
        self.addCleanup(self.client.delete, repo_href)
        utils.sync_repo(self.cfg, repo_href)
        return repo_href
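A hypothetical caller of this helper; the feed constant and the
``content_unit_counts`` field are assumptions borrowed from the other
examples here:

    def test_sync_signed_feed(self):
        """Sketch: sync a feed and check that units were associated."""
        repo_href = self._create_sync_repo(RPM_SIGNED_FEED_URL)
        repo = self.client.get(repo_href)
        self.assertGreater(repo['content_unit_counts'].get('rpm', 0), 0)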
    def setUpClass(cls):
        """Create an RPM repository and issue a task to download the repo.

        Do the following:

        1. Reset Pulp.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Trigger a repository download.
        5. Corrupt a file in the repository.
        6. Trigger a repository download, without unit verification.
        7. Trigger a repository download, with unit verification.
        """
        super(FixFileCorruptionTestCase, cls).setUpClass()
        if (selectors.bug_is_untestable(1905, cls.cfg.version) and
                _os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Ensure Pulp is empty of units; otherwise, we might just associate
        # pre-existing units.
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Trigger a repository download. Read the repo before and after.
        api_client = api.Client(cls.cfg, api.json_handler)
        download_path = urljoin(repo['_href'], 'actions/download/')
        params = {'details': True}
        cls.repo_pre_download = api_client.get(repo['_href'], params=params)
        api_client.post(download_path, {'verify_all_units': False})
        cls.repo_post_download = api_client.get(repo['_href'], params=params)

        # Corrupt an RPM. The file is there, but the checksum isn't right.
        rpm_abs_path = cls.get_rpm_abs_path()
        cli_client = cli.Client(cls.cfg)
        sudo = '' if utils.is_root(cls.cfg) else 'sudo '
        checksum_cmd = (sudo + 'sha256sum ' + rpm_abs_path).split()
        cls.sha_pre_corruption = cli_client.run(checksum_cmd).stdout.strip()
        cli_client.run((sudo + 'rm ' + rpm_abs_path).split())
        cli_client.run((sudo + 'touch ' + rpm_abs_path).split())
        cli_client.run((sudo + 'chown apache:apache ' + rpm_abs_path).split())
        cls.sha_post_corruption = cli_client.run(checksum_cmd).stdout.strip()

        # Trigger repository downloads, first without and then with
        # unit verification.
        api_client.post(download_path, {'verify_all_units': False})
        cls.unverified_file_sha = cli_client.run(checksum_cmd).stdout.strip()
        api_client.post(download_path, {'verify_all_units': True})
        cls.verified_file_sha = cli_client.run(checksum_cmd).stdout.strip()
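The four recorded checksums support assertions along these lines
(hypothetical test methods; the real test case may slice them differently):

    def test_unverified_download_skips_file(self):
        """Sketch: without verification, the corrupt file is left alone."""
        self.assertEqual(self.sha_post_corruption, self.unverified_file_sha)

    def test_verified_download_repairs_file(self):
        """Sketch: with verification, the original checksum is restored."""
        self.assertNotEqual(self.sha_pre_corruption, self.sha_post_corruption)
        self.assertEqual(self.sha_pre_corruption, self.verified_file_sha)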
    def setUpClass(cls):
        """Create several schedules.

        Each schedule is created to test a different failure scenario.
        """
        super(CreateFailureTestCase, cls).setUpClass()
        client = api.Client(cls.cfg)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body).json()
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Add a distributor
        distributor = gen_distributor()
        client.post(
            urljoin(repo['_href'], 'distributors/'),
            distributor
        )
        client.response_handler = api.echo_handler
        cls.bodies = (
            {'schedule': None},  # 400
            {'unknown': 'parameter', 'schedule': 'PT30S'},  # 400
            ['Incorrect data type'],  # 400
            {'missing_required_keys': 'schedule'},  # 400
            {'schedule': 'PT30S'},  # tests incorrect distributor in url, 404
            {'schedule': 'PT30S'},  # tests incorrect repo in url, 404
        )
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        bad_distributor_url = '/'.join([
            'distributors', utils.uuid4(), 'schedules/publish/'
        ])
        bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
        cls.paths = (
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], scheduling_url),
            urljoin(repo['_href'], bad_distributor_url),
            urljoin(bad_repo_path, scheduling_url)
        )
        cls.status_codes = (400, 400, 400, 400, 404, 404)
        cls.responses = [
            client.post(path, req_body) for path, req_body in zip(
                cls.paths, cls.bodies)
        ]
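A hypothetical companion test, pairing each response with its expected status
code (the class records both in matching order):

    def test_status_codes(self):
        """Sketch: each malformed request gets the expected HTTP status."""
        for response, status_code in zip(self.responses, self.status_codes):
            with self.subTest(status_code=status_code):
                self.assertEqual(response.status_code, status_code)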
Example #51
0
    def _create_sync_repo(self, cfg):
        """Create and sync a repository. Return a detailed dict of repo info.

        Also, schedule the repository for deletion with ``addCleanup()``.
        """
        client = api.Client(cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        utils.sync_repo(cfg, repo['_href'])
        return client.get(repo['_href'], params={'details': True})
Example #52
0
    def setUpClass(cls):
        """Create an RPM repo, sync it, delete the repo, remove orphans."""
        super(OrphanRemoveAllTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body["importer_config"]["feed"] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        utils.sync_repo(cls.cfg, repo["_href"])

        cls.num_orphans_pre_repo_del = _count_orphans(client)
        client.delete(repo["_href"])
        cls.num_orphans_post_repo_del = _count_orphans(client)
        client.delete(ORPHANS_PATH)
        cls.num_orphans_after_rm = _count_orphans(client)
    def setUpClass(cls):
        """Create three schedules and read, update and delete them."""
        super(ReadUpdateDeleteTestCase, cls).setUpClass()
        client = api.Client(cls.cfg, api.json_handler)

        # Create a repo with a valid feed and sync it
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Create schedules
        distributor = gen_distributor()
        client.post(
            urljoin(repo['_href'], 'distributors/'),
            distributor
        )
        scheduling_url = '/'.join([
            'distributors', distributor['distributor_id'], 'schedules/publish/'
        ])
        scheduling_path = urljoin(repo['_href'], scheduling_url)
        cls.schedules = tuple((
            client.post(scheduling_path, {'schedule': 'PT30S'})
            for _ in range(3)
        ))
        cls.responses = {}
        client.response_handler = api.safe_handler

        # Attributes that may be changed after creation
        cls.mutable_attrs = [
            'consecutive_failures', 'last_run_at', 'last_updated', 'next_run',
            'first_run', 'remaining_runs', 'total_run_count'
        ]

        # Read the first schedule
        cls.responses['read_one'] = client.get(cls.schedules[0]['_href'])

        # Read all schedules for the repo
        cls.responses['read_many'] = client.get(scheduling_path)

        # Update the second schedule
        cls.update_body = {'schedule': 'PT1M'}
        cls.responses['update'] = client.put(
            cls.schedules[1]['_href'], cls.update_body
        )

        # Delete the third schedule
        cls.responses['delete'] = client.delete(cls.schedules[2]['_href'])
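``mutable_attrs`` exists so that comparisons can skip server-managed fields.
A hypothetical check of the first schedule against its creation response:

    def test_read_one(self):
        """Sketch: a read-back schedule matches its creation response."""
        attrs = self.responses['read_one'].json()
        for key, value in self.schedules[0].items():
            if key not in self.mutable_attrs:
                with self.subTest(key=key):
                    self.assertEqual(attrs.get(key), value)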
Example #54
0
    def setUpClass(cls):
        """Create RPM repository, delete a package, and publish the repository.

        More specifically, do the following:

        1. Create an RPM repository.
        2. Add a YUM distributor.
        3. Sync the created repository.
        4. Remove the ``gorilla`` package.
        5. Publish the repository. Fetch the ``updateinfo.xml`` file from the
           distributor (via ``repomd.xml``), and parse it.
        """
        super(ErratumPkgListCountTestCase, cls).setUpClass()

        # Create a repository, sync it, and add a yum distributor.
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])
        distributor = client.post(
            urljoin(repo['_href'], 'distributors/'),
            gen_distributor(),
        )

        # Remove the gorilla package unit
        client.post(
            urljoin(repo['_href'], 'actions/unassociate/'),
            {'criteria': get_unit_unassociate_criteria(RPM_ERRATUM_RPM_NAME)},
        )

        # Publish the repository
        client.post(
            urljoin(repo['_href'], 'actions/publish/'),
            {'id': distributor['id']},
        )

        # Fetch and parse updateinfo.xml (or updateinfo.xml.gz), via repomd.xml
        root_element = get_repomd_xml(
            cls.cfg,
            urljoin('/pulp/repos/', distributor['config']['relative_url']),
            'updateinfo'
        )

        # Fetch the erratum and erratum pkglist for the gorilla package
        updates = _get_updates_by_id(root_element)
        erratum = updates[RPM_ERRATUM_ID]
        cls.erratum_pkglists = erratum.findall('pkglist')
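``_get_updates_by_id`` is not shown in this example. Assuming the
conventional ``updateinfo.xml`` layout, where each ``<update>`` element
carries an ``<id>`` child, a minimal sketch of such a helper might be:

    def _get_updates_by_id(root_element):
        """Map each erratum ID to its ``<update>`` element (sketch)."""
        return {
            update.findtext('id'): update
            for update in root_element.findall('update')
        }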
    def test_all(self):
        """Use the ``force_full`` RPM rsync distributor option."""
        cfg = config.get_config()
        api_client = api.Client(cfg)
        cli_client = cli.Client(cfg)
        sudo = '' if utils.is_root(cfg) else 'sudo '

        # Create a user and repo with an importer and distribs. Sync the repo.
        ssh_user, priv_key = self.make_user(cfg)
        ssh_identity_file = self.write_private_key(cfg, priv_key)
        repo_href = self.make_repo(cfg, {'remote': {
            'host': urlparse(cfg.base_url).netloc,
            'root': '/home/' + ssh_user,
            'ssh_identity_file': ssh_identity_file,
            'ssh_user': ssh_user,
        }})
        utils.sync_repo(cfg, repo_href)

        # Publish the repo with the yum and rsync distributors, respectively.
        # Verify that the RPM rsync distributor has placed files.
        distribs = _get_dists_by_type_id(cfg, repo_href)
        self.maybe_disable_selinux(cfg, 2199)
        for type_id in ('yum_distributor', 'rpm_rsync_distributor'):
            api_client.post(urljoin(repo_href, 'actions/publish/'), {
                'id': distribs[type_id]['id']
            })
        self.verify_files_in_dir(cfg, distribs['rpm_rsync_distributor'])

        # Remove all files from the target directory, and publish again. Verify
        # that the RPM rsync distributor didn't place any files.
        cmd = sudo + 'rm -rf /home/{}/content'.format(ssh_user)
        cli_client.run(cmd.split())
        self.verify_publish_is_skip(cfg, api_client.post(
            urljoin(repo_href, 'actions/publish/'),
            {'id': distribs['rpm_rsync_distributor']['id']}
        ).json())
        cmd = sudo + 'ls -1 /home/{}'.format(ssh_user)
        dirs = set(cli_client.run(cmd.split()).stdout.strip().split('\n'))
        self.assertNotIn('content', dirs)

        # Publish the repo with ``force_full`` set to true. Verify that the RPM
        # rsync distributor placed files.
        if selectors.bug_is_untestable(2202, cfg.version):
            return
        api_client.post(urljoin(repo_href, 'actions/publish/'), {
            'id': distribs['rpm_rsync_distributor']['id'],
            'override_config': {'force_full': True},
        })
        self.verify_files_in_dir(cfg, distribs['rpm_rsync_distributor'])
    def create_sync_repo(self, importer_config):
        """Create and sync a repository, and schedule tear-down logic.

        The tear-down logic deletes the created repository and all orphans.
        (``addCleanup()`` is used.) An up-to-date dict of information about the
        repo is returned.
        """
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = importer_config
        self.addCleanup(client.delete, ORPHANS_PATH)
        repo_href = client.post(REPOSITORY_PATH, body)['_href']
        self.addCleanup(client.delete, repo_href)
        utils.sync_repo(self.cfg, repo_href)
        return client.get(repo_href)
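Because the whole ``importer_config`` dict is passed through, callers can
vary the feed and download policy per test. A hypothetical invocation (the
``download_policy`` option appears elsewhere in these examples):

    repo = self.create_sync_repo({
        'feed': RPM_SIGNED_FEED_URL,
        'download_policy': 'on_demand',
    })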
Example #57
0
    def health_check(self):
        """Execute step three of the test plan."""
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(client.delete, repo['_href'])
        repo = client.get(repo['_href'], params={'details': True})
        utils.sync_repo(self.cfg, repo['_href'])
        utils.publish_repo(self.cfg, repo)
        pulp_rpm = get_unit(self.cfg, repo, RPM).content

        # Does this RPM match the original RPM?
        rpm = utils.http_get(RPM_SIGNED_URL)
        self.assertEqual(rpm, pulp_rpm)
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "background" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the repository.
        """
        super(BackgroundTestCase, cls).setUpClass()
        if (selectors.bug_is_untestable(1905, cls.cfg.version) and
                _os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Required to ensure content is actually downloaded.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'background')
        cls.resources.add(repo['_href'])
        report = utils.sync_repo(cls.cfg, repo['_href']).json()

        # Record the tasks spawned when syncing the repository, and the state
        # of the repository itself after the sync.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()
        cls.tasks = tuple(api.poll_spawned_tasks(cls.cfg, report))

        # Download an RPM.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
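A hypothetical assertion over the downloaded RPM; ``RPM_SIGNED_URL`` is
borrowed from the other examples here and may not be the constant this test
case actually pairs with its feed:

    def test_rpm_matches_upstream(self):
        """Sketch: the RPM Pulp serves matches the upstream copy."""
        self.assertEqual(self.rpm.content, utils.http_get(RPM_SIGNED_URL))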
Example #59
0
    def health_check(self):
        """Execute step three of the test plan."""
        client = api.Client(self.cfg, api.json_handler)
        body = gen_repo()
        body["importer_config"]["feed"] = RPM_FEED_URL
        repo = client.post(REPOSITORY_PATH, body)
        self.addCleanup(api.Client(self.cfg).delete, repo["_href"])
        utils.sync_repo(self.cfg, repo["_href"])
        distributor = client.post(
            urljoin(repo["_href"], "distributors/"),
            gen_distributor(),
        )
        client.post(
            urljoin(repo["_href"], "actions/publish/"),
            {"id": distributor["id"]},
        )
        client.response_handler = api.safe_handler
        url = urljoin("/pulp/repos/", distributor["config"]["relative_url"])
        url = urljoin(url, RPM)
        pulp_rpm = client.get(url).content

        # Does this RPM match the original RPM?
        rpm = utils.http_get(RPM_URL)
        self.assertEqual(rpm, pulp_rpm)