Example #1
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "background" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the repository.
        """
        super(BackgroundTestCase, cls).setUpClass()
        if (selectors.bug_is_untestable(1905, cls.cfg.version)
                and _os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Required to ensure content is actually downloaded.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'background')
        cls.resources.add(repo['_href'])
        report = utils.sync_repo(cls.cfg, repo['_href']).json()

        # Record the tasks spawned when syncing the repository, and the state
        # of the repository itself after the sync.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()
        cls.tasks = tuple(api.poll_spawned_tasks(cls.cfg, report))

        # Download an RPM.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
    def setUpClass(cls):
        """Create an RPM repository. Upload the same content into it twice."""
        super(DuplicateUploadsTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)

        # Download content.
        client = api.Client(cls.cfg)
        cls.rpm = client.get(urljoin(RPM_FEED_URL, RPM)).content

        # Create a feed-less repository.
        client = api.Client(cls.cfg, api.json_handler)
        repo = client.post(
            REPOSITORY_PATH, {
                'id': utils.uuid4(),
                'importer_config': {},
                'importer_type_id': 'yum_importer',
                'notes': {
                    '_repo-type': 'rpm-repo'
                },
            })
        cls.resources.add(repo['_href'])

        # Upload content and import it into the repository. Do it twice!
        cls.call_reports = tuple((_upload_import_rpm(cls.cfg, cls.rpm,
                                                     repo['_href'])
                                  for _ in range(2)))
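
The snippet above relies on a module-private _upload_import_rpm helper that is not reproduced on this page. Based on the malloc/upload/import/free workflow used by the PublishTestCase examples further down, a plausible, hypothetical sketch (signature and return value are assumptions) might look like this:

def _upload_import_rpm(cfg, rpm, repo_href):
    """Upload an RPM into a repository and return the import call report.

    Hypothetical reconstruction: the real helper is module-private and not
    shown on this page. The steps mirror the malloc/upload/import/free
    workflow used by the PublishTestCase examples below.
    """
    client = api.Client(cfg, api.json_handler)
    # Begin an upload request and upload the raw RPM bits into it.
    malloc = client.post(CONTENT_UPLOAD_PATH)
    api.Client(cfg).put(urljoin(malloc['_href'], '0/'), data=rpm)
    # Import the uploaded unit into the target repository.
    call_report = client.post(
        urljoin(repo_href, 'actions/import_upload/'),
        {
            'unit_key': {},
            'unit_type_id': 'rpm',
            'upload_id': malloc['upload_id'],
        },
    )
    # End the upload request.
    api.Client(cfg).delete(malloc['_href'])
    return call_report
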
    def setUpClass(cls):
        """Create and sync two puppet repositories."""
        super(SyncValidFeedTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        bodies = tuple((_gen_repo() for _ in range(2)))
        for i, query in enumerate(
            (_PUPPET_QUERY, _PUPPET_QUERY.replace('-', '_'))):
            bodies[i]['importer_config'] = {
                'feed': _PUPPET_FEED,
                'queries': [query],
            }
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, body) for body in bodies]
        cls.resources.update({repo['_href'] for repo in repos})

        # Trigger repository sync and collect completed tasks.
        cls.reports = []  # raw responses to "start syncing" commands
        cls.tasks = []  # completed tasks
        client.response_handler = api.echo_handler
        for repo in repos:
            report = client.post(urljoin(repo['_href'], 'actions/sync/'))
            report.raise_for_status()
            cls.reports.append(report)
            for task in api.poll_spawned_tasks(cls.cfg, report.json()):
                cls.tasks.append(task)
    def setUpClass(cls):
        """Create and sync two puppet repositories."""
        super(SyncValidFeedTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        bodies = tuple((_gen_repo() for _ in range(2)))
        for i, query in enumerate((
                _PUPPET_QUERY, _PUPPET_QUERY.replace('-', '_'))):
            bodies[i]['importer_config'] = {
                'feed': _PUPPET_FEED,
                'queries': [query],
            }
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, body) for body in bodies]
        cls.resources.update({repo['_href'] for repo in repos})

        # Trigger repository sync and collect completed tasks.
        cls.reports = []  # raw responses to "start syncing" commands
        cls.tasks = []  # completed tasks
        client.response_handler = api.echo_handler
        for repo in repos:
            report = client.post(urljoin(repo['_href'], 'actions/sync/'))
            report.raise_for_status()
            cls.reports.append(report)
            for task in utils.poll_spawned_tasks(cls.cfg, report.json()):
                cls.tasks.append(task)
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "background" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the repository.
        """
        super(BackgroundTestCase, cls).setUpClass()
        if (selectors.bug_is_untestable(1905, cls.cfg.version) and
                _os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Required to ensure content is actually downloaded.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'background')
        cls.resources.add(repo['_href'])
        report = utils.sync_repo(cls.cfg, repo['_href']).json()

        # Record the tasks spawned when syncing the repository, and the state
        # of the repository itself after the sync.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()
        cls.tasks = tuple(api.poll_spawned_tasks(cls.cfg, report))

        # Download an RPM.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
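
Several RPM examples above call a module-private _create_repo(cfg, download_policy) helper whose body is not reproduced on this page. Judging from the SyncOnDemandTestCase examples further down, a plausible sketch is a feed-backed repository with the requested download policy and an auto-publishing distributor; the signature and return value here are assumptions:

def _create_repo(cfg, download_policy):
    """Create an RPM repository with the given download policy.

    Hypothetical reconstruction of the module-private helper used above. It
    mirrors the repository body built by the SyncOnDemandTestCase examples.
    """
    body = gen_repo()
    body['importer_config'] = {
        'download_policy': download_policy,
        'feed': RPM_FEED_URL,
    }
    distributor = gen_distributor()
    distributor['auto_publish'] = True
    distributor['distributor_config']['relative_url'] = body['id']
    body['distributors'] = [distributor]
    return api.Client(cfg, api.json_handler).post(REPOSITORY_PATH, body)
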
Example #6
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super(OnDemandTestCase, cls).setUpClass()

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
        cls.same_rpm = client.get(path)
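
The test methods of OnDemandTestCase are not shown here. Given that setUpClass downloads the same RPM twice to exercise the cache, one plausible (hypothetical) check is that both downloads return identical bits:

    def test_same_rpm(self):
        """Assert both downloads returned the same RPM bits.

        Hypothetical example; the real test methods are not shown on this
        page.
        """
        self.assertEqual(self.rpm.content, self.same_rpm.content)
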
Example #7
    def setUp(self):
        """Make sure Pulp and Squid are reset."""
        if check_issue_3104(self.cfg):
            self.skipTest('https://pulp.plan.io/issues/3104')
        # Required to ensure content is actually downloaded.
        utils.reset_squid(self.cfg)
        utils.reset_pulp(self.cfg)
Example #8
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super(OnDemandTestCase, cls).setUpClass()
        if check_issue_3104(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo)

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        cls.rpm = get_unit(cls.cfg, cls.repo['distributors'][0], RPM)
        cls.same_rpm = get_unit(cls.cfg, cls.repo['distributors'][0], RPM)
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Download an RPM from the published repository.
        5. Download the same RPM to ensure it is served by the cache.
        """
        super(OnDemandTestCase, cls).setUpClass()

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Read the repository.
        client = api.Client(cls.cfg)
        cls.repo = client.get(repo['_href'], params={'details': True}).json()

        # Download the same RPM twice.
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
        cls.same_rpm = client.get(path)
    def tearDown(self):
        """Reset the number of Pul pworkers, and reset Pulp.

        Reset Pulp because :meth:`test_all` may break Pulp.
        """
        sudo = () if utils.is_root(self.cfg) else ('sudo', )
        # Delete last line from file.
        cli.Client(self.cfg).run(sudo + ('sed', '-i', '$d', _PULP_WORKERS_CFG))
        utils.reset_pulp(self.cfg)
    def setUpClass(cls):
        """Create an RPM repository. Upload an RPM into it twice."""
        super(DuplicateUploadsTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)
        unit = utils.http_get(RPM_SIGNED_URL)
        unit_type_id = 'rpm'
        client = api.Client(cls.cfg, api.json_handler)
        repo_href = client.post(REPOSITORY_PATH, gen_repo())['_href']
        cls.resources.add(repo_href)
        cls.upload_import_unit_args = (cls.cfg, unit, unit_type_id, repo_href)
    def setUpClass(cls):
        """Create an RPM repository. Upload an RPM into it twice."""
        super(DuplicateUploadsTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)
        unit = utils.http_get(RPM_URL)
        unit_type_id = 'rpm'
        client = api.Client(cls.cfg, api.json_handler)
        repo_href = client.post(REPOSITORY_PATH, gen_repo())['_href']
        cls.resources.add(repo_href)
        cls.upload_import_unit_args = (cls.cfg, unit, unit_type_id, repo_href)
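
The test bodies for these DuplicateUploadsTestCase variants are not shown. A minimal, hypothetical sketch of how the stored arguments might be consumed, assuming a utils.upload_import_unit function that accepts them positionally:

    def test_upload_twice(self):
        """Upload and import the same unit into the repository twice.

        Hypothetical example: it assumes utils.upload_import_unit accepts
        the positional arguments assembled in setUpClass.
        """
        for _ in range(2):
            utils.upload_import_unit(*self.upload_import_unit_args)
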
Example #13
def setUpModule():  # pylint:disable=invalid-name
    """Execute ``pulp-admin login`` and reset Pulp.

    For :class:`RemovedContentTestCase` to function correctly, we require that
    all of the content units on Pulp's filesystem belong to the repository
    created by that test case. Resetting Pulp guarantees that this is so.
    Ideally, all test cases would clean up after themselves so that no resets
    are necessary.
    """
    cfg = config.get_config()
    set_up_module()
    utils.reset_pulp(cfg)
    utils.pulp_admin_login(cfg)
Example #14
    def setUpClass(cls):
        """Create an RPM repository and issue a task to download the repo.

        Do the following:

        1. Reset Pulp.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Trigger a repository download.
        5. Corrupt a file in the repository.
        6. Trigger a repository download, without unit verification.
        7. Trigger a repository download, with unit verification.
        """
        super(FixFileCorruptionTestCase, cls).setUpClass()
        if (selectors.bug_is_untestable(1905, cls.cfg.version)
                and _os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Ensure Pulp is empty of units; otherwise we might just associate
        # pre-existing units.
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Trigger a repository download. Read the repo before and after.
        api_client = api.Client(cls.cfg, api.json_handler)
        download_path = urljoin(repo['_href'], 'actions/download/')
        params = {'details': True}
        cls.repo_pre_download = api_client.get(repo['_href'], params=params)
        api_client.post(download_path, {'verify_all_units': False})
        cls.repo_post_download = api_client.get(repo['_href'], params=params)

        # Corrupt an RPM. The file is there, but the checksum isn't right.
        rpm_abs_path = cls.get_rpm_abs_path()
        cli_client = cli.Client(cls.cfg)
        sudo = '' if utils.is_root(cls.cfg) else 'sudo '
        checksum_cmd = (sudo + 'sha256sum ' + rpm_abs_path).split()
        cls.sha_pre_corruption = cli_client.run(checksum_cmd).stdout.strip()
        cli_client.run((sudo + 'rm ' + rpm_abs_path).split())
        cli_client.run((sudo + 'touch ' + rpm_abs_path).split())
        cli_client.run((sudo + 'chown apache:apache ' + rpm_abs_path).split())
        cls.sha_post_corruption = cli_client.run(checksum_cmd).stdout.strip()

        # Trigger two repository downloads: first without, then with unit
        # verification.
        api_client.post(download_path, {'verify_all_units': False})
        cls.unverified_file_sha = cli_client.run(checksum_cmd).stdout.strip()
        api_client.post(download_path, {'verify_all_units': True})
        cls.verified_file_sha = cli_client.run(checksum_cmd).stdout.strip()
    def setUpClass(cls):
        """Create an RPM repository and issue a task to download the repo.

        Do the following:

        1. Reset Pulp.
        2. Create a repository with the "on demand" download policy.
        3. Sync and publish the repository.
        4. Trigger a repository download.
        5. Corrupt a file in the repository.
        6. Trigger a repository download, without unit verification.
        7. Trigger a repository download, with unit verification.
        """
        super(FixFileCorruptionTestCase, cls).setUpClass()
        if (selectors.bug_is_untestable(1905, cls.cfg.version) and
                _os_is_rhel6(cls.cfg)):
            raise unittest.SkipTest('https://pulp.plan.io/issues/1905')

        # Ensure Pulp is empty of units; otherwise we might just associate
        # pre-existing units.
        utils.reset_pulp(cls.cfg)

        # Create, sync and publish a repository.
        repo = _create_repo(cls.cfg, 'on_demand')
        cls.resources.add(repo['_href'])
        utils.sync_repo(cls.cfg, repo['_href'])

        # Trigger a repository download. Read the repo before and after.
        api_client = api.Client(cls.cfg, api.json_handler)
        download_path = urljoin(repo['_href'], 'actions/download/')
        params = {'details': True}
        cls.repo_pre_download = api_client.get(repo['_href'], params=params)
        api_client.post(download_path, {'verify_all_units': False})
        cls.repo_post_download = api_client.get(repo['_href'], params=params)

        # Corrupt an RPM. The file is there, but the checksum isn't right.
        rpm_abs_path = cls.get_rpm_abs_path()
        cli_client = cli.Client(cls.cfg)
        sudo = '' if utils.is_root(cls.cfg) else 'sudo '
        checksum_cmd = (sudo + 'sha256sum ' + rpm_abs_path).split()
        cls.sha_pre_corruption = cli_client.run(checksum_cmd).stdout.strip()
        cli_client.run((sudo + 'rm ' + rpm_abs_path).split())
        cli_client.run((sudo + 'touch ' + rpm_abs_path).split())
        cli_client.run((sudo + 'chown apache:apache ' + rpm_abs_path).split())
        cls.sha_post_corruption = cli_client.run(checksum_cmd).stdout.strip()

        # Trigger two repository downloads: first without, then with unit
        # verification.
        api_client.post(download_path, {'verify_all_units': False})
        cls.unverified_file_sha = cli_client.run(checksum_cmd).stdout.strip()
        api_client.post(download_path, {'verify_all_units': True})
        cls.verified_file_sha = cli_client.run(checksum_cmd).stdout.strip()
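
The test methods of FixFileCorruptionTestCase are not included on this page. Given the checksums captured in setUpClass, a plausible (hypothetical) set of assertions is that corruption changes the checksum, an unverified download leaves it unchanged, and a verifying download restores the original:

    def test_corruption_repaired(self):
        """Assert that only the verifying download restores the file.

        Hypothetical example based solely on the attributes set above.
        """
        # Corrupting the RPM changed its checksum...
        self.assertNotEqual(self.sha_pre_corruption, self.sha_post_corruption)
        # ...a download without verification does not notice the corruption...
        self.assertEqual(self.sha_post_corruption, self.unverified_file_sha)
        # ...but a download with verification restores the original bits.
        self.assertEqual(self.sha_pre_corruption, self.verified_file_sha)
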
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository. Sync and publish it using the 'on_demand'
           download policy.
        3. Download an RPM from the published repository.
        4. Download the same RPM to ensure it is served by the cache.
        """
        super(SyncOnDemandTestCase, cls).setUpClass()
        if cls.cfg.version < Version('2.8'):
            raise unittest2.SkipTest('This test requires Pulp 2.8 or greater.')

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create a repository
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = {
            'download_policy': 'on_demand',
            'feed': RPM_FEED_URL,
        }
        distributor = gen_distributor()
        distributor['auto_publish'] = True
        distributor['distributor_config']['relative_url'] = body['id']
        body['distributors'] = [distributor]

        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])

        # Sync and read the repository
        sync_path = urljoin(repo['_href'], 'actions/sync/')
        client.post(sync_path, {'override_config': {}})
        cls.repo = client.get(repo['_href'], params={'details': True})

        # Download the same RPM twice.
        client.response_handler = api.safe_handler
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
        cls.same_rpm = client.get(path)
Example #17
    def setUpClass(cls):
        """Create an RPM repository with a valid feed and sync it.

        Do the following:

        1. Reset Pulp, including the Squid cache.
        2. Create a repository. Sync and publish it using the 'on_demand'
           download policy.
        3. Download an RPM from the published repository.
        4. Download the same RPM to ensure it is served by the cache.
        """
        super(SyncOnDemandTestCase, cls).setUpClass()
        if cls.cfg.version < Version('2.8'):
            raise unittest2.SkipTest('This test requires Pulp 2.8 or greater.')

        # Ensure `locally_stored_units` is 0 before we start.
        utils.reset_squid(cls.cfg)
        utils.reset_pulp(cls.cfg)

        # Create a repository
        client = api.Client(cls.cfg, api.json_handler)
        body = gen_repo()
        body['importer_config'] = {
            'download_policy': 'on_demand',
            'feed': RPM_FEED_URL,
        }
        distributor = gen_distributor()
        distributor['auto_publish'] = True
        distributor['distributor_config']['relative_url'] = body['id']
        body['distributors'] = [distributor]

        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])

        # Sync and read the repository
        sync_path = urljoin(repo['_href'], 'actions/sync/')
        client.post(sync_path, {'override_config': {}})
        cls.repo = client.get(repo['_href'], params={'details': True})

        # Download the same RPM twice.
        client.response_handler = api.safe_handler
        path = urljoin('/pulp/repos/', repo['id'] + '/')
        path = urljoin(path, RPM)
        cls.rpm = client.get(path)
        cls.same_rpm = client.get(path)
    def setUpClass(cls):
        """Create an RPM repository. Upload the same content into it twice."""
        super(DuplicateUploadsTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)

        # Download content.
        client = api.Client(cls.cfg)
        cls.rpm = client.get(urljoin(RPM_FEED_URL, RPM)).content

        # Create a feed-less repository.
        client = api.Client(cls.cfg, api.json_handler)
        repo = client.post(
            REPOSITORY_PATH,
            {
                "id": utils.uuid4(),
                "importer_config": {},
                "importer_type_id": "yum_importer",
                "notes": {"_repo-type": "rpm-repo"},
            },
        )
        cls.resources.add(repo["_href"])

        # Upload content and import it into the repository. Do it twice!
        cls.call_reports = tuple(
            _upload_import_rpm(cls.cfg, cls.rpm, repo["_href"])
            for _ in range(2)
        )
    def setUpClass(cls):
        """Upload puppet module to a repo, copy it to another, publish and download.

        Create two puppet repositories, both without feeds. Upload a module to
        the first repository. Copy its content to the second repository. Add
        distributors to the repositories, publish repositories and download
        modules back from them.
        """
        super(PublishTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        cls.responses = {}
        cls.modules = []  # Raw puppet modules.

        # Download a puppet module and create two repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])
        client.response_handler = api.safe_handler
        cls.modules.append(utils.http_get(PUPPET_MODULE_URL_1))

        # Begin an upload request, upload a puppet module, move the puppet
        # module into a repository, and end the upload request.
        cls.responses['malloc'] = client.post(CONTENT_UPLOAD_PATH)
        cls.responses['upload'] = client.put(
            urljoin(cls.responses['malloc'].json()['_href'], '0/'),
            data=cls.modules[0],
        )
        cls.responses['import'] = client.post(
            urljoin(repos[0]['_href'], 'actions/import_upload/'),
            {
                'unit_key': {},
                'unit_type_id': 'puppet_module',
                'upload_id': cls.responses['malloc'].json()['upload_id'],
            },
        )
        cls.responses['free'] = client.delete(
            cls.responses['malloc'].json()['_href'])

        # Copy content from the first puppet repository to the second.
        cls.responses['copy'] = client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': repos[0]['id']})

        # Add a distributor to each repository. Publish each repository.
        for key in {'distribute', 'publish'}:
            cls.responses[key] = []
        for repo in repos:
            cls.responses['distribute'].append(
                client.post(
                    urljoin(repo['_href'], 'distributors/'), {
                        'auto_publish': False,
                        'distributor_id': utils.uuid4(),
                        'distributor_type_id': 'puppet_distributor',
                        'distributor_config': {
                            'serve_http': True,
                            'serve_https': True,
                            'relative_url': '/' + utils.uuid4(),
                        },
                    }))
            cls.responses['publish'].append(
                client.post(
                    urljoin(repo['_href'], 'actions/publish/'),
                    {'id': cls.responses['distribute'][-1].json()['id']},
                ))

        # Query both distributors using all three query forms.
        cls.responses['puppet releases'] = []
        author_name = PUPPET_MODULE_1['author'] + '/' + PUPPET_MODULE_1['name']
        for repo in repos:
            if selectors.bug_is_untestable(1440, cls.cfg.pulp_version):
                continue
            cls.responses['puppet releases'].append(
                client.get(
                    '/api/v1/releases.json',
                    params={'module': author_name},
                    auth=('.', repo['id']),
                ))
            cls.responses['puppet releases'].append(
                client.get(
                    '/pulp_puppet/forge/repository/{}/api/v1/releases.json'.
                    format(repo['id']),
                    params={'module': author_name},
                ))
            if cls.cfg.pulp_version < Version('2.8'):
                continue
            cls.responses['puppet releases'].append(
                client.get(
                    '/v3/releases',
                    params={'module': author_name},
                    auth=('repository', repo['id']),
                ))

        # Download each unit referenced by the queries above.
        for response in cls.responses['puppet releases']:
            body = response.json()
            if set(body.keys()) == {'pagination', 'results'}:  # Puppet >= 3.6
                path = body['results'][0]['file_uri']
            else:
                path = body[author_name][0]['file']
            cls.modules.append(client.get(path).content)

        # Search for all units in each of the two repositories.
        cls.responses['repo units'] = [
            utils.search_units(cls.cfg, repo, {}, api.safe_handler)
            for repo in repos
        ]
    def setUpClass(cls):
        """Upload an RPM to a repo, copy it to another, publish and download.

        Do the following:

        1. Create two RPM repositories, both without feeds.
        2. Upload an RPM to the first repository.
        3. Associate the first repository with the second, causing the RPM to
           be copied.
        4. Add a distributor to both repositories and publish them.
        """
        super(UploadRpmTestCase, cls).setUpClass()
        if check_issue_2387(cls.cfg):
            raise unittest.SkipTest('https://pulp.plan.io/issues/2387')
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        cls.responses = {}

        # Download an RPM and create two repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])
        client.response_handler = api.safe_handler
        cls.rpm = utils.http_get(RPM_SIGNED_URL)

        # Begin an upload request, upload an RPM, move the RPM into a
        # repository, and end the upload request.
        cls.responses['malloc'] = client.post(CONTENT_UPLOAD_PATH)
        cls.responses['upload'] = client.put(
            urljoin(cls.responses['malloc'].json()['_href'], '0/'),
            data=cls.rpm,
        )
        cls.responses['import'] = client.post(
            urljoin(repos[0]['_href'], 'actions/import_upload/'),
            {
                'unit_key': {},
                'unit_type_id': 'rpm',
                'upload_id': cls.responses['malloc'].json()['upload_id'],
            },
        )
        cls.responses['free'] = client.delete(
            cls.responses['malloc'].json()['_href'],
        )

        # Copy content from the first repository to the second.
        cls.responses['copy'] = client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': repos[0]['id']}
        )

        # Add a distributor to and publish both repositories.
        cls.responses['distribute'] = []
        cls.responses['publish'] = []
        for repo in repos:
            cls.responses['distribute'].append(client.post(
                urljoin(repo['_href'], 'distributors/'),
                gen_distributor(),
            ))
            cls.responses['publish'].append(client.post(
                urljoin(repo['_href'], 'actions/publish/'),
                {'id': cls.responses['distribute'][-1].json()['id']},
            ))

        # Search for all units in each of the two repositories.
        body = {'criteria': {}}
        cls.responses['repo units'] = [
            client.post(urljoin(repo['_href'], 'search/units/'), body)
            for repo in repos
        ]
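
Several examples above build repository and distributor bodies with gen_repo() and gen_distributor(), whose return values are not shown on this page. The feed-less repository body built inline by the DuplicateUploadsTestCase examples matches what gen_repo() plausibly produces; the yum distributor config keys below are partly assumptions (relative_url is confirmed by the SyncOnDemandTestCase examples, which overwrite it):

def gen_repo():
    """Return a semi-random dict for creating a feed-less RPM repository.

    Hypothetical sketch that mirrors the repository body built inline by the
    DuplicateUploadsTestCase examples on this page.
    """
    return {
        'id': utils.uuid4(),
        'importer_config': {},
        'importer_type_id': 'yum_importer',
        'notes': {'_repo-type': 'rpm-repo'},
    }


def gen_distributor():
    """Return a semi-random dict for creating a yum distributor.

    Hypothetical sketch: the exact distributor_config keys are assumptions.
    """
    return {
        'auto_publish': False,
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'yum_distributor',
        'distributor_config': {
            'http': True,
            'https': True,
            'relative_url': utils.uuid4(),
        },
    }
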
Example #21
    def setUpClass(cls):
        """Test RPM uploading and downloading, and repo syncing and publishing.

        Do the following:

        1. Create two RPM repositories, both without feeds.
        2. Upload an RPM to the first repository.
        3. Associate the first repository with the second, causing the RPM to
           be copied.
        4. Add a distributor to both repositories, publish them, and download
           RPMs from both repositories.
        """
        super(PublishTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        cls.responses = {}
        cls.rpms = []  # Raw RPMs

        # Download an RPM and create two repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])
        client.response_handler = api.safe_handler
        cls.rpms.append(client.get(urljoin(RPM_FEED_URL, RPM)).content)

        # Begin an upload request, upload an RPM, move the RPM into a
        # repository, and end the upload request.
        cls.responses['malloc'] = client.post(CONTENT_UPLOAD_PATH)
        cls.responses['upload'] = client.put(
            urljoin(cls.responses['malloc'].json()['_href'], '0/'),
            data=cls.rpms[0],
        )
        cls.responses['import'] = client.post(
            urljoin(repos[0]['_href'], 'actions/import_upload/'),
            {
                'unit_key': {},
                'unit_type_id': 'rpm',
                'upload_id': cls.responses['malloc'].json()['upload_id'],
            },
        )
        cls.responses['free'] = client.delete(
            cls.responses['malloc'].json()['_href'], )

        # Copy content from the first repository to the second.
        cls.responses['copy'] = client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': repos[0]['id']})

        # Add a distributor to and publish both repositories.
        cls.responses['distribute'] = []
        cls.responses['publish'] = []
        for repo in repos:
            cls.responses['distribute'].append(
                client.post(
                    urljoin(repo['_href'], 'distributors/'),
                    gen_distributor(),
                ))
            cls.responses['publish'].append(
                client.post(
                    urljoin(repo['_href'], 'actions/publish/'),
                    {'id': cls.responses['distribute'][-1].json()['id']},
                ))

        # Download the RPM from both repositories.
        for response in cls.responses['distribute']:
            url = urljoin('/pulp/repos/',
                          response.json()['config']['relative_url'])
            url = urljoin(url, RPM)
            cls.rpms.append(client.get(url).content)

        # Search for all units in each of the two repositories.
        body = {'criteria': {}}
        cls.responses['repo units'] = [
            client.post(urljoin(repo['_href'], 'search/units/'), body)
            for repo in repos
        ]
    def setUpClass(cls):
        """Test RPM uploading and downloading, and repo syncing and publishing.

        Do the following:

        1. Create two RPM repositories, both without feeds.
        2. Upload an RPM to the first repository.
        3. Associate the first repository with the second, causing the RPM to
           be copied.
        4. Add a distributor to both repositories, publish them, and download
           RPMs from both repositories.
        """
        super(PublishTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        cls.responses = {}
        cls.rpms = []  # Raw RPMs

        # Download an RPM and create two repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, _gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])
        client.response_handler = api.safe_handler
        cls.rpms.append(client.get(urljoin(RPM_FEED_URL, RPM)).content)

        # Begin an upload request, upload an RPM, move the RPM into a
        # repository, and end the upload request.
        cls.responses['malloc'] = client.post(CONTENT_UPLOAD_PATH)
        cls.responses['upload'] = client.put(
            urljoin(cls.responses['malloc'].json()['_href'], '0/'),
            data=cls.rpms[0],
        )
        cls.responses['import'] = client.post(
            urljoin(repos[0]['_href'], 'actions/import_upload/'),
            {
                'unit_key': {},
                'unit_type_id': 'rpm',
                'upload_id': cls.responses['malloc'].json()['upload_id'],
            },
        )
        cls.responses['free'] = client.delete(
            cls.responses['malloc'].json()['_href'],
        )

        # Copy content from the first repository to the second.
        cls.responses['copy'] = client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': repos[0]['id']}
        )

        # Add a distributor to and publish both repositories.
        cls.responses['distribute'] = []
        cls.responses['publish'] = []
        for repo in repos:
            cls.responses['distribute'].append(client.post(
                urljoin(repo['_href'], 'distributors/'),
                _gen_distributor(),
            ))
            cls.responses['publish'].append(client.post(
                urljoin(repo['_href'], 'actions/publish/'),
                {'id': cls.responses['distribute'][-1].json()['id']},
            ))

        # Download the RPM from both repositories.
        for response in cls.responses['distribute']:
            url = urljoin(
                '/pulp/repos/',
                response.json()['config']['relative_url']
            )
            url = urljoin(url, RPM)
            cls.rpms.append(client.get(url).content)

        # Search for all units in each of the two repositories.
        body = {'criteria': {}}
        cls.responses['repo units'] = [
            client.post(urljoin(repo['_href'], 'search/units/'), body)
            for repo in repos
        ]
    def setUpClass(cls):
        """Upload puppet module to a repo, copy it to another, publish and download.

        Create two puppet repositories, both without feeds. Upload a module to
        the first repository. Copy its content to the second repository. Add
        distributors to the repositories, publish repositories and download
        modules back from them.
        """
        super(PublishTestCase, cls).setUpClass()
        utils.reset_pulp(cls.cfg)  # See: https://pulp.plan.io/issues/1406
        cls.responses = {}
        cls.modules = []  # Raw puppet modules.

        # Download a puppet module and create two repositories.
        client = api.Client(cls.cfg, api.json_handler)
        repos = [client.post(REPOSITORY_PATH, gen_repo()) for _ in range(2)]
        for repo in repos:
            cls.resources.add(repo['_href'])
        client.response_handler = api.safe_handler
        cls.modules.append(utils.http_get(PUPPET_MODULE_URL))

        # Begin an upload request, upload a puppet module, move the puppet
        # module into a repository, and end the upload request.
        cls.responses['malloc'] = client.post(CONTENT_UPLOAD_PATH)
        cls.responses['upload'] = client.put(
            urljoin(cls.responses['malloc'].json()['_href'], '0/'),
            data=cls.modules[0],
        )
        cls.responses['import'] = client.post(
            urljoin(repos[0]['_href'], 'actions/import_upload/'),
            {
                'unit_key': {},
                'unit_type_id': 'puppet_module',
                'upload_id': cls.responses['malloc'].json()['upload_id'],
            },
        )
        cls.responses['free'] = client.delete(
            cls.responses['malloc'].json()['_href']
        )

        # Copy content from the first puppet repository to the second.
        cls.responses['copy'] = client.post(
            urljoin(repos[1]['_href'], 'actions/associate/'),
            {'source_repo_id': repos[0]['id']}
        )

        # Add a distributor to each repository. Publish each repository.
        for key in {'distribute', 'publish'}:
            cls.responses[key] = []
        for repo in repos:
            cls.responses['distribute'].append(client.post(
                urljoin(repo['_href'], 'distributors/'),
                {
                    'auto_publish': False,
                    'distributor_id': utils.uuid4(),
                    'distributor_type_id': 'puppet_distributor',
                    'distributor_config': {
                        'serve_http': True,
                        'serve_https': True,
                        'relative_url': '/' + utils.uuid4(),
                    },
                }
            ))
            cls.responses['publish'].append(client.post(
                urljoin(repo['_href'], 'actions/publish/'),
                {'id': cls.responses['distribute'][-1].json()['id']},
            ))

        # Query both distributors using all three query forms.
        cls.responses['puppet releases'] = []
        author_name = PUPPET_MODULE['author'] + '/' + PUPPET_MODULE['name']
        for repo in repos:
            if selectors.bug_is_untestable(1440, cls.cfg.version):
                continue
            cls.responses['puppet releases'].append(client.get(
                '/api/v1/releases.json',
                params={'module': author_name},
                auth=('.', repo['id']),
            ))
            cls.responses['puppet releases'].append(client.get(
                '/pulp_puppet/forge/repository/{}/api/v1/releases.json'
                .format(repo['id']),
                params={'module': author_name},
            ))
            if cls.cfg.version < Version('2.8'):
                continue
            cls.responses['puppet releases'].append(client.get(
                '/v3/releases',
                params={'module': author_name},
                auth=('repository', repo['id']),
            ))

        # Download each unit referenced by the queries above.
        for response in cls.responses['puppet releases']:
            body = response.json()
            if set(body.keys()) == {'pagination', 'results'}:  # Puppet >= 3.6
                path = body['results'][0]['file_uri']
            else:
                path = body[author_name][0]['file']
            cls.modules.append(client.get(path).content)

        # Search for all units in each of the two repositories.
        body = {'criteria': {}}
        cls.responses['repo units'] = [
            client.post(urljoin(repo['_href'], 'search/units/'), body)
            for repo in repos
        ]
def tearDownModule():  # pylint:disable=invalid-name
    """Reset Pulp, in case one of the test cases breaks Pulp."""
    utils.reset_pulp(config.get_config())