def make_repo(self, cfg, dist_cfg_updates):
    """Create a repository with an importer and pair of distributors.

    Create an RPM repository with:

    * A yum importer with a valid feed.
    * A yum distributor.
    * An RPM rsync distributor referencing the yum distributor.

    In addition, schedule the repository for deletion.

    :param pulp_smash.config.PulpSmashConfig cfg: Information about the
        Pulp deployment being targeted.
    :param dist_cfg_updates: A dict to be merged into the RPM rsync
        distributor's ``distributor_config`` dict. At a minimum, this
        argument should have a value of ``{'remote': {…}}``.
    :returns: A detailed dict of information about the repo.
    """
    api_client = api.Client(cfg, api.json_handler)
    # Build the repo body: a yum importer, plus a yum distributor and an
    # rsync distributor chained to it via ``predistributor_id``.
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    yum_dist = gen_distributor()
    rsync_dist = {
        'distributor_id': utils.uuid4(),
        'distributor_type_id': 'rpm_rsync_distributor',
        'distributor_config': {
            'predistributor_id': yum_dist['distributor_id'],
        },
    }
    # Caller-supplied config (e.g. the mandatory 'remote' key) overrides
    # and extends the defaults above.
    rsync_dist['distributor_config'].update(dist_cfg_updates)
    body['distributors'] = [yum_dist, rsync_dist]
    repo = api_client.post(REPOSITORY_PATH, body)
    self.addCleanup(api_client.delete, repo['_href'])
    return api_client.get(repo['_href'], params={'details': True})
def test_all(self):
    """Upload a package group to a repository twice.

    The second upload re-uses the group's ``id`` but changes its ``name``.
    After publishing, the generated 'group' repodata should contain exactly
    one group, carrying the updated attributes.
    """
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    client = api.Client(cfg, api.json_handler)
    # Uploads create orphaned units when replaced; clean them up afterward.
    self.addCleanup(client.delete, ORPHANS_PATH)

    # Create a repository.
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])

    # Give the repository a package group, and publish the repository.
    package_group = {'id': utils.uuid4(), 'name': utils.uuid4()}
    _upload_import_package_group(cfg, repo, package_group)
    repo = client.get(repo['_href'], params={'details': True})
    utils.publish_repo(cfg, repo)

    # Update the repository's package group (same id, new name), and
    # re-publish the repository.
    package_group['name'] = utils.uuid4()
    _upload_import_package_group(cfg, repo, package_group)
    utils.publish_repo(cfg, repo)

    # Fetch the generated repodata of type 'group' (a.k.a. 'comps'). Verify
    # the package group portion: one group, with the updated attributes.
    root_element = get_repodata(cfg, repo['distributors'][0], 'group')
    groups = root_element.findall('group')
    self.assertEqual(len(groups), 1, ElementTree.tostring(root_element))
    for key, value in package_group.items():
        with self.subTest(key=key):
            self.assertEqual(groups[0].find(key).text, value)
def setUpClass(cls):
    """Create a schedule to publish the repository.

    Do the following:

    1. Create a repository with a valid feed
    2. Sync it
    3. Schedule publish to run every 30 seconds
    """
    super(CreateSuccessTestCase, cls).setUpClass()
    client = api.Client(cls.cfg)

    # Create a repo with a valid feed and sync it.
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, repo_body).json()
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)

    # Attach a distributor, then schedule a publish every 30 seconds.
    distributor = gen_distributor()
    distributors_path = urljoin(repo['_href'], 'distributors/')
    client.post(distributors_path, distributor)
    schedule_path = urljoin(
        distributors_path,
        '{}/schedules/publish/'.format(distributor['distributor_id']),
    )
    cls.response = client.post(schedule_path, {'schedule': 'PT30S'})
    cls.attrs = cls.response.json()
def test_update_checksum_type(self):
    """Check if RPM distributor can receive null checksum_type.

    See: https://pulp.plan.io/issues/2134.
    """
    cfg = config.get_config()
    if cfg.pulp_version < version.Version('2.9'):
        raise unittest.SkipTest('This test requires Pulp 2.9 or above.')
    client = api.Client(cfg, api.json_handler)
    distributor = gen_distributor()
    body = gen_repo()
    body['distributors'] = [distributor]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    # Flip the checksum type: null -> sha256 -> null. After each update,
    # re-read the repo and verify the distributor config reflects it.
    for checksum_type in (None, 'sha256', None):
        distributor_configs = {
            distributor['distributor_id']: {
                'checksum_type': checksum_type,
            },
        }
        client.put(
            repo['_href'],
            {'distributor_configs': distributor_configs},
        )
        repo = client.get(repo['_href'], params={'details': True})
        self.assertEqual(
            repo['distributors'][0]['config'].get('checksum_type'),
            checksum_type,
        )
def setUpClass(cls):
    """Create a schedule to publish a repo, verify the ``total_run_count``.

    Do the following:

    1. Create a repository with a valid feed
    2. Sync it
    3. Schedule publish to run every 2 minutes
    4. Wait for 130 seconds and read the schedule to get the number of
       "publish" runs
    """
    super(ScheduledPublishTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)

    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)

    # Schedule a publish to run every 2 minutes
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    schedule_path = urljoin(repo['_href'], scheduling_url)
    schedule = client.post(schedule_path, {'schedule': 'PT2M'})

    # Wait long enough for the PT2M schedule to fire at least once
    # (130 s > the 120 s interval).
    time.sleep(130)

    # Re-read the schedule; tests inspect its run counters.
    cls.response = client.get(schedule['_href'])
def setUpClass(cls):
    """Create a repository."""
    cls.cfg = config.get_config()
    client = api.Client(cls.cfg)
    # A repo with a single yum distributor and no importer feed.
    repo_body = gen_repo()
    repo_body['distributors'] = [gen_distributor()]
    cls.repo = client.post(REPOSITORY_PATH, repo_body).json()
def setUpClass(cls):
    """Generate, fetch and parse a ``repomd.xml`` file.

    Do the following:

    1. Create an RPM repository with a YUM distributor and publish it.
    2. Fetch the ``repomd.xml`` file from the distributor, and parse it.
    """
    super(RepoMDTestCase, cls).setUpClass()
    for issue_check, issue_url in (
            (check_issue_3104, 'https://pulp.plan.io/issues/3104'),
            (check_issue_2277, 'https://pulp.plan.io/issues/2277'),
    ):
        if issue_check(cls.cfg):
            raise unittest.SkipTest(issue_url)

    # Create a repository with a yum distributor, and publish it.
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    repo = client.get(repo['_href'], params={'details': True})
    cls.resources.add(repo['_href'])
    utils.publish_repo(cls.cfg, repo)

    # Fetch and parse repomd.xml from the published location.
    client.response_handler = xml_handler
    relative_url = repo['distributors'][0]['config']['relative_url']
    path = urljoin('/pulp/repos/', relative_url)
    path = urljoin(path, 'repodata/repomd.xml')
    cls.root_element = client.get(path)
def setUpClass(cls):
    """Create an RPM repository, upload package groups, and publish."""
    super(UploadPackageGroupsTestCase, cls).setUpClass()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')

    # Create a repository and add a distributor to it.
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})

    # Generate several package groups, import them into the repository, and
    # publish the repository.
    cls.package_groups = {
        'minimal': _gen_minimal_group(),
        'realistic': _gen_realistic_group(),
    }
    cls.tasks = {}
    for key, package_group in cls.package_groups.items():
        report = _upload_import_package_group(cls.cfg, repo, package_group)
        # Keep the spawned tasks so tests can inspect per-group results.
        cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
    utils.publish_repo(cls.cfg, repo)

    # Fetch the generated repodata of type 'group' (a.k.a. 'comps')
    cls.root_element = (get_repodata(cls.cfg, repo['distributors'][0],
                                     'group'))
def _create_distributor(
        server_config, href, distributor_type_id, checksum_type=None):
    """Create an export distributor for the entity at ``href``.

    :param server_config: Information about the Pulp deployment being
        targeted.
    :param href: The repository (or group) the distributor is attached to.
    :param distributor_type_id: Overrides the generated distributor's type.
    :param checksum_type: If given, set ``checksum_type`` in the
        distributor's config.
    :returns: The JSON-decoded response body.
    """
    body = gen_distributor()
    body['distributor_type_id'] = distributor_type_id
    if checksum_type is not None:
        body['distributor_config']['checksum_type'] = checksum_type
    client = api.Client(server_config)
    return client.post(urljoin(href, 'distributors/'), body).json()
def setUpClass(cls):
    """Create several schedules.

    Each schedule is created to test a different failure scenario.
    """
    super(CreateFailureTestCase, cls).setUpClass()
    client = api.Client(cls.cfg)

    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body).json()
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)

    # Add a distributor
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    # echo_handler returns raw responses, so bad requests don't raise.
    client.response_handler = api.echo_handler
    # Each request body below pairs positionally with a path in cls.paths
    # and an expected status code in cls.status_codes.
    cls.bodies = (
        {
            'schedule': None
        },  # 400
        {
            'unknown': 'parameter',
            'schedule': 'PT30S'
        },  # 400
        ['Incorrect data type'],  # 400
        {
            'missing_required_keys': 'schedule'
        },  # 400
        {
            'schedule': 'PT30S'
        },  # tests incorrect distributor in url, 404
        {
            'schedule': 'PT30S'
        },  # tests incorrect repo in url, 404
    )
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    bad_distributor_url = '/'.join(
        ['distributors', utils.uuid4(), 'schedules/publish/'])
    bad_repo_path = '/'.join([REPOSITORY_PATH, utils.uuid4()])
    cls.paths = (urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], scheduling_url),
                 urljoin(repo['_href'], bad_distributor_url),
                 urljoin(bad_repo_path, scheduling_url))
    cls.status_codes = (400, 400, 400, 400, 404, 404)
    cls.responses = [
        client.post(path, req_body)
        for path, req_body in zip(cls.paths, cls.bodies)
    ]
def test_all(self):
    """Re-sync a child repository with the ``remove_missing`` enabled."""
    repos = []
    cfg = config.get_config()
    if selectors.bug_is_untestable(2616, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2616')

    # Create 1st repo, sync and publish it.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[0]['_href'])
    repos[0] = _get_details(cfg, repos[0])
    utils.sync_repo(cfg, repos[0])
    utils.publish_repo(cfg, repos[0])

    # Create 2nd repo, sync. Its feed is the 1st repo's published URL, and
    # ``remove_missing`` makes a re-sync drop units missing upstream.
    body = gen_repo()
    body['importer_config']['feed'] = urljoin(
        cfg.get_base_url(),
        _PUBLISH_DIR +
        repos[0]['distributors'][0]['config']['relative_url'],
    )
    body['importer_config']['remove_missing'] = True
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[1]['_href'])
    repos[1] = _get_details(cfg, repos[1])
    utils.sync_repo(cfg, repos[1])

    # Remove an arbitrary number of units from 1st repo, re-publish it.
    units = _get_rpms(cfg, repos[0])
    marked_units = random.sample(units, random.randint(1, len(units)))
    for marked_unit in marked_units:
        criteria = {
            'filters': {
                'unit': {
                    'name': marked_unit['metadata']['name']
                }
            },
            'type_ids': [marked_unit['unit_type_id']],
        }
        client.post(
            urljoin(repos[0]['_href'], 'actions/unassociate/'),
            {'criteria': criteria},
        )
    utils.publish_repo(cfg, repos[0])

    # Re-sync 2nd repo. The sync task should report removing exactly the
    # units dropped from the 1st repo.
    report = utils.sync_repo(cfg, repos[1])
    tasks = tuple(api.poll_spawned_tasks(cfg, report.json()))
    self.assertEqual(
        tasks[0]['result']['removed_count'],
        len(marked_units),
    )
def test_all(self):
    """Sync a repo whose updateinfo file has multiple pkglist sections.

    Specifically, do the following:

    1. Create, sync and publish an RPM repository whose feed is set to
       :data:`pulp_smash.constants.RPM_PKGLISTS_UPDATEINFO_FEED_URL`.
    2. Fetch and parse the published repository's ``updateinfo.xml`` file.

    Verify that the ``updateinfo.xml`` file has three packages whose
    ``<filename>`` elements have the following text:

    * penguin-0.9.1-1.noarch.rpm
    * shark-0.1-1.noarch.rpm
    * walrus-5.21-1.noarch.rpm

    Note that Pulp is free to change the structure of a source repository
    at will. For example, the source repository has three ``<collection>``
    elements, the published repository can have one, two or three
    ``<collection>`` elements. Assertions are not made about these details.
    """
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    # Fixed: the issue number passed here (previously 2227) now matches
    # the issue actually referenced, 2277.
    if selectors.bug_is_untestable(2277, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2277')

    # Create, sync and publish a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_PKGLISTS_UPDATEINFO_FEED_URL
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    utils.sync_repo(cfg, repo)
    utils.publish_repo(cfg, repo)

    # Fetch and parse ``updateinfo.xml``.
    updates_element = (get_repodata(cfg, repo['distributors'][0],
                                    'updateinfo'))

    # Verify the ``updateinfo.xml`` file: collect every package filename
    # across all collections, then compare against the expected set.
    debug = ElementTree.tostring(updates_element)
    filename_elements = (updates_element.findall(
        'update/pkglist/collection/package/filename'))
    filenames = sorted(
        filename_element.text for filename_element in filename_elements)
    self.assertEqual(filenames, [
        'penguin-0.9.1-1.noarch.rpm',
        'shark-0.1-1.noarch.rpm',
        'walrus-5.21-1.noarch.rpm',
    ], debug)
def test_all(self):
    """Test whether copied files retain their original mtime.

    This test targets the following issues:

    * `Pulp #2783 <https://pulp.plan.io/issues/2783>`_
    * `Pulp Smash #720 <https://github.com/PulpQE/pulp-smash/issues/720>`_

    Do the following:

    1. Create, sync and publish a repository, with ``generate_sqlite`` set
       to true.
    2. Get the ``mtime`` of the sqlite files.
    3. Upload an RPM package into the repository, and sync the repository.
    4. Get the ``mtime`` of the sqlite files again. Verify that the mtimes
       are the same.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(2783, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2783')

    # Create, sync and publish a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    body['distributors'][0]['distributor_config']['generate_sqlite'] = True
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    utils.sync_repo(cfg, repo)
    utils.publish_repo(cfg, repo)

    # Get the mtime of the sqlite files.
    cli_client = cli.Client(cfg, cli.echo_handler)
    cmd = '' if utils.is_root(cfg) else 'sudo '
    cmd += "bash -c \"stat --format %Y '{}'/*\"".format(
        os.path.join(
            _PATH,
            repo['distributors'][0]['config']['relative_url'],
            'repodata',
        ))
    # BUG FIX: ``list.sort()`` returns None, so the previous expression
    # ``....split().sort()`` made both mtime collections None and the
    # final assertion vacuously true. Use ``sorted()`` instead.
    mtimes_pre = sorted(
        cli_client.machine.session().run(cmd)[1].strip().split())

    # Upload to the repo, and sync it.
    rpm = utils.http_get(RPM_SIGNED_URL)
    utils.upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)
    utils.sync_repo(cfg, repo)

    # Get the mtime of the sqlite files again. Sleep for one second first,
    # so that a regenerated file would have a strictly newer mtime.
    time.sleep(1)
    mtimes_post = sorted(
        cli_client.machine.session().run(cmd)[1].strip().split())
    self.assertEqual(mtimes_pre, mtimes_post)
def setUpClass(cls):
    """Create and sync a repository."""
    super(ForceFullTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    created = client.post(REPOSITORY_PATH, repo_body)
    cls.resources.add(created['_href'])
    utils.sync_repo(cls.cfg, created)
    # Re-read the repo with details so tests can access distributor info.
    cls.repo = client.get(created['_href'], params={'details': True})
def test_all(self):
    """Sync a repository, change its feed, and sync it again."""
    if check_issue_3104(self.cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')

    # Create, sync and publish repositories A and B.
    repos = []
    for _ in range(2):
        body = gen_repo()
        body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
        body['distributors'] = [gen_distributor()]
        repos.append(self.create_sync_publish_repo(body))

    # Create repository C, let it sync from repository A, and publish it.
    body = gen_repo()
    body['importer_config']['feed'] = self.get_feed(repos[0])
    body['importer_config']['ssl_validation'] = False
    body['distributors'] = [gen_distributor()]
    repo = self.create_sync_publish_repo(body)

    # Update repository C so that it syncs from repository B instead, and
    # verify the importer config took effect.
    client = api.Client(self.cfg, api.json_handler)
    feed = self.get_feed(repos[1])
    client.put(repo['importers'][0]['_href'],
               {'importer_config': {
                   'feed': feed
               }})
    repo = client.get(repo['_href'], params={'details': True})
    self.assertEqual(repo['importers'][0]['config']['feed'], feed)

    # Sync and publish repository C, then check the RPM served from C is
    # byte-identical to the upstream file.
    utils.sync_repo(self.cfg, repo)
    utils.publish_repo(self.cfg, repo)
    rpm = utils.http_get(RPM_UNSIGNED_URL)
    response = get_unit(self.cfg, repo['distributors'][0], RPM)
    with self.subTest():
        self.assertIn(response.headers['content-type'],
                      ('application/octet-stream', 'application/x-rpm'))
    with self.subTest():
        self.assertEqual(rpm, response.content)
def test_update_on_copy(self):
    """Check if copying units into a repo updates ``last_unit_added``.

    Do the following:

    1. Create a repository with a feed and sync it.
    2. Create a second repository. Assert the second repository's
       ``last_unit_added`` attribute is null.
    3. Copy a content unit from first repository to the second. Assert the
       second repository's ``last_unit_added`` attribute is non-null.
    4. Publish the second repository. Assert its ``last_unit_added``
       attribute is non-null.
    """
    if selectors.bug_is_untestable(2688, self.cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2688')

    # create a repo with a feed and sync it
    utils.sync_repo(self.cfg, self.repo)
    self.repo = self.client.get(self.repo['_href'],
                                params={'details': True})

    # create a second repository
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo2 = self.client.post(REPOSITORY_PATH, body)
    self.addCleanup(self.client.delete, repo2['_href'])
    repo2 = self.client.get(repo2['_href'], params={'details': True})
    with self.subTest(comment='after repository creation'):
        self.assertIsNone(repo2['last_unit_added'])

    # copy a content unit ('bear' RPM) from the first repo to the second
    self.client.post(
        urljoin(repo2['_href'], 'actions/associate/'), {
            'source_repo_id': self.repo['id'],
            'criteria': {
                'filters': {
                    'unit': {
                        'name': 'bear'
                    }
                },
                'type_ids': ['rpm'],
            },
        })
    repo2 = self.client.get(repo2['_href'], params={'details': True})
    with self.subTest(comment='after unit association'):
        self.assertIsNotNone(repo2['last_unit_added'], repo2)

    # publish the second repo; the timestamp must survive a publish
    utils.publish_repo(self.cfg, repo2)
    repo2 = self.client.get(repo2['_href'], params={'details': True})
    with self.subTest(comment='after repository publish'):
        self.assertIsNotNone(repo2['last_unit_added'], repo2)
def test_01_create_root_repo(self):
    """Create, sync and publish a repository.

    The repositories created in later steps sync from this one.
    """
    client = api.Client(self.cfg, api.json_handler)
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    root_repo = client.post(REPOSITORY_PATH, repo_body)
    # Store the detailed view; later steps read distributor info from it.
    self.repos['root'] = _get_details(self.cfg, root_repo)
    utils.sync_repo(self.cfg, self.repos['root'])
    utils.publish_repo(self.cfg, self.repos['root'])
def test_all(self):
    """Check for content synced from a feed with PULP_DISTRIBUTION.xml."""
    if self.cfg.pulp_version < version.Version('2.11.2'):
        self.skipTest(
            'PULP_DISTRIBUTION.xml improved parsing is available on Pulp '
            '2.11.2+')
    client = api.Client(self.cfg, api.json_handler)
    # auto_publish makes the sync below also publish the repo.
    distributor = gen_distributor()
    distributor['auto_publish'] = True
    body = gen_repo()
    body['distributors'] = [distributor]
    body['importer_config'] = {
        'feed': RPM_WITH_PULP_DISTRIBUTION_FEED_URL,
    }
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    utils.sync_repo(self.cfg, repo)
    repo = client.get(repo['_href'], params={'details': True})
    # The synced repo is expected to contain exactly one distribution unit.
    self.assertEqual(repo['content_unit_counts']['distribution'], 1)
    cli_client = cli.Client(self.cfg, cli.code_handler)
    relative_url = repo['distributors'][0]['config']['relative_url']
    sudo = () if utils.is_root(self.cfg) else ('sudo', )
    pulp_distribution = cli_client.run(sudo + (
        'cat',
        os.path.join(
            '/var/lib/pulp/published/yum/http/repos/',
            relative_url,
            'PULP_DISTRIBUTION.xml',
        ),
    )).stdout
    # make sure published repository PULP_DISTRIBUTION.xml does not include
    # any extra file from the original repo's PULP_DISTRIBUTION.xml under
    # metadata directory
    self.assertNotIn('metadata/productid', pulp_distribution)
    release_info = cli_client.run(sudo + (
        'cat',
        os.path.join(
            '/var/lib/pulp/published/yum/http/repos/',
            relative_url,
            'release-notes/release-info',
        ),
    )).stdout
    response = requests.get(
        urljoin(
            urljoin(RPM_WITH_PULP_DISTRIBUTION_FEED_URL, 'release-notes/'),
            'release-info',
        ))
    # make sure published repository has extra files outside the metadata
    # directory from the original repo's PULP_DISTRIBUTION.xml
    self.assertEqual(release_info, response.text)
def test_all(self):
    """Test that recursive copy of errata copies RPM packages.

    This test targets the following issues:

    * `Pulp Smash #769 <https://github.com/PulpQE/pulp-smash/issues/769>`_
    * `Pulp #3004 <https://pulp.plan.io/issues/3004>`_

    Do the following:

    1. Create and sync a repository with errata, and RPM packages.
    2. Create second repository.
    3. Copy units from first repository to second repository using
       ``recursive`` as true, and filter ``type_id`` as ``erratum``.
    4. Assert that RPM packages were copied.
    """
    cfg = config.get_config()
    if selectors.bug_is_untestable(3004, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/3004')

    # Create and sync a repository with errata and RPM packages.
    repos = []
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UPDATED_INFO_FEED_URL
    body['distributors'] = [gen_distributor()]
    repos.append(client.post(REPOSITORY_PATH, body))
    self.addCleanup(client.delete, repos[0]['_href'])
    utils.sync_repo(cfg, repos[0])

    # Create a second repository.
    repos.append(client.post(REPOSITORY_PATH, gen_repo()))
    self.addCleanup(client.delete, repos[1]['_href'])

    # Copy data to second repository. Only errata are selected, but
    # ``recursive`` should make Pulp copy the RPMs they reference too.
    client.post(
        urljoin(repos[1]['_href'], 'actions/associate/'), {
            'source_repo_id': repos[0]['id'],
            'override_config': {
                'recursive': True
            },
            'criteria': {
                'filters': {},
                'type_ids': ['erratum']
            },
        })

    # Assert that RPM packages were copied.
    units = utils.search_units(cfg, repos[1], {'type_ids': ['rpm']})
    self.assertGreater(len(units), 0)
def _create_repo(server_config, download_policy):
    """Create an RPM repository with the given download policy.

    The repository has a valid feed and is configured to auto-publish.
    Return the JSON-decoded response body.
    """
    body = gen_repo()
    body['importer_config']['download_policy'] = download_policy
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    # Auto-publish via a distributor whose relative URL is the repo id.
    distributor = gen_distributor()
    distributor['auto_publish'] = True
    distributor['distributor_config']['relative_url'] = body['id']
    body['distributors'] = [distributor]
    client = api.Client(server_config)
    return client.post(REPOSITORY_PATH, body).json()
def _create_sync_repo(self, cfg):
    """Create and sync a repository. Return a detailed dict of repo info.

    Also, schedule the repository for deletion with ``addCleanup()``.

    :param pulp_smash.config.PulpSmashConfig cfg: Information about the
        Pulp deployment being targeted.
    :returns: A detailed dict of information about the repository.
    """
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    utils.sync_repo(cfg, repo)
    # Fixed: the original issued two identical detailed GETs back to back
    # and discarded the first result; a single GET suffices.
    return client.get(repo['_href'], params={'details': True})
def test_all(self):
    """Publish a repository with the repoview feature on and off."""
    cfg = config.get_config()
    if cfg.pulp_version < Version('2.9'):
        self.skipTest('https://pulp.plan.io/issues/189')

    # Create a repo, and add content
    client = api.Client(cfg)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body).json()
    self.addCleanup(client.delete, repo['_href'])
    rpm = utils.http_get(constants.RPM_UNSIGNED_URL)
    utils.upload_import_unit(cfg, rpm, {'unit_type_id': 'rpm'}, repo)

    # Get info about the repo distributor
    repo = client.get(repo['_href'], params={'details': True}).json()
    pub_path = urljoin('/pulp/repos/',
                       repo['distributors'][0]['config']['relative_url'])

    # Publish the repo. With repoview off, the published path is served
    # directly — no redirects, so response.history is empty.
    utils.publish_repo(cfg, repo)
    response = client.get(pub_path)
    with self.subTest(comment='first publish'):
        self.assertEqual(len(response.history), 0, response.history)

    # Publish the repo a second time, with repoview on. The published path
    # should now redirect once, to repoview's index page.
    utils.publish_repo(
        cfg, repo, {
            'id': repo['distributors'][0]['id'],
            'override_config': {
                'generate_sqlite': True,
                'repoview': True
            },
        })
    response = client.get(pub_path)
    with self.subTest(comment='second publish'):
        self.assertEqual(len(response.history), 1, response.history)
        self.assertEqual(
            response.request.url,
            urljoin(response.history[0].request.url,
                    'repoview/index.html'))

    # Publish the repo a third time, without the override: the redirect
    # should disappear again.
    if selectors.bug_is_untestable(2349, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2349')
    utils.publish_repo(cfg, repo)
    response = client.get(pub_path)
    with self.subTest(comment='third publish'):
        self.assertEqual(len(response.history), 0, response.history)
def test_all(self):
    """Bind a consumer to a distributor.

    Do the following:

    1. Create a repository with a distributor.
    2. Create a consumer.
    3. Bind the consumer to the distributor. Assert that:

       * The response has an HTTP 200 status code.
       * The response body contains the correct values.
    """
    cfg = config.get_config()
    client = api.Client(cfg)

    # Steps 1–2
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body).json()
    self.addCleanup(client.delete, repo['_href'])
    consumer = client.post(CONSUMERS_PATH, {'id': utils.uuid4()}).json()
    self.addCleanup(client.delete, consumer['consumer']['_href'])

    # Step 3. NOTE(review): ``notify_agent: False`` presumably avoids
    # requiring a live consumer agent for the bind — confirm.
    repo = client.get(repo['_href'], params={'details': True}).json()
    path = urljoin(CONSUMERS_PATH, consumer['consumer']['id'] + '/')
    path = urljoin(path, 'bindings/')
    body = {
        'binding_config': {
            'B': 21
        },
        'distributor_id': repo['distributors'][0]['id'],
        'notify_agent': False,
        'repo_id': repo['id'],
    }
    response = client.post(path, body)
    with self.subTest(comment='check response status code'):
        self.assertEqual(response.status_code, 200)
    # The 'result' payload must echo back the binding we requested.
    result = response.json()['result']
    with self.subTest(comment='check response body'):
        self.assertEqual(result['binding_config'], body['binding_config'])
        self.assertEqual(result['consumer_id'], consumer['consumer']['id'])
        self.assertEqual(result['distributor_id'], body['distributor_id'])
        self.assertEqual(result['repo_id'], body['repo_id'])
def health_check(self):
    """Execute step three of the test plan."""
    client = api.Client(self.cfg, api.json_handler)
    repo_body = gen_repo()
    repo_body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo_body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(client.delete, repo['_href'])
    repo = client.get(repo['_href'], params={'details': True})
    utils.sync_repo(self.cfg, repo)
    utils.publish_repo(self.cfg, repo)
    # Download the RPM from Pulp and compare it byte-for-byte with the
    # upstream file.
    pulp_rpm = get_unit(self.cfg, repo['distributors'][0], RPM).content
    upstream_rpm = utils.http_get(RPM_SIGNED_URL)
    self.assertEqual(upstream_rpm, pulp_rpm)
def setUpClass(cls):
    """Create an RPM repository with a feed and distributor."""
    super().setUpClass()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    body['distributors'] = [gen_distributor()]
    try:
        repo = client.post(REPOSITORY_PATH, body)
        cls.resources.add(repo['_href'])
        cls.repo = client.get(repo['_href'], params={'details': True})
    except:  # noqa:E722
        # setUpClass failures skip tearDownClass, so release whatever was
        # registered in ``cls.resources`` before re-raising.
        cls.tearDownClass()
        raise
def create_repo(self, cfg, feed, download_policy):
    """Create an RPM repository with the given feed and download policy.

    Also, schedule the repository for deletion at the end of the current
    test. Return a detailed dict of information about the just-created
    repository.
    """
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config'].update({
        'feed': feed,
        'download_policy': download_policy,
    })
    # Serve the repo under a relative URL equal to its id.
    distributor = gen_distributor()
    distributor['distributor_config']['relative_url'] = body['id']
    body['distributors'] = [distributor]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    return client.get(repo['_href'], params={'details': True})
def do_test(self, feed):
    """Verify ``checksum_type`` is updated on the repo metadata.

    :param feed: The feed URL the repository under test syncs from.
    """
    cfg = config.get_config()
    if check_issue_3104(cfg):
        self.skipTest('https://pulp.plan.io/issues/3104')
    client = api.Client(cfg, api.json_handler)

    # Create and sync a repository.
    body = gen_repo()
    body['importer_config']['feed'] = feed
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])
    utils.sync_repo(cfg, repo)
    repo = client.get(repo['_href'], params={'details': True})
    distributor = repo['distributors'][0]

    # Update checksum type to be "sha256" and publish the repository.
    client.put(distributor['_href'],
               {'distributor_config': {
                   'checksum_type': 'sha256'
               }})
    utils.publish_repo(cfg, repo)
    with self.subTest(comment='primary.xml'):
        self.verify_primary_xml(cfg, distributor, 'sha256')
    with self.subTest(comment='filelists.xml'):
        self.verify_filelists_xml(cfg, distributor, 'sha256')
    with self.subTest(comment='other.xml'):
        self.verify_other_xml(cfg, distributor, 'sha256')
    # prestodelta.xml is only checked for DRPM feeds.
    if feed == DRPM_UNSIGNED_FEED_URL:
        with self.subTest(comment='prestodelta.xml'):
            self.verify_presto_delta_xml(cfg, distributor, 'sha256')

    # Update the checksum type to "sha1", and re-publish the repository.
    # NOTE(review): ``force_full`` appears to request a full (rather than
    # incremental) publish so the metadata is regenerated — confirm.
    client.put(
        distributor['_href'], {
            'distributor_config': {
                'checksum_type': 'sha1',
                'force_full': True
            }
        })
    utils.publish_repo(cfg, repo)
    with self.subTest(comment='primary.xml'):
        self.verify_primary_xml(cfg, distributor, 'sha1')
    with self.subTest(comment='filelists.xml'):
        self.verify_filelists_xml(cfg, distributor, 'sha1')
    with self.subTest(comment='other.xml'):
        self.verify_other_xml(cfg, distributor, 'sha1')
    if feed == DRPM_UNSIGNED_FEED_URL:
        with self.subTest(comment='prestodelta.xml'):
            self.verify_presto_delta_xml(cfg, distributor, 'sha1')
def setUpClass(cls):
    """Create and sync a repository."""
    cls.cfg = config.get_config()
    for issue_check, issue_url in (
            (check_issue_3104, 'https://pulp.plan.io/issues/3104'),
            (check_issue_2620, 'https://pulp.plan.io/issues/2620'),
    ):
        if issue_check(cls.cfg):
            raise unittest.SkipTest(issue_url)
    client = api.Client(cls.cfg, api.json_handler)
    repo_body = gen_repo()
    repo_body['distributors'] = [gen_distributor()]
    repo_body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    cls.repo = client.post(REPOSITORY_PATH, repo_body)
    try:
        # Re-read with details so tests can access importer/distributor
        # info. On failure, release resources before re-raising, since a
        # failed setUpClass skips tearDownClass.
        cls.repo = client.get(cls.repo['_href'], params={'details': True})
    except:  # noqa:E722
        cls.tearDownClass()
        raise
    cls.updateinfo_xml_hrefs = []
def setUpClass(cls):
    """Create three schedules and read, update and delete them."""
    super(ReadUpdateDeleteTestCase, cls).setUpClass()
    client = api.Client(cls.cfg, api.json_handler)

    # Create a repo with a valid feed and sync it
    body = gen_repo()
    body['importer_config']['feed'] = RPM_SIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    utils.sync_repo(cls.cfg, repo)

    # Create schedules
    distributor = gen_distributor()
    client.post(urljoin(repo['_href'], 'distributors/'), distributor)
    scheduling_url = '/'.join([
        'distributors', distributor['distributor_id'], 'schedules/publish/'
    ])
    scheduling_path = urljoin(repo['_href'], scheduling_url)
    cls.schedules = tuple((client.post(scheduling_path,
                                       {'schedule': 'PT30S'})
                           for _ in range(3)))
    cls.responses = {}
    # Switch to a handler that returns raw responses, so tests can inspect
    # status codes and headers of the CRUD calls below.
    client.response_handler = api.safe_handler

    # Attributes that may be changed after creation
    cls.mutable_attrs = [
        'consecutive_failures', 'last_run_at', 'last_updated', 'next_run',
        'first_run', 'remaining_runs', 'total_run_count'
    ]

    # Read the first schedule
    cls.responses['read_one'] = client.get(cls.schedules[0]['_href'])

    # Read all schedules for the repo
    cls.responses['read_many'] = client.get(scheduling_path)

    # Update the second schedule
    cls.update_body = {'schedule': 'PT1M'}
    cls.responses['update'] = client.put(cls.schedules[1]['_href'],
                                         cls.update_body)

    # Delete the third schedule
    cls.responses['delete'] = client.delete(cls.schedules[2]['_href'])
def setUpClass(cls):
    """Create, populate and publish a repository.

    More specifically, do the following:

    1. Create an RPM repository with a distributor.
    2. Populate the repository with an RPM and two errata, where one
       erratum references the RPM, and the other does not.
    3. Publish the repository. Fetch and parse its ``updateinfo.xml``
       file.
    """
    super(UpdateInfoTestCase, cls).setUpClass()
    if check_issue_3104(cls.cfg):
        raise unittest.SkipTest('https://pulp.plan.io/issues/3104')
    # 'partial' has its pkglist deleted, so it references no packages.
    # NOTE(review): presumably _gen_errata's default pkglist references
    # the RPM uploaded below — confirm against _gen_errata.
    cls.errata = {key: _gen_errata() for key in ('full', 'partial')}
    del cls.errata['partial']['pkglist']
    cls.tasks = {}

    # Create a repo.
    client = api.Client(cls.cfg, api.json_handler)
    body = gen_repo()
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    cls.resources.add(repo['_href'])
    try:
        # Populate and publish the repo.
        repo = client.get(repo['_href'], params={'details': True})
        unit = utils.http_get(RPM_UNSIGNED_URL)
        utils.upload_import_unit(cls.cfg, unit, {'unit_type_id': 'rpm'},
                                 repo)
        for key, erratum in cls.errata.items():
            report = utils.upload_import_erratum(cls.cfg, erratum,
                                                 repo['_href'])
            cls.tasks[key] = tuple(api.poll_spawned_tasks(cls.cfg, report))
        utils.publish_repo(cls.cfg, repo)

        # Fetch and parse updateinfo.xml.
        cls.updates_element = (get_repodata(cls.cfg,
                                            repo['distributors'][0],
                                            'updateinfo'))
    except:  # noqa:E722
        # setUpClass failures skip tearDownClass; clean up explicitly.
        cls.tearDownClass()
        raise