def test_conflicting_stopped(self):
    """Test with :data:`CONFLICTING_SERVICES` stopped."""
    # Stop every service that would conflict with the command under
    # test, then execute the command itself with root privileges.
    conflicting = (
        'pulp_celerybeat',
        'pulp_resource_manager',
        'pulp_workers',
    )
    cli.GlobalServiceManager(self.cfg).stop(conflicting)
    cli.Client(self.cfg).run(self.cmd, sudo=True)
def test_resource_manager_running(self):
    """Test with ``pulp_resource_manager`` running.

    This test targets `Pulp #2684 <https://pulp.plan.io/issues/2684>`_.
    """
    if not selectors.bug_is_fixed(2684, self.cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2684')
    # Use ``self.cfg`` (already consulted above) instead of re-reading
    # the configuration, for consistency with this class's other tests.
    # Stop every conflicting service except pulp_resource_manager.
    cli.GlobalServiceManager(self.cfg).stop(
        CONFLICTING_SERVICES.difference(('pulp_resource_manager',)))
    self._do_test()
def setUp(self):
    """Ensure there is only one Pulp worker.

    Append ``PULP_CONCURRENCY=1`` to the Pulp workers configuration
    file and restart Pulp's services so the setting takes effect.
    """
    self.cfg = config.get_config()
    # ``selectors.bug_is_untestable`` is the deprecated inverse of
    # ``bug_is_fixed``; spell it the way the rest of this module does.
    if not selectors.bug_is_fixed(2835, self.cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2835')
    # The whole command runs through ``bash -c`` so that the shell
    # redirection (``>>``) is itself executed with elevated privileges.
    sudo = '' if utils.is_root(self.cfg) else 'sudo'
    cli.Client(self.cfg).machine.session().run(
        "{} bash -c 'echo PULP_CONCURRENCY=1 >> {}'".format(
            sudo, _PULP_WORKERS_CFG))
    # Restart so the new concurrency setting is picked up.
    cli.GlobalServiceManager(self.cfg).restart(PULP_SERVICES)
def test_all(self):
    """Test whether ``httpd`` dispatches a task while the broker is down.

    This test targets the following issues:

    * `Pulp Smash #650 <https://github.com/PulpQE/pulp-smash/issues/650>`_
    * `Pulp #2770 <https://pulp.plan.io/issues/2770>`_

    This test does the following:

    1. Create a repository.
    2. Stop the AMQP broker. (Also, schedule it to be re-started
       later!)
    3. Sync the repository, ignore any errors that are returned when
       doing so, and assert that no tasks are left in the ``waiting``
       state.
    """
    cfg = config.get_config()
    # ``selectors.bug_is_untestable`` is the deprecated inverse of
    # ``bug_is_fixed``; spell it the way the rest of this module does.
    if not selectors.bug_is_fixed(2770, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2770')

    # Create a repository.
    client = api.Client(cfg, api.json_handler)
    body = gen_repo()
    body['importer_config']['feed'] = RPM_UNSIGNED_FEED_URL
    repo = client.post(REPOSITORY_PATH, body)
    self.addCleanup(client.delete, repo['_href'])

    # Stop the AMQP broker, and schedule it to be restarted even if the
    # remainder of this test fails.
    broker = [utils.get_broker(cfg)]
    svc_mgr = cli.GlobalServiceManager(cfg)
    svc_mgr.stop(broker)
    self.addCleanup(svc_mgr.start, broker)

    # Sync the repo. The sync may fail (the broker is down); what
    # matters is that no tasks linger in the "waiting" state afterward.
    try:
        utils.sync_repo(cfg, repo)
    except HTTPError:
        pass
    tasks = client.post(urljoin(TASKS_PATH, 'search/'), {
        'criteria': {
            'fields': [
                'finish_time',
                'start_time',
                'state',
                'tags',
                'task_id',
            ],
            'filters': {'state': {'$in': ['waiting']}},
        }
    })
    self.assertEqual(len(tasks), 0, tasks)
def test_all(self):
    """Test Pulp's handling of its ``PULP_MAX_TASKS_PER_CHILD`` setting.

    The test proceeds in four steps (see the numbered comments below):
    verify the flag is absent, set it and verify it appears on worker
    processes, exercise a worker with real work, then unset it and
    verify it disappears again.
    """
    cfg = config.get_config()
    if not selectors.bug_is_fixed(2172, cfg.pulp_version):
        self.skipTest('https://pulp.plan.io/issues/2172')
    pulp_3540_testable = selectors.bug_is_fixed(3540, cfg.pulp_version)
    if os_is_f27(cfg) and not pulp_3540_testable:
        self.skipTest('https://pulp.plan.io/issues/3540')
    svc_mgr = cli.GlobalServiceManager(cfg)
    # Prefix every shell command with "sudo" unless already root.
    sudo = () if cli.is_root(cfg) else ('sudo', )
    # sed command that enables PULP_MAX_TASKS_PER_CHILD=2 in place
    # (matches the commented-out or already-set line).
    set_cmd = sudo + (
        'sed', '-i', '-e',
        's/.*PULP_MAX_TASKS_PER_CHILD=[0-9]*$/PULP_MAX_TASKS_PER_CHILD=2/',
        '/etc/default/pulp_workers')
    # sed command that comments the setting back out, restoring the
    # original configuration.
    unset_cmd = sudo + (
        'sed', '-i', '-e',
        's/^PULP_MAX_TASKS_PER_CHILD=2$/# PULP_MAX_TASKS_PER_CHILD=2/',
        '/etc/default/pulp_workers')
    # Snapshots of the worker process list, taken at each step, so a
    # failing assertion can show the full history.
    procs_over_time = []

    # Step 1: the flag must not be present before we set it.
    procs_over_time.append(get_pulp_worker_procs(cfg))
    for proc in procs_over_time[-1]:
        self.assertNotIn('--maxtasksperchild=2', proc, procs_over_time)

    # Step 2: enable the setting and restart. Cleanups run LIFO, so at
    # teardown the setting is unset, then (optionally) we sleep to let
    # workers settle, then services are restarted with the original
    # configuration.
    client = cli.Client(cfg)
    client.run(set_cmd)
    self.addCleanup(svc_mgr.restart, PULP_SERVICES)
    if not pulp_3540_testable:
        self.addCleanup(time.sleep, 30)
    self.addCleanup(client.run, unset_cmd)
    svc_mgr.restart(PULP_SERVICES)
    procs_over_time.append(get_pulp_worker_procs(cfg))
    for proc in procs_over_time[-1]:
        self.assertIn('--maxtasksperchild=2', proc, procs_over_time)

    # Step 3: give a worker real work (create and sync a repo) so child
    # recycling can actually occur.
    repo_id = utils.uuid4()
    proc = client.run(('pulp-admin', 'rpm', 'repo', 'create', '--repo-id',
                       repo_id, '--feed', RPM_UNSIGNED_FEED_URL))
    self.addCleanup(
        client.run,
        ('pulp-admin', 'rpm', 'repo', 'delete', '--repo-id', repo_id))
    self.assertNotIn('Task Failed', proc.stdout)
    proc = client.run(
        ('pulp-admin', 'rpm', 'repo', 'sync', 'run', '--repo-id', repo_id))
    self.assertNotIn('Task Failed', proc.stdout)

    # Step 4: run the cleanups now (unset + restart) and verify the
    # flag is gone from the worker processes again.
    self.doCleanups()
    procs_over_time.append(get_pulp_worker_procs(cfg))
    for proc in procs_over_time[-1]:
        self.assertNotIn('--maxtasksperchild=2', proc, procs_over_time)
def setUp(self):
    """Provide a server config and Pulp services to stop and start."""
    self.cfg = config.get_config()
    # Skip on any known-blocking issue, in the same order as before.
    for issue_id, check in (
            (3104, check_issue_3104),
            (2277, check_issue_2277),
            (2387, check_issue_2387)):
        if check(self.cfg):
            self.skipTest(
                'https://pulp.plan.io/issues/{}'.format(issue_id))
    self.broker = (get_broker(self.cfg),)
    self.svc_mgr = cli.GlobalServiceManager(self.cfg)
def create_sync_publish_repo(
        self, cfg, importer_config, distributors=None):
    """Create, sync and publish a repository.

    Specifically do the following:

    1. Create a repository and schedule it for deletion.
    2. Sync and publish the repository.
    3. Make Crane immediately re-read the metadata files published by
       Pulp. (Restart Apache)

    :param cfg: Information about a Pulp deployment.
    :param importer_config: An importer configuration to pass when
        creating the repository. For example: ``{'feed': '…'}``.
    :param distributors: Distributor configurations to pass when
        creating the repository. If no value is passed, one will be
        generated.
    :returns: A detailed dict of information about the repository.
    """
    # Assemble the repository body, generating a distributor when the
    # caller did not supply any.
    repo_body = gen_repo()
    repo_body['importer_config'].update(importer_config)
    repo_body['distributors'] = (
        [gen_distributor()] if distributors is None else distributors)

    # Create the repository, and schedule it for deletion.
    client = api.Client(cfg, api.json_handler)
    repo = client.post(REPOSITORY_PATH, repo_body)
    self.addCleanup(client.delete, repo['_href'])

    # Sync and publish, then bounce Apache so Crane re-reads the
    # metadata files Pulp just published.
    repo = client.get(repo['_href'], params={'details': True})
    sync_repo(cfg, repo)
    publish_repo(cfg, repo)
    cli.GlobalServiceManager(cfg).restart(('httpd',))
    return client.get(repo['_href'], params={'details': True})
def reset_squid(cfg):
    """Stop Squid, reset its cache directory, and restart it.

    :param pulp_smash.config.PulpSmashConfig cfg: Information about a
        Pulp host.
    :returns: Nothing.
    """
    squid_version = _get_squid_version(cfg)
    svc_mgr = cli.GlobalServiceManager(cfg)
    client = cli.Client(cfg)

    svc_mgr.stop(('squid',))

    # Wipe the cache directory and recreate it with the SELinux
    # context, mode, and ownership Squid expects.
    cache_dir = '/var/spool/squid'
    for command in (
            ('rm', '-rf', cache_dir),
            ('mkdir', '--context=system_u:object_r:squid_cache_t:s0',
             '--mode=750', cache_dir),
            ('chown', 'squid:squid', cache_dir)):
        client.run(command, sudo=True)

    # Initialize the cache directory. Squid 4+ needs ``--foreground``
    # here.
    zap_cmd = ('squid', '-z')
    if squid_version >= Version('4'):
        zap_cmd += ('--foreground',)
    client.run(zap_cmd, sudo=True)

    svc_mgr.start(('squid',))
def test_all(self):
    """Test that Pulp deals well with missing workers."""
    # Create a repository. No repository addCleanup is necessary,
    # because Pulp will be reset after this test.
    client = api.Client(self.cfg, api.json_handler)
    body = gen_repo()
    # The importer config key is ``feed``, not ``feed_url`` — matching
    # the ``client.put`` below and the other tests in this module. With
    # the wrong key the initial sync would have no feed to work on.
    body['importer_config']['feed'] = RPM_MIRRORLIST_LARGE
    body['distributors'] = [gen_distributor()]
    repo = client.post(REPOSITORY_PATH, body)
    repo = client.get(repo['_href'], params={'details': True})

    # Start syncing the repository (large mirrorlist, so the sync is
    # still in flight) and restart pulp_workers mid-sync.
    client.response_handler = api.code_handler
    client.post(urljoin(repo['_href'], 'actions/sync/'))
    cli.GlobalServiceManager(self.cfg).restart(('pulp_workers',))

    # Update and sync the repository; this must succeed even though the
    # previous sync's worker went away.
    client.response_handler = api.safe_handler
    client.put(repo['_href'], {
        'importer_config': {'feed': RPM_UNSIGNED_FEED_URL},
    })
    utils.sync_repo(self.cfg, repo)
def test_celerybeat_running(self):
    """Test with ``pulp_celerybeat`` running."""
    # Use ``self.cfg`` instead of re-reading the configuration, for
    # consistency with this class's other test methods. Stop every
    # conflicting service except pulp_celerybeat.
    cli.GlobalServiceManager(self.cfg).stop(
        CONFLICTING_SERVICES.difference(('pulp_celerybeat',)))
    self._do_test()
def test_required_stopped(self):
    """Test with :data:`REQUIRED_SERVICES` stopped."""
    svc_mgr = cli.GlobalServiceManager(self.cfg)
    svc_mgr.stop(REQUIRED_SERVICES)
    self._do_test()
def tearDown(self):
    """Start all of Pulp's services."""
    # Restart everything this class's tests may have stopped.
    all_services = REQUIRED_SERVICES | CONFLICTING_SERVICES
    cli.GlobalServiceManager(config.get_config()).start(all_services)