def test_verify_extensions_swift(self):
    """Swift capabilities map onto per-extension verification results."""
    def mock_capabilities():
        return (None, {'fake1': 'metadata',
                       'fake2': 'metadata',
                       'not_fake': 'metadata',
                       'swift': 'metadata'})

    fake_os = mock.MagicMock()
    fake_os.capabilities_client.list_capabilities = mock_capabilities
    self.useFixture(fixtures.MockPatchObject(
        verify_tempest_config, 'get_enabled_extensions',
        return_value=(['fake1', 'fake2', 'fake3'])))
    results = verify_tempest_config.verify_extensions(fake_os, 'swift', {})
    self.assertIn('swift', results)
    # Extensions both enabled and discovered verify True; all others False.
    for alias in ('fake1', 'fake2'):
        self.assertIn(alias, results['swift'])
        self.assertTrue(results['swift'][alias])
    for alias in ('fake3', 'not_fake'):
        self.assertIn(alias, results['swift'])
        self.assertFalse(results['swift'][alias])
def test_policy_failure(self):
    """A Forbidden raised by the policy check propagates from filter()."""
    pool_list = objects.PoolList.from_list(
        [{"id": "6c346011-e581-429b-a7a2-6cdf0aba91c3"}])
    self.useFixture(fixtures.MockPatchObject(
        policy, 'check', side_effect=exceptions.Forbidden))
    self.assertRaises(
        exceptions.Forbidden,
        self.test_filter.filter,
        self.context,
        pool_list,
        self.zone,
    )
    # The policy check must have been consulted exactly once with the pool.
    policy.check.assert_called_once_with('zone_create_forced_pool',
                                         self.context, pool_list[0])
def setUp(self):
    """Prepare per-test isolation: temp dirs, strict warning filters, a
    clean request context, and restoration of process-global LDAP options.
    """
    super(BaseTestCase, self).setUp()
    self.useFixture(fixtures.NestedTempfile())
    self.useFixture(fixtures.TempHomeDir())
    # Any unexpected sys.exit() in code under test surfaces as an
    # UnexpectedExit failure instead of killing the test runner.
    self.useFixture(fixtures.MockPatchObject(sys, 'exit',
                                             side_effect=UnexpectedExit))
    self.useFixture(log_fixture.get_logging_handle_error_fixture())
    # Escalate keystone deprecation warnings and SQLAlchemy warnings to
    # errors so they fail tests loudly.
    warnings.filterwarnings('error', category=DeprecationWarning,
                            module='^keystone\\.')
    warnings.simplefilter('error', exc.SAWarning)
    if hasattr(exc, "RemovedIn20Warning"):
        # SQLAlchemy 2.0 migration warnings are tolerated here.
        warnings.simplefilter('ignore', exc.RemovedIn20Warning)
    self.addCleanup(warnings.resetwarnings)
    # Ensure we have an empty threadlocal context at the start of each
    # test.
    self.assertIsNone(oslo_context.get_current())
    self.useFixture(oslo_ctx_fixture.ClearRequestContext())
    # The ldap OPT_* options below are process-global in python-ldap;
    # capture current values and restore them after the test.
    orig_debug_level = ldap.get_option(ldap.OPT_DEBUG_LEVEL)
    self.addCleanup(ldap.set_option, ldap.OPT_DEBUG_LEVEL,
                    orig_debug_level)
    orig_tls_cacertfile = ldap.get_option(ldap.OPT_X_TLS_CACERTFILE)
    if orig_tls_cacertfile is None:
        # Restoring None is not accepted by set_option; use '' instead.
        orig_tls_cacertfile = ''
    self.addCleanup(ldap.set_option, ldap.OPT_X_TLS_CACERTFILE,
                    orig_tls_cacertfile)
    orig_tls_cacertdir = ldap.get_option(ldap.OPT_X_TLS_CACERTDIR)
    # Setting orig_tls_cacertdir to None is not allowed.
    if orig_tls_cacertdir is None:
        orig_tls_cacertdir = ''
    self.addCleanup(ldap.set_option, ldap.OPT_X_TLS_CACERTDIR,
                    orig_tls_cacertdir)
    orig_tls_require_cert = ldap.get_option(ldap.OPT_X_TLS_REQUIRE_CERT)
    self.addCleanup(ldap.set_option, ldap.OPT_X_TLS_REQUIRE_CERT,
                    orig_tls_require_cert)
    # Drop any pooled LDAP connections accumulated during the test.
    self.addCleanup(ks_ldap.PooledLDAPHandler.connection_pools.clear)
def test_view_args_populated_in_policy_dict(self):
    """Verify URL view arguments land in the main policy dict.

    The mocked _enforce only passes when the substituted value shows up
    in the top-level policy dict rather than under a "target" path.
    """
    def _fake_enforce(credentials, action, target, do_raise=True):
        if 'argument_id' not in target:
            raise exception.ForbiddenAction(action=action)

    self.useFixture(fixtures.MockPatchObject(
        self.enforcer, '_enforce', _fake_enforce))
    argument_id = uuid.uuid4().hex
    # Check with a call that will populate view_args.
    with self.test_client() as c:
        r = c.post('/v3/auth/tokens', json=self._auth_json(),
                   follow_redirects=True, expected_status_code=201)
        token_id = r.headers['X-Subject-Token']
        c.get('%s/argument/%s' % (self.restful_api_url_prefix, argument_id),
              headers={'X-Auth-Token': token_id})
        # Use any valid policy as _enforce is mockpatched out
        self.enforcer.enforce_call(action='example:allowed')
        # Without view args the patched check raises ForbiddenAction.
        c.get('%s/argument' % self.restful_api_url_prefix,
              headers={'X-Auth-Token': token_id})
        self.assertRaises(exception.ForbiddenAction,
                          self.enforcer.enforce_call,
                          action='example:allowed')
def test___init___plugin_service_clients_name_conflict(self):
    """Plugins that register duplicate client names raise on init."""
    creds = fake_credentials.FakeKeystoneV3Credentials()
    uri = 'fake_uri'
    # Four plugins all register a client called 'client1'.
    specs = {'serviceA': ('client1.v1', 'fake_path_1', 'SomeClient1'),
             'serviceB': ('client1.v2', 'fake_path_2', 'SomeClient2'),
             'serviceC': ('client1.v1', 'fake_path_2', 'SomeClient1'),
             'serviceD': ('client1.v2', 'fake_path_2', 'SomeClient2')}
    fake_service_clients = {
        service: [{'name': 'client1',
                   'service_version': version,
                   'module_path': path,
                   'client_names': [client]}]
        for service, (version, path, client) in specs.items()}
    # The error message must mention the first two conflicting versions.
    msg = "(?=.*{0})(?=.*{1})".format(*[
        x[1][0]['service_version']
        for x in six.iteritems(fake_service_clients)
    ])
    self.useFixture(
        fixtures.MockPatchObject(clients.ClientsRegistry(),
                                 'get_service_clients',
                                 return_value=fake_service_clients))
    with testtools.ExpectedException(testtools.MultipleExceptions,
                                     value_re=msg):
        clients.ServiceClients(creds, identity_uri=uri)
def test_verify_extensions_neutron(self):
    """Neutron extension listing maps onto verification results."""
    def mock_extension_listing():
        return {'extensions': [{'alias': 'fake1'},
                               {'alias': 'fake2'},
                               {'alias': 'not_fake'}]}

    fake_os = mock.MagicMock()
    fake_os.network_extensions_client.list_extensions = (
        mock_extension_listing)
    self.useFixture(fixtures.MockPatchObject(
        verify_tempest_config, 'get_enabled_extensions',
        return_value=(['fake1', 'fake2', 'fake3'])))
    results = verify_tempest_config.verify_extensions(fake_os, 'neutron',
                                                      {})
    self.assertIn('neutron', results)
    # Extensions both enabled and discovered verify True; all others False.
    for alias in ('fake1', 'fake2'):
        self.assertIn(alias, results['neutron'])
        self.assertTrue(results['neutron'][alias])
    for alias in ('fake3', 'not_fake'):
        self.assertIn(alias, results['neutron'])
        self.assertFalse(results['neutron'][alias])
def setUp(self):
    """Build two sample events and patch publisher lookup for the tests."""
    super(EventPipelineTestCase, self).setUp()
    self.CONF = service.prepare_service([], [])

    def _make_event(event_type, status):
        # Both sample events share the same trait set; only type and
        # raw status differ.
        return models.Event(
            message_id=uuid.uuid4(),
            event_type=event_type,
            generated=datetime.datetime.utcnow(),
            traits=[models.Trait('t_text', 1, 'text_trait'),
                    models.Trait('t_int', 2, 'int_trait'),
                    models.Trait('t_float', 3, 'float_trait'),
                    models.Trait('t_datetime', 4, 'datetime_trait')],
            raw={'status': status})

    self.test_event = _make_event('a', 'started')
    self.test_event2 = _make_event('b', 'stopped')
    self.useFixture(
        fixtures.MockPatchObject(publisher, 'get_publisher',
                                 side_effect=self.get_publisher))
    self._setup_pipeline_cfg()
    self._reraise_exception = True
    self.useFixture(
        fixtures.MockPatch('ceilometer.pipeline.base.LOG.exception',
                           side_effect=self._handle_reraise_exception))
def setUp(self):
    """Prepare a minimal snapcraft.yaml and mock the Launchpad client."""
    super().setUp()
    self.snapcraft_yaml = fixture_setup.SnapcraftYaml(
        self.path, parts={"part0": {"plugin": "nil"}})
    self.useFixture(self.snapcraft_yaml)
    lc_patch = fixtures.MockPatch(
        "snapcraft.cli.remote.LaunchpadClient", autospec=True)
    self.mock_lc_init = self.useFixture(lc_patch).mock
    self.mock_lc = self.mock_lc_init.return_value
    # architectures is a property on the client, so patch it on the type.
    self.mock_lc_architectures = mock.PropertyMock(return_value=["i386"])
    type(self.mock_lc).architectures = self.mock_lc_architectures
    self.mock_lc.has_outstanding_build.return_value = False
    hash_patch = fixtures.MockPatchObject(
        snapcraft.project.Project,
        "_get_project_directory_hash",
        return_value="fakehash123",
    )
    self.mock_project = self.useFixture(hash_patch)
def setUp(self):
    """Mock out filesystem and logging collaborators for MAC handling."""
    super(TestMACHandlers, self).setUp()
    self.mac = 'ff:ff:ff:ff:ff:ff'
    self.dhcp_hostsdir = '/far'
    CONF.set_override('dhcp_hostsdir', self.dhcp_hostsdir,
                      'dnsmasq_pxe_filter')

    def _mock(target, attribute):
        # Patch an attribute for the test's lifetime and hand back the mock.
        return self.useFixture(
            fixtures.MockPatchObject(target, attribute)).mock

    self.mock_join = _mock(os.path, 'join')
    self.mock_join.return_value = "%s/%s" % (self.dhcp_hostsdir, self.mac)
    self.mock__exclusive_write_or_pass = _mock(dnsmasq,
                                               '_exclusive_write_or_pass')
    self.mock_stat = _mock(os, 'stat')
    self.mock_listdir = _mock(os, 'listdir')
    self.mock_remove = _mock(os, 'remove')
    self.mock_log = _mock(dnsmasq, 'LOG')
    self.mock_introspection_active = _mock(node_cache,
                                           'introspection_active')
def test_verify_extensions_nova_all(self):
    """With 'all' enabled, every discovered nova alias is reported."""
    def mock_extension_listing():
        return {'extensions': [{'alias': 'fake1'},
                               {'alias': 'fake2'},
                               {'alias': 'not_fake'}]}

    fake_os = mock.MagicMock()
    fake_os.extensions_client.list_extensions = mock_extension_listing
    self.useFixture(
        fixtures.MockPatchObject(verify_tempest_config,
                                 'get_enabled_extensions',
                                 return_value=['all']))
    results = verify_tempest_config.verify_extensions(fake_os, 'nova', {})
    self.assertIn('nova', results)
    self.assertIn('extensions', results['nova'])
    self.assertEqual(sorted(['fake1', 'fake2', 'not_fake']),
                     sorted(results['nova']['extensions']))
def test_emit_cfg_log_notifier_in_notifier_drivers(self):
    """emit() must not publish when the 'log' driver is configured."""
    self.config(driver=['messaging', 'log'],
                group='oslo_messaging_notifications')
    self.stub_flg = True
    transport = oslo_messaging.get_notification_transport(self.conf)
    notifier = oslo_messaging.Notifier(transport)

    def record_error_call(*args, **kwargs):
        # Flips the flag if the notifier is (unexpectedly) invoked.
        self.stub_flg = False

    self.useFixture(
        fixtures.MockPatchObject(notifier, 'error', record_error_call))
    record = logging.LogRecord(name='name', level='WARN', pathname='/tmp',
                               lineno=1, msg='Message', args=None,
                               exc_info=None)
    self.publisherrorshandler.emit(record)
    # The flag is untouched, i.e. notifier.error was never called.
    self.assertTrue(self.stub_flg)
def setUp(self):
    """Wire up a finished node, mocked ironic client and DB record for
    reapply tests.
    """
    super(TestReapplyNode, self).setUp()
    CONF.set_override('processing_hooks',
                      '$processing.default_processing_hooks,example',
                      'processing')
    CONF.set_override('store_data', 'swift', 'processing')
    self.data['macs'] = self.macs
    self.ports = self.all_ports
    self.node_info = node_cache.NodeInfo(uuid=self.uuid,
                                         started_at=self.started_at,
                                         node=self.node)
    self.node_info.invalidate_cache = mock.Mock()
    # side_effect with an iterable: each port.create() call yields the
    # next port in the list.
    self.cli.port.create.side_effect = self.ports
    self.cli.node.update.return_value = self.node
    self.cli.node.list_ports.return_value = []
    # Reapply targets a node whose introspection already finished.
    self.node_info._state = istate.States.finished
    self.commit_fixture = self.useFixture(
        fixtures.MockPatchObject(node_cache.NodeInfo, 'commit',
                                 autospec=True))
    # Persist a DB record mirroring the cached node state.
    # NOTE(review): presumably needed so cache lookups succeed — confirm.
    db.Node(uuid=self.node_info.uuid,
            state=self.node_info._state,
            started_at=self.node_info.started_at,
            finished_at=self.node_info.finished_at,
            error=self.node_info.error).save(self.session)
def setUp(self):
    """Reset rules, mock the ironic client and load the test config."""
    super(Base, self).setUp()
    rules.delete_all()
    self.cli_fixture = self.useFixture(
        fixtures.MockPatchObject(ir_utils, 'get_client'))
    self.cli = self.cli_fixture.mock.return_value
    self.cli.node.get.return_value = self.node
    self.cli.node.update.return_value = self.node
    self.cli.node.list.return_value = [self.node]
    # Expected JSON patch applied to the node's properties.
    self.patch = [
        {'op': 'add', 'path': '/properties/cpus', 'value': '4'},
        {'path': '/properties/cpu_arch', 'value': 'x86_64', 'op': 'add'},
        {'op': 'add', 'path': '/properties/memory_mb', 'value': '12288'},
        {'path': '/properties/local_gb', 'value': '999', 'op': 'add'}
    ]
    # Variant of the patch when root device hints are in effect
    # (differs only in local_gb).
    self.patch_root_hints = [
        {'op': 'add', 'path': '/properties/cpus', 'value': '4'},
        {'path': '/properties/cpu_arch', 'value': 'x86_64', 'op': 'add'},
        {'op': 'add', 'path': '/properties/memory_mb', 'value': '12288'},
        {'path': '/properties/local_gb', 'value': '19', 'op': 'add'}
    ]
    self.node.power_state = 'power off'
    self.cfg = self.useFixture(config_fixture.Config())
    conf_file = get_test_conf_file()
    self.cfg.set_config_files([conf_file])
    # FIXME(milan) FakeListener.poll calls time.sleep() which leads to
    # busy polling with no sleep at all, effectively blocking the whole
    # process by consuming all CPU cycles in a single thread. MonkeyPatch
    # with eventlet.sleep seems to help this.
    self.useFixture(fixtures.MonkeyPatch(
        'oslo_messaging._drivers.impl_fake.time.sleep', eventlet.sleep))
def test_verify_extensions_cinder(self):
    """Cinder extension listing maps onto verification results."""
    def mock_extension_listing():
        return {'extensions': [{'alias': 'fake1'},
                               {'alias': 'fake2'},
                               {'alias': 'not_fake'}]}

    fake_os = mock.MagicMock()
    # NOTE (e0ne): mock both v1 and v2 APIs
    fake_os.volumes_extension_client.list_extensions = (
        mock_extension_listing)
    fake_os.volumes_v2_extension_client.list_extensions = (
        mock_extension_listing)
    self.useFixture(fixtures.MockPatchObject(
        verify_tempest_config, 'get_enabled_extensions',
        return_value=(['fake1', 'fake2', 'fake3'])))
    results = verify_tempest_config.verify_extensions(fake_os, 'cinder',
                                                      {})
    self.assertIn('cinder', results)
    # Extensions both enabled and discovered verify True; all others False.
    for alias in ('fake1', 'fake2'):
        self.assertIn(alias, results['cinder'])
        self.assertTrue(results['cinder'][alias])
    for alias in ('fake3', 'not_fake'):
        self.assertIn(alias, results['cinder'])
        self.assertFalse(results['cinder'][alias])
def _setup_enforcer_object(self):
    """Create an RBACEnforcer restricted to the test-local policy rules."""
    self.enforcer = rbac_enforcer.enforcer.RBACEnforcer()
    self.cleanup_instance('enforcer')

    def register_new_rules(enforcer):
        # Substitute for register_rules: install only the testing rules.
        rules = self._testing_policy_rules()
        enforcer.register_defaults(rules)

    self.useFixture(
        fixtures.MockPatchObject(self.enforcer, 'register_rules',
                                 register_new_rules))
    # Set the possible actions to our limited list
    original_actions = rbac_enforcer.enforcer._POSSIBLE_TARGET_ACTIONS
    rbac_enforcer.enforcer._POSSIBLE_TARGET_ACTIONS = frozenset(
        [rule.name for rule in self._testing_policy_rules()])
    # RESET the FrozenSet of possible target actions to the original
    # value
    self.addCleanup(setattr, rbac_enforcer.enforcer,
                    '_POSSIBLE_TARGET_ACTIONS', original_actions)
    # Force a reset on the enforcer to load up new policy rules.
    self.enforcer._reset()
def test_call_build_enforcement_target(self):
    """The build_target callable's result is flattened into the target."""
    ref_uuid = uuid.uuid4().hex

    def _check_target(credentials, action, target, do_raise=True):
        # The dict from _build_enforcement_target must appear flattened
        # under 'target.domain.id'.
        self.assertIn('target.domain.id', target)
        self.assertEqual(target['target.domain.id'], ref_uuid)

    def _build_enforcement_target():
        return {'domain': {'id': ref_uuid}}

    self.useFixture(fixtures.MockPatchObject(
        self.enforcer, '_enforce', _check_target))
    argument_id = uuid.uuid4().hex
    with self.test_client() as c:
        r = c.post('/v3/auth/tokens', json=self._auth_json(),
                   follow_redirects=True, expected_status_code=201)
        token_id = r.headers['X-Subject-Token']
        c.get('%s/argument/%s' % (self.restful_api_url_prefix, argument_id),
              headers={'X-Auth-Token': token_id})
        self.enforcer.enforce_call(
            action='example:allowed',
            build_target=_build_enforcement_target)
def setUp(self):
    """Build an IptablesFilter with its collaborators mocked out."""
    super(TestIptablesDriver, self).setUp()
    CONF.set_override('rootwrap_config', '/some/fake/path')

    def _mock(target, attribute):
        # Patch an attribute for the test's lifetime and hand back the mock.
        return self.useFixture(
            fixtures.MockPatchObject(target, attribute)).mock

    # NOTE(milan) we ignore the state checking in order to avoid having to
    # always call e.g self.driver.init_filter() to set proper driver state
    self.mock_fsm = _mock(iptables.IptablesFilter, 'fsm')
    self.mock_call = _mock(iptables.processutils, 'execute')
    # The class-level patches above must be in place before construction.
    self.driver = iptables.IptablesFilter()
    self.mock_iptables = _mock(self.driver, '_iptables')
    self.mock_should_enable_dhcp = _mock(iptables, '_should_enable_dhcp')
    self.mock_get_inactive_macs = _mock(pxe_filter, 'get_inactive_macs')
    self.mock_get_inactive_macs.return_value = set()
    self.mock_get_active_macs = _mock(pxe_filter, 'get_active_macs')
    self.mock_get_active_macs.return_value = set()
    self.mock_ironic = mock.Mock()
    self.mock_ironic.ports.return_value = []
def setUp(self):
    """Replace the driver's finite-state machine with a mock so state
    transitions can be asserted without real state checking.
    """
    super(TestDriverReset, self).setUp()
    fsm_fixture = fixtures.MockPatchObject(self.driver, 'fsm')
    self.mock_fsm = self.useFixture(fsm_fixture).mock
def setUp(self):
    """Stub the driver manager so it always yields a fake filter driver."""
    super(TestDriver, self).setUp()
    self.mock_driver = mock.Mock(spec=interface.FilterDriver)
    manager_fixture = fixtures.MockPatchObject(pxe_filter,
                                               '_driver_manager')
    self.mock__driver_manager = self.useFixture(manager_fixture).mock
    self.mock__driver_manager.return_value.driver = self.mock_driver
def patch(self, obj, attr):
    """Mock-patch ``attr`` on ``obj`` for the test's lifetime.

    :returns: the mock standing in for the patched attribute.
    """
    return self.useFixture(fixtures.MockPatchObject(obj, attr)).mock
def setUp(self):
    """Build the functional-test environment: VCR cassette recording or
    replay, fresh GitHub repos for two test accounts, a test web app and
    a clean redis.
    """
    super(FunctionalTestBase, self).setUp()
    self.pr_counter = 0
    self.git_counter = 0
    self.cassette_library_dir = os.path.join(CASSETTE_LIBRARY_DIR_BASE,
                                             self._testMethodName)
    # Recording stuffs
    if RECORD:
        # Start from an empty cassette directory when re-recording.
        if os.path.exists(self.cassette_library_dir):
            shutil.rmtree(self.cassette_library_dir)
        os.makedirs(self.cassette_library_dir)
    self.recorder = vcr.VCR(
        cassette_library_dir=self.cassette_library_dir,
        record_mode="all" if RECORD else "none",
        match_on=['method', 'uri'],
        # Strip credentials and connection noise from recorded cassettes.
        filter_headers=[
            ('Authorization', '<TOKEN>'),
            ('X-Hub-Signature', '<SIGNATURE>'),
            ('User-Agent', None),
            ('Accept-Encoding', None),
            ('Connection', None),
        ],
        before_record_response=self.response_filter,
        custom_patches=((github.MainClass, 'HTTPSConnection',
                         vcr.stubs.VCRHTTPSConnection), ))
    self.useFixture(
        fixtures.MockPatchObject(branch_updater.utils, 'Gitter',
                                 lambda: self.get_gitter()))
    self.useFixture(
        fixtures.MockPatchObject(backports.utils, 'Gitter',
                                 lambda: self.get_gitter()))
    # Web authentification always pass
    self.useFixture(
        fixtures.MockPatch('hmac.compare_digest', return_value=True))
    # Repo name: random when recording, replayed from the cassette
    # directory otherwise so requests match the recorded URIs.
    reponame_path = os.path.join(self.cassette_library_dir, "reponame")
    if RECORD:
        REPO_UUID = str(uuid.uuid4())
        with open(reponame_path, "w") as f:
            f.write(REPO_UUID)
    else:
        with open(reponame_path, "r") as f:
            REPO_UUID = f.read()
    self.name = "repo-%s-%s" % (REPO_UUID, self._testMethodName)
    utils.setup_logging()
    config.log()
    self.git = self.get_gitter()
    self.addCleanup(self.git.cleanup)
    web.app.testing = True
    self.app = web.app.test_client()
    # NOTE(sileht): Prepare a fresh redis
    self.redis = utils.get_redis_for_cache()
    self.redis.flushall()
    self.subscription = {"token": config.MAIN_TOKEN, "subscribed": False}
    self.redis.set("subscription-cache-%s" % config.INSTALLATION_ID,
                   json.dumps(self.subscription))
    # Let's start recording
    cassette = self.recorder.use_cassette("http.json")
    cassette.__enter__()
    self.addCleanup(cassette.__exit__)
    self.session = requests.Session()
    self.session.trust_env = False
    # Cleanup the remote testing redis
    r = self.session.delete(
        "https://gh.mergify.io/events-testing",
        data=FAKE_DATA,
        headers={"X-Hub-Signature": "sha1=" + FAKE_HMAC})
    r.raise_for_status()
    self.g_main = github.Github(config.MAIN_TOKEN)
    self.g_fork = github.Github(config.FORK_TOKEN)
    self.u_main = self.g_main.get_user()
    self.u_fork = self.g_fork.get_user()
    assert self.u_main.login == "mergify-test1"
    assert self.u_fork.login == "mergify-test2"
    self.r_main = self.u_main.create_repo(self.name)
    self.url_main = "https://github.com/%s" % self.r_main.full_name
    self.url_fork = "https://github.com/%s/%s" % (self.u_fork.login,
                                                  self.r_main.name)
    # Limit installations/subscription API to the test account
    install = {
        "id": config.INSTALLATION_ID,
        "target_type": "User",
        "account": {
            "login": "******"
        }
    }
    self.useFixture(
        fixtures.MockPatch('mergify_engine.utils.get_installations',
                           lambda integration: [install]))
    real_get_subscription = utils.get_subscription

    def fake_subscription(r, install_id):
        # Only the configured test installation has a real subscription.
        if int(install_id) == config.INSTALLATION_ID:
            return real_get_subscription(r, install_id)
        else:
            return {"token": None, "subscribed": False}

    self.useFixture(
        fixtures.MockPatch(
            "mergify_engine.actions.merge.utils.get_subscription",
            side_effect=fake_subscription))
    self.useFixture(
        fixtures.MockPatch("mergify_engine.web.utils.get_subscription",
                           side_effect=fake_subscription))
    self.useFixture(
        fixtures.MockPatch(
            "github.MainClass.Installation.Installation.get_repos",
            return_value=[self.r_main]))
def _mock_request_method(self, method=None, body=None):
    """Patch the named HTTP method on the client.

    The patched method returns the canned response paired with ``body``.

    :returns: the mock installed for the method.
    """
    fixture = fixtures.MockPatchObject(
        self.client, method, autospec=True,
        return_value=(self.resp, body))
    return self.useFixture(fixture).mock
def setUp(self):
    """Mock the introspection-activity lookup used by the tests."""
    super(TestShouldEnableUnknownHosts, self).setUp()
    active_patch = fixtures.MockPatchObject(node_cache,
                                            'introspection_active')
    self.mock_introspection_active = self.useFixture(active_patch).mock
def test_send_receive(self):
    """Verify out-of-order replies get routed to the correct waiters.

    Three sender threads are used: the first is deliberately paused
    before polling for its reply, the second completes normally (and so
    queues the first thread's reply), the third sends without waiting
    for a reply at all.
    """
    transport = oslo_messaging.get_transport(self.conf,
                                             'kombu+memory:////')
    self.addCleanup(transport.cleanup)
    driver = transport._driver
    target = oslo_messaging.Target(topic='testtopic')
    listener = driver.listen(target, None, None)._poll_style_listener
    senders = []
    replies = []
    msgs = []
    wait_conditions = []
    orig_reply_waiter = amqpdriver.ReplyWaiter.wait

    def reply_waiter(self, msg_id, timeout):
        # If a condition is queued, block this thread on it so the test
        # controls when it resumes waiting for its reply.
        if wait_conditions:
            cond = wait_conditions.pop()
            with cond:
                cond.notify()
            with cond:
                cond.wait()
        return orig_reply_waiter(self, msg_id, timeout)

    self.useFixture(
        fixtures.MockPatchObject(amqpdriver.ReplyWaiter, 'wait',
                                 reply_waiter))

    def send_and_wait_for_reply(i, wait_for_reply):
        # Thread body: send one message, record the (possibly None) reply.
        replies.append(
            driver.send(target, {}, {'tx_id': i},
                        wait_for_reply=wait_for_reply,
                        timeout=None))

    while len(senders) < 2:
        t = threading.Thread(target=send_and_wait_for_reply,
                             args=(len(senders), True))
        t.daemon = True
        senders.append(t)
    # test the case then msg_id is not set
    t = threading.Thread(target=send_and_wait_for_reply,
                         args=(len(senders), False))
    t.daemon = True
    senders.append(t)
    # Start the first guy, receive his message, but delay his polling
    notify_condition = threading.Condition()
    wait_conditions.append(notify_condition)
    with notify_condition:
        senders[0].start()
        notify_condition.wait()
    msgs.extend(listener.poll())
    self.assertEqual({'tx_id': 0}, msgs[-1].message)
    # Start the second guy, receive his message
    senders[1].start()
    msgs.extend(listener.poll())
    self.assertEqual({'tx_id': 1}, msgs[-1].message)
    # Reply to both in order, making the second thread queue
    # the reply meant for the first thread
    msgs[0].reply({'rx_id': 0})
    msgs[1].reply({'rx_id': 1})
    # Wait for the second thread to finish
    senders[1].join()
    # Start the 3rd guy, receive his message
    senders[2].start()
    msgs.extend(listener.poll())
    self.assertEqual({'tx_id': 2}, msgs[-1].message)
    # Verify the _send_reply was not invoked by driver:
    with mock.patch.object(msgs[2], '_send_reply') as method:
        msgs[2].reply({'rx_id': 2})
        self.assertEqual(0, method.call_count)
    # Wait for the 3rd thread to finish
    senders[2].join()
    # Let the first thread continue
    with notify_condition:
        notify_condition.notify()
    # Wait for the first thread to finish
    senders[0].join()
    # Verify replies were received out of order
    self.assertEqual(len(senders), len(replies))
    self.assertEqual({'rx_id': 1}, replies[0])
    self.assertIsNone(replies[1])
    self.assertEqual({'rx_id': 0}, replies[2])
def setUp(self):
    """Select the libvirt driver backed by a fully faked libvirt and
    skip real host initialization.
    """
    self.flags(compute_driver='libvirt.LibvirtDriver')
    self.useFixture(fakelibvirt.FakeLibvirtFixture())
    self.useFixture(
        fixtures.MockPatchObject(host.Host, 'initialize'))
    super(TestLibvirtErrorNotificationSample, self).setUp()
def setUp(self):
    """Provide a fake HTTP layer and disable response error checking."""
    self.fake_http = fake_http.fake_httplib2()
    super(TestRestClientUpdateHeaders, self).setUp()
    error_checker = fixtures.MockPatchObject(self.rest_client,
                                             '_error_checker')
    self.useFixture(error_checker)
    self.headers = {'X-Configuration-Session': 'session_id'}
def test_head(self):
    """A HEAD request reports 'HEAD' as its dispatched method."""
    # Bypass response validation; only the method string matters here.
    self.useFixture(fixtures.MockPatchObject(self.rest_client,
                                             'response_checker'))
    _, return_dict = self.rest_client.head(self.url)
    self.assertEqual('HEAD', return_dict['method'])
def setUp(self):
    """Provide a fake HTTP layer and disable response error checking."""
    self.fake_http = fake_http.fake_httplib2()
    super(TestRestClientHTTPMethods, self).setUp()
    error_checker = fixtures.MockPatchObject(self.rest_client,
                                             '_error_checker')
    self.useFixture(error_checker)
def _patch_ob(self, *args, **kwargs):
    """Install a MockPatchObject fixture built from the given arguments."""
    self.useFixture(fixtures.MockPatchObject(*args, **kwargs))
def test_head(self):
    """A HEAD response carries the expected headers."""
    # Bypass response validation; only the returned headers matter here.
    self.useFixture(fixtures.MockPatchObject(self.rest_client,
                                             'response_checker'))
    resp, _ = self.rest_client.head(self.url)
    self._verify_headers(resp)