def test_object_graph_partial_use():
    """
    Partial initialization succeeds partially and is recoverable.

    The first resolution of "second" raises; a retry must re-invoke the
    failed factory only, leaving already-resolved components cached.
    """
    registry = Registry()
    create_first = Mock()
    create_first.return_value = "first"
    create_second = Mock()
    # first resolution attempt raises, the retry succeeds
    create_second.side_effect = [Exception, "second"]
    create_third = Mock()
    # FIX: was `create_third.side_effect = "third"`. Mock treats any
    # non-callable iterable side_effect as a sequence of per-call results,
    # and a bare string iterates character by character, so the factory
    # would have returned 't'. Wrap it in a list so the single successful
    # call returns the whole string.
    create_third.side_effect = ["third"]
    registry.bind("first", create_first)
    registry.bind("second", create_second)
    registry.bind("third", create_third)
    graph = create_object_graph("test", registry=registry)
    # exception raised from initial call to create_second
    assert_that(calling(graph.use).with_args("first", "second", "third"),
                raises(Exception))
    # first and second were called, but not third
    assert_that(create_first.call_count, is_(equal_to(1)))
    assert_that(create_second.call_count, is_(equal_to(1)))
    assert_that(create_third.call_count, is_(equal_to(0)))
    # second call succeeds
    [first, second, third] = graph.use("first", "second", "third")
    # first was not called, second was called again, and third called for the first time
    assert_that(create_first.call_count, is_(equal_to(1)))
    assert_that(create_second.call_count, is_(equal_to(2)))
    assert_that(create_third.call_count, is_(equal_to(1)))
def test_rmtree_ignore_unlink_rmdir_exception(self):
    # Purpose: rmtree(..., True) must swallow OSErrors from unlink/rmdir
    # when ignore_errors is set, and still visit the whole tree.
    dir1_list = ["dir2", "file"]
    empty_list = []
    mock_listdir = Mock()
    # first listdir call -> contents of dir1; second -> empty dir1/dir2
    mock_listdir.side_effect = [dir1_list, empty_list]
    mock_isdir = Mock()
    # "dir2" is a directory, "file" is not
    mock_isdir.side_effect = [True, False]
    mock_unlink = Mock()
    # unlink of the file fails; must be ignored
    mock_unlink.side_effect = [OSError]
    mock_rmdir = Mock()
    # subdir rmdir succeeds, final rmdir of dir1 fails; must be ignored
    mock_rmdir.side_effect = [0, OSError]
    mock_islink = Mock()
    mock_islink.return_value = False
    with nested(patch("gluster.gfapi.Volume.listdir", mock_listdir),
                patch("gluster.gfapi.Volume.isdir", mock_isdir),
                patch("gluster.gfapi.Volume.islink", mock_islink),
                patch("gluster.gfapi.Volume.unlink", mock_unlink),
                patch("gluster.gfapi.Volume.rmdir", mock_rmdir)):
        self.vol.rmtree("dir1", True)
        mock_rmdir.assert_any_call("dir1/dir2")
        mock_unlink.assert_called_once_with("dir1/file")
        mock_rmdir.assert_called_with("dir1")
def test_agent_policies(self):
    """Agent requests must pass BOTH the service and the resource policy
    checks; a Deny from either side denies the whole request."""
    # set up data
    gc = Mock()
    service_key = "service_key"
    resource_id = "resource_id"
    pdpm = PolicyDecisionPointManager(gc)
    invocation = Mock()
    mock_header = Mock()
    invocation.message_annotations = {}
    invocation.message = {"argument1": 0}
    invocation.headers = {
        "op": "op",
        "process": "process",
        "request": "request",
        "ion-actor-id": "ion-actor-id",
        "receiver": "resource-registry",
        "sender-type": "sender-type",
        "sender-service": "Unknown",
        # actor holds the SUPERUSER role the rules below match against
        "ion-actor-roles": {"org_name": ["SUPERUSER"]},
    }
    invocation.get_message_receiver.return_value = "service_key"
    invocation.get_service_name.return_value = "Unknown"
    invocation.get_message_sender.return_value = ["Unknown", "Unknown"]

    def get_header_value(key, default):
        # mimic header lookup with fallback default
        return invocation.headers.get(key, default)

    mock_header.side_effect = get_header_value
    invocation.get_header_value = mock_header
    mock_args = Mock()
    process = Mock()
    process.org_governance_name = "org_name"
    process.resource_id = "resource_id"
    invocation.args = {"process": process}

    def get_arg_value(key, default="Unknown"):
        return invocation.args.get(key, default)

    mock_args.side_effect = get_arg_value
    invocation.get_arg_value = mock_args
    gc.system_root_org_name = "sys_org_name"
    # check that service policies result in denying the request
    pdpm.set_service_policy_rules(service_key, self.deny_SUPERUSER_rule)
    pdpm.set_resource_policy_rules(resource_id, self.permit_SUPERUSER_rule)
    response = pdpm.check_agent_request_policies(invocation)
    self.assertEqual(response.value, "Deny")
    # check that resource policies result in denying the request
    pdpm.set_service_policy_rules(service_key, self.permit_SUPERUSER_rule)
    pdpm.set_resource_policy_rules(resource_id, self.deny_SUPERUSER_rule)
    response = pdpm.check_agent_request_policies(invocation)
    self.assertEqual(response.value, "Deny")
    # check that both service and resource policies need to allow a request
    pdpm.set_service_policy_rules(service_key, self.permit_SUPERUSER_rule)
    pdpm.set_resource_policy_rules(resource_id, self.permit_SUPERUSER_rule)
    response = pdpm.check_agent_request_policies(invocation)
    self.assertEqual(response.value, "Permit")
def test_rmtree_success(self):
    # Purpose: rmtree removes files first, then directories bottom-up.
    # NOTE(review): patches "glusterfs.gfapi" while sibling tests patch
    # "gluster.gfapi" — confirm which module path this suite targets.
    dir1_list = ["dir2", "file"]
    empty_list = []
    mock_listdir = Mock()
    # first call: contents of dir1; second call: empty dir1/dir2
    mock_listdir.side_effect = [dir1_list, empty_list]
    mock_isdir = Mock()
    mock_isdir.side_effect = [True, False]
    mock_unlink = Mock()
    mock_unlink.return_value = 0
    mock_rmdir = Mock()
    mock_rmdir.return_value = 0
    mock_islink = Mock()
    mock_islink.return_value = False
    with nested(
        patch("glusterfs.gfapi.Volume.listdir", mock_listdir),
        patch("glusterfs.gfapi.Volume.isdir", mock_isdir),
        patch("glusterfs.gfapi.Volume.islink", mock_islink),
        patch("glusterfs.gfapi.Volume.unlink", mock_unlink),
        patch("glusterfs.gfapi.Volume.rmdir", mock_rmdir),
    ):
        self.vol.rmtree("dir1")
        mock_rmdir.assert_any_call("dir1/dir2")
        mock_unlink.assert_called_once_with("dir1/file")
        mock_rmdir.assert_called_with("dir1")
def test_get_cuts(self):
    # Purpose: _get_cuts merges electronics/GPS swap timestamps of a station
    # and its reference station into a sorted list of cut datetimes.
    gps_station = (datetime_to_gps(datetime(2014, 1, 1, 10, 3)),
                   datetime_to_gps(datetime(2014, 3, 1, 11, 32)))
    gps_ref_station = (datetime_to_gps(datetime(2014, 1, 5, 0, 1, 1)),
                       datetime_to_gps(datetime(2014, 3, 5, 3, 34, 4)))
    elec_station = (datetime_to_gps(datetime(2014, 1, 3, 3, 34, 3)),
                    datetime_to_gps(datetime(2014, 3, 5, 23, 59, 59)))
    elec_ref_station = (datetime_to_gps(datetime(2014, 1, 9, 0, 0, 0)),
                        datetime_to_gps(datetime(2014, 3, 15, 1, 2, 3)))
    gps_mock = Mock()
    elec_mock = Mock()
    # sequential returns: first for the station, then the reference station
    gps_mock.side_effect = [array(gps_station), array(gps_ref_station)]
    elec_mock.side_effect = [array(elec_station), array(elec_ref_station)]
    self.off._get_electronics_timestamps = elec_mock
    self.off._get_gps_timestamps = gps_mock
    cuts = self.off._get_cuts(sentinel.station, sentinel.ref_station)
    elec_mock.assert_has_calls([call(sentinel.ref_station), call(sentinel.station)], any_order=True)
    gps_mock.assert_has_calls([call(sentinel.ref_station), call(sentinel.station)], any_order=True)
    # 8 cuts: the timestamps above (truncated to days) plus the first day
    # and today — verified sorted, bounded by 2014-1-1 and today.
    self.assertEqual(len(cuts), 8)
    six.assertCountEqual(self, sorted(cuts), cuts)
    self.assertEqual(cuts[0], datetime(2014, 1, 1))
    today = datetime.now()
    self.assertEqual(cuts[-1], datetime(today.year, today.month, today.day))
def test_side_effect(self): mock = Mock() def effect(*args, **kwargs): raise SystemError('kablooie') mock.side_effect = effect self.assertRaises(SystemError, mock, 1, 2, fish=3) mock.assert_called_with(1, 2, fish=3) results = [1, 2, 3] def effect(): return results.pop() mock.side_effect = effect self.assertEqual([mock(), mock(), mock()], [3, 2, 1], "side effect not used correctly") mock = Mock(side_effect=sentinel.SideEffect) self.assertEqual(mock.side_effect, sentinel.SideEffect, "side effect in constructor not used") def side_effect(): return DEFAULT mock = Mock(side_effect=side_effect, return_value=sentinel.RETURN) self.assertEqual(mock(), sentinel.RETURN)
def test_service_policies(self):
    """Service PDP registration, NotFound on a missing receiver, and
    policy evaluation based on the actor's roles."""
    gc = Mock()
    service_key = 'service_key'
    pdpm = PolicyDecisionPointManager(gc)
    # see that the PDP for service is the default
    self.assertEqual(pdpm.get_service_pdp(service_key), pdpm.load_common_service_pdp)
    pdpm.load_service_policy_rules(service_key, self.permit_ION_MANAGER_rule)
    # see that the PDP for service is not the default anymore
    self.assertNotEqual(pdpm.get_service_pdp(service_key), pdpm.load_common_service_pdp)
    # check request without a service_key raises NotFound error
    invocation = Mock()
    invocation.message_annotations = {}
    invocation.get_message_receiver.return_value = None
    with self.assertRaises(NotFound) as chk_res:
        pdpm.check_service_request_policies(invocation)
    self.assertIn(chk_res.exception.message, 'No receiver for this message')
    # check that, because actor does not have ION_MANAGER role, policy evaluates to a denial
    # (really Not Applicable, because of the inelegant hack of a policy we are setting up our pdp with)
    mock_header = Mock()
    invocation.message_annotations = {}
    invocation.message = {'argument1': 0}
    invocation.headers = {'op': 'op', 'process': 'process', 'request': 'request',
                          'ion-actor-id': 'ion-actor-id', 'receiver': 'resource-registry',
                          'sender-type': 'sender-type', 'sender-service': 'Unknown',
                          'ion-actor-roles': {'org_name': ['ion-actor-roles']}}
    invocation.get_message_receiver.return_value = 'service_key'
    invocation.get_service_name.return_value = 'Unknown'
    invocation.get_message_sender.return_value = ['Unknown', 'Unknown']

    def get_header_value(key, default):
        # mimic header lookup with fallback default
        return invocation.headers.get(key, default)

    mock_header.side_effect = get_header_value
    invocation.get_header_value = mock_header
    mock_args = Mock()
    process = Mock()
    process.org_governance_name = 'org_name'
    invocation.args = {'process': process}

    def get_arg_value(key, default):
        return invocation.args.get(key, default)

    mock_args.side_effect = get_arg_value
    invocation.get_arg_value = mock_args
    gc.system_root_org_name = 'sys_org_name'
    response = pdpm.check_service_request_policies(invocation)
    self.assertEqual(response.value, "NotApplicable")
    # check that policy evaluates to Permit because actor has ION_MANAGER role
    invocation.message_annotations = {}
    invocation.headers = {'op': 'op', 'process': 'process', 'request': 'request',
                          'ion-actor-id': 'ion-actor-id', 'receiver': 'resource-registry',
                          'sender-type': 'sender-type', 'sender-service': 'sender-service',
                          'ion-actor-roles': {'sys_org_name': ['ION_MANAGER']}}
    response = pdpm.check_service_request_policies(invocation)
    self.assertEqual(response.value, "Permit")
def test_makedirs_success(self):
    """makedirs creates only the missing directory component."""
    mock_glfs_mkdir = Mock()
    mock_glfs_mkdir.side_effect = [0, 0]
    mock_exists = Mock()
    # exists() answers for each path component makedirs probes
    # NOTE(review): three answers prepared but only one mkdir is expected
    # below — confirm how many exists() probes makedirs("dir1/") performs.
    mock_exists.side_effect = (False, True, False)
    with nested(patch("gluster.gfapi.api.glfs_mkdir", mock_glfs_mkdir),
                patch("gluster.gfapi.Volume.exists", mock_exists)):
        self.vol.makedirs("dir1/", 0775)
        # a single missing component means a single mkdir call
        self.assertEqual(mock_glfs_mkdir.call_count, 1)
        mock_glfs_mkdir.assert_any_call(self.vol.fs, "dir1/", 0775)
def test_notification_dbus(self):
    '''
    Test mocked Linux DBus for plyer.notification.

    A fake `dbus` module is injected into sys.modules so NotifyDbus can
    import it; the try/finally guarantees the injection is undone.
    '''
    notif = platform_import(
        platform='linux',
        module_name='notification'
    )
    self.assertIn('NotifyDbus', dir(notif))
    # (3) mocked Interface called from dbus
    # (a one-element tuple side_effect makes the call return the mock itself)
    interface = Mock()
    interface.side_effect = (interface, )
    # (2) mocked SessionBus called from dbus
    session_bus = Mock()
    session_bus.side_effect = (session_bus, )
    # (1) mocked dbus for import
    dbus = Mock(SessionBus=session_bus, Interface=interface)
    # inject the mocked module
    self.assertNotIn('dbus', sys.modules)
    sys.modules['dbus'] = dbus
    try:
        notif = notif.instance()
        self.assertIn('NotifyDbus', str(notif))
        # call notify()
        self.show_notification(notif)
        # check whether Mocks were called
        dbus.SessionBus.assert_called_once()
        session_bus.get_object.assert_called_once_with(
            'org.freedesktop.Notifications',
            '/org/freedesktop/Notifications'
        )
        interface.Notify.assert_called_once_with(
            TestNotification.data['app_name'], 0,
            TestNotification.data['app_icon'],
            TestNotification.data['title'],
            TestNotification.data['message'],
            [], [], TestNotification.data['timeout'] * 1000
        )
    finally:
        # always restore the real module table
        del sys.modules['dbus']
        self.assertNotIn('dbus', sys.modules)
def test_agent_policies(self):
    """Agent requests must satisfy BOTH service and resource policies;
    a Deny on either side denies the whole request."""
    # set up data
    gc = Mock()
    service_key = 'service_key'
    resource_id = 'resource_id'
    pdpm = PolicyDecisionPointManager(gc)
    invocation = Mock()
    mock_header = Mock()
    invocation.message_annotations = {}
    invocation.message = {'argument1': 0}
    invocation.headers = {'op': 'op', 'process': 'process', 'request': 'request',
                          'ion-actor-id': 'ion-actor-id', 'receiver': 'resource-registry',
                          'sender-type': 'sender-type', 'sender-service': 'Unknown',
                          # actor holds the ION_MANAGER role the rules match on
                          'ion-actor-roles': {'org_name': ['ION_MANAGER']}}
    invocation.get_message_receiver.return_value = 'service_key'
    invocation.get_service_name.return_value = 'Unknown'
    invocation.get_message_sender.return_value = ['Unknown', 'Unknown']

    def get_header_value(key, default):
        # mimic header lookup with fallback default
        return invocation.headers.get(key, default)

    mock_header.side_effect = get_header_value
    invocation.get_header_value = mock_header
    mock_args = Mock()
    process = Mock()
    process.org_governance_name = 'org_name'
    process.resource_id = 'resource_id'
    invocation.args = {'process': process}

    def get_arg_value(key, default='Unknown'):
        return invocation.args.get(key, default)

    mock_args.side_effect = get_arg_value
    invocation.get_arg_value = mock_args
    gc.system_root_org_name = 'sys_org_name'
    # check that service policies result in denying the request
    pdpm.load_service_policy_rules(service_key, self.deny_ION_MANAGER_rule)
    pdpm.load_resource_policy_rules(resource_id, self.permit_ION_MANAGER_rule)
    response = pdpm.check_agent_request_policies(invocation)
    self.assertEqual(response.value, "Deny")
    # check that resource policies result in denying the request
    pdpm.load_service_policy_rules(service_key, self.permit_ION_MANAGER_rule)
    pdpm.load_resource_policy_rules(resource_id, self.deny_ION_MANAGER_rule)
    response = pdpm.check_agent_request_policies(invocation)
    self.assertEqual(response.value, "Deny")
    # check that both service and resource policies need to allow a request
    pdpm.load_service_policy_rules(service_key, self.permit_ION_MANAGER_rule)
    pdpm.load_resource_policy_rules(resource_id, self.permit_ION_MANAGER_rule)
    response = pdpm.check_agent_request_policies(invocation)
    self.assertEqual(response.value, "Permit")
def test_fire_timers_raises(self):
    """Errors raised by timer callbacks bubble up when their type is in
    ``propagate``; otherwise they are logged and the exhausted scheduler
    ends with StopIteration."""
    hub = Hub()
    hub.timer = Mock()
    callback = Mock()

    # KeyError is listed in propagate, so it must be re-raised.
    callback.side_effect = KeyError('foo')
    hub.scheduler = iter([(0, callback)])
    with self.assertRaises(KeyError):
        hub.fire_timers(propagate=(KeyError, ))

    # ValueError is not propagated: it gets logged, and the empty
    # scheduler then raises StopIteration.
    callback.side_effect = ValueError('foo')
    hub.scheduler = iter([(0, callback)])
    with patch('kombu.async.hub.logger') as logger:
        with self.assertRaises(StopIteration):
            hub.fire_timers()
        self.assertTrue(logger.error.called)
def test_makedirs_success_EEXIST(self):
    """An EEXIST error from mkdir on an intermediate component is
    tolerated and makedirs continues to the leaf."""
    err = errno.EEXIST
    mock_glfs_mkdir = Mock()
    # first component "already exists" (EEXIST), leaf creation succeeds
    mock_glfs_mkdir.side_effect = [OSError(err, os.strerror(err)), 0]
    mock_exists = Mock()
    mock_exists.side_effect = [False, True, False]
    with nested(patch("gluster.gfapi.api.glfs_mkdir", mock_glfs_mkdir),
                patch("gluster.gfapi.Volume.exists", mock_exists)):
        self.vol.makedirs("./dir1/dir2", 0775)
        # one mkdir per component: ./dir1 then ./dir1/dir2
        self.assertEqual(mock_glfs_mkdir.call_count, 2)
        mock_glfs_mkdir.assert_any_call(self.vol.fs, "./dir1", 0775)
        mock_glfs_mkdir.assert_called_with(self.vol.fs, "./dir1/dir2", 0775)
def test_walk_success(self):
    """walk() yields (path, dirs, files) tuples; check the top entry."""
    dir1_list = ["dir2", "file"]
    empty_list = []
    mock_listdir = Mock()
    # dir1 contains one dir and one file; dir1/dir2 is empty
    mock_listdir.side_effect = [dir1_list, empty_list]
    mock_isdir = Mock()
    mock_isdir.side_effect = [True, False]
    with nested(patch("gluster.gfapi.Volume.listdir", mock_listdir),
                patch("gluster.gfapi.Volume.isdir", mock_isdir)):
        for (path, dirs, files) in self.vol.walk("dir1"):
            self.assertEqual(dirs, ['dir2'])
            self.assertEqual(files, ['file'])
            # only the first yielded tuple is checked
            break
def test_main(self):
    """main() aggregates the results of the three *_recon_check calls;
    each check is also exercised with the other two patched out."""
    mock_recon_check = Mock()
    # one list per recon check, concatenated by main()
    mock_recon_check.side_effect = [['a'], ['b'], ['c']]
    with patch(self.module + '_recon_check', mock_recon_check):
        actual = replication.main()
        self.assertIsInstance(actual, list)
        self.assertListEqual(['a', 'b', 'c'], actual)
    # Tests account_recon_check in isolation
    mock_recon_check = Mock()
    mock_recon_check.side_effect = [['a']]
    with patch(self.module + '_recon_check', mock_recon_check):
        with patch(self.module + 'object_recon_check') as rc1:
            with patch(self.module + 'container_recon_check') as rc2:
                actual = replication.main()
                self.assertIsInstance(actual, list)
                self.assertListEqual(['a'], actual)
                self.assertTrue(rc1.called)
                self.assertTrue(rc2.called)
    # Tests container_recon_check in isolation
    mock_recon_check = Mock()
    mock_recon_check.side_effect = [['a']]
    with patch(self.module + '_recon_check', mock_recon_check):
        with patch(self.module + 'object_recon_check') as rc1:
            with patch(self.module + 'account_recon_check') as rc2:
                actual = replication.main()
                self.assertIsInstance(actual, list)
                self.assertListEqual(['a'], actual)
                self.assertTrue(rc1.called)
                self.assertTrue(rc2.called)
    # Tests object_recon_check in isolation
    mock_recon_check = Mock()
    mock_recon_check.side_effect = [['a']]
    with patch(self.module + '_recon_check', mock_recon_check):
        with patch(self.module + 'container_recon_check') as rc1:
            with patch(self.module + 'account_recon_check') as rc2:
                actual = replication.main()
                self.assertIsInstance(actual, list)
                self.assertListEqual(['a'], actual)
                self.assertTrue(rc1.called)
                self.assertTrue(rc2.called)
def test_resource_policies(self):
    """Resource PDP registration, NotFound on a missing resource_id, and
    policy evaluation based on the actor's roles."""
    gc = Mock()
    resource_id = 'resource_key'
    pdpm = PolicyDecisionPointManager(gc)
    # see that the PDP for resource is empty
    self.assertEqual(pdpm.get_resource_pdp(resource_id), pdpm.empty_pdp)
    pdpm.load_resource_policy_rules(resource_id, self.permit_ION_MANAGER_rule)
    # see that the PDP for resource is not empty anymore
    self.assertNotEqual(pdpm.get_resource_pdp(resource_id), pdpm.empty_pdp)
    # check request without a resource_id raises NotFound error
    self.invocation = Mock()
    with self.assertRaises(NotFound) as chk_res:
        pdpm.check_resource_request_policies(self.invocation, None)
    self.assertIn(chk_res.exception.message, 'The resource_id is not set')
    # check that, because actor does not have ION_MANAGER role, policy evaluates to a denial
    # (really Not Applicable, because of the inelegant hack of a policy we are setting up our pdp with)
    mock_header = Mock()
    self.invocation.headers = {'op': 'op', 'process': 'process', 'request': 'request',
                               'ion-actor-id': 'ion-actor-id', 'receiver': 'resource-registry',
                               'sender-type': 'sender-type', 'sender-service': 'sender-service',
                               'ion-actor-roles': {'org_name': ['ion-actor-roles']}}

    def get_header_value(key, default):
        # mimic header lookup with fallback default
        return self.invocation.headers.get(key, default)

    mock_header.side_effect = get_header_value
    self.invocation.get_header_value = mock_header
    mock_args = Mock()
    process = Mock()
    process.org_name = 'org_name'
    self.invocation.args = {'process': process}

    def get_arg_value(key, default):
        return self.invocation.args.get(key, default)

    mock_args.side_effect = get_arg_value
    self.invocation.get_arg_value = mock_args
    gc.system_root_org_name = 'sys_org_name'
    response = pdpm.check_resource_request_policies(self.invocation, resource_id)
    self.assertEqual(response.value, "NotApplicable")
    # check that policy evaluates to Permit because actor has ION_MANAGER role
    self.invocation.headers = {'op': 'op', 'process': 'process', 'request': 'request',
                               'ion-actor-id': 'ion-actor-id', 'receiver': 'resource-registry',
                               'sender-type': 'sender-type', 'sender-service': 'sender-service',
                               'ion-actor-roles': {'sys_org_name': ['ION_MANAGER']}}
    response = pdpm.check_resource_request_policies(self.invocation, resource_id)
    self.assertEqual(response.value, "Permit")
def test_trigger_on_huddle_message_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
    """A huddle PM addressed to two bots queues exactly one
    'private_message' trigger event per bot, for every bot type."""
    for bot_type, expected_queue_name in BOT_TYPE_TO_QUEUE_NAME.items():
        self.bot_profile.bot_type = bot_type
        self.bot_profile.save()
        self.second_bot_profile.bot_type = bot_type
        self.second_bot_profile.save()
        sender_email = self.user_profile.email
        recipient_emails = [self.bot_profile.email, self.second_bot_profile.email]
        profile_ids = [self.bot_profile.id, self.second_bot_profile.id]

        def check_values_passed(queue_name: Any,
                                trigger_event: Union[Mapping[Any, Any], Any],
                                x: Callable[[Any], None]=None) -> None:
            # each bot must be targeted exactly once: remove ids as seen
            self.assertEqual(queue_name, expected_queue_name)
            self.assertIn(trigger_event["user_profile_id"], profile_ids)
            profile_ids.remove(trigger_event["user_profile_id"])
            self.assertEqual(trigger_event["trigger"], "private_message")
            self.assertEqual(trigger_event["message"]["sender_email"], sender_email)
            self.assertEqual(trigger_event["message"]["type"], u'private')

        mock_queue_json_publish.side_effect = check_values_passed
        self.send_huddle_message(sender_email, recipient_emails, 'test')
        # one publish per bot recipient
        self.assertEqual(mock_queue_json_publish.call_count, 2)
        mock_queue_json_publish.reset_mock()
def test_trigger_on_personal_message_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
    """A 1:1 PM to a bot queues a 'private_message' trigger event whose
    display recipients include both participants, for every bot type."""
    for bot_type, expected_queue_name in BOT_TYPE_TO_QUEUE_NAME.items():
        self.bot_profile.bot_type = bot_type
        self.bot_profile.save()
        sender_email = self.user_profile.email
        recipient_email = self.bot_profile.email

        def check_values_passed(queue_name: Any,
                                trigger_event: Union[Mapping[Any, Any], Any],
                                x: Callable[[Any], None]=None) -> None:
            self.assertEqual(queue_name, expected_queue_name)
            self.assertEqual(trigger_event["user_profile_id"], self.bot_profile.id)
            self.assertEqual(trigger_event["trigger"], "private_message")
            self.assertEqual(trigger_event["message"]["sender_email"], sender_email)
            display_recipients = [
                trigger_event["message"]["display_recipient"][0]["email"],
                trigger_event["message"]["display_recipient"][1]["email"],
            ]
            # both sides of the conversation appear as display recipients
            self.assertTrue(sender_email in display_recipients)
            self.assertTrue(recipient_email in display_recipients)

        mock_queue_json_publish.side_effect = check_values_passed
        self.send_personal_message(sender_email, recipient_email, 'test')
        self.assertTrue(mock_queue_json_publish.called)
def test_callback_with_exception_multiple_calls(self):
    """wait_for_call keeps intercepting while the callback returns False,
    passing the raised exception info to the callback on each call."""

    class EchoException(Exception):
        pass

    class Echo(object):
        def error(self):
            raise exc

    echo = Echo()
    exc = EchoException("error!")
    callback = Mock()
    # stay patched after the first call, release after the second
    callback.side_effect = [False, True]
    with wait_for_call(echo, 'error', callback):
        with pytest.raises(EchoException):
            echo.error()
        with pytest.raises(EchoException):
            echo.error()
    assert callback.called
    # callback receives (args, kwargs, result, exc_info) per call
    assert callback.call_args_list == [
        call((), {}, None, (EchoException, exc, ANY)),
        call((), {}, None, (EchoException, exc, ANY))
    ]
def test_trigger_on_stream_mention_from_user(self, mock_queue_json_publish: mock.Mock) -> None:
    """Mentioning a bot in a stream message queues a 'mention' trigger
    event carrying the full message payload, for every bot type."""
    for bot_type, expected_queue_name in BOT_TYPE_TO_QUEUE_NAME.items():
        self.bot_profile.bot_type = bot_type
        self.bot_profile.save()
        content = u'@**FooBot** foo bar!!!'
        recipient = 'Denmark'
        trigger = 'mention'
        message_type = Recipient._type_names[Recipient.STREAM]

        def check_values_passed(queue_name: Any,
                                trigger_event: Union[Mapping[Any, Any], Any],
                                x: Callable[[Any], None]=None) -> None:
            self.assertEqual(queue_name, expected_queue_name)
            self.assertEqual(trigger_event["message"]["content"], content)
            self.assertEqual(trigger_event["message"]["display_recipient"], recipient)
            self.assertEqual(trigger_event["message"]["sender_email"], self.user_profile.email)
            self.assertEqual(trigger_event["message"]["type"], message_type)
            self.assertEqual(trigger_event['trigger'], trigger)
            self.assertEqual(trigger_event['user_profile_id'], self.bot_profile.id)

        mock_queue_json_publish.side_effect = check_values_passed
        self.send_stream_message(
            self.user_profile.email,
            'Denmark',
            content)
        self.assertTrue(mock_queue_json_publish.called)
def test_request_cached_with_arg_map_function(self):
    """
    Ensure that calling a decorated function uses arg_map_function to
    determine the cache key.
    """
    to_be_wrapped = Mock()
    to_be_wrapped.side_effect = [1, 2, 3]
    self.assertEqual(to_be_wrapped.call_count, 0)

    def mock_wrapper(*args, **kwargs):
        """Simple wrapper to let us decorate our mock."""
        return to_be_wrapped(*args, **kwargs)

    # maps each arg to "True"/"False" depending on equality with 1,
    # so the args 2 and 3 share one cache key
    arg_map_function = lambda arg: six.text_type(arg == 1)
    wrapped = request_cached(arg_map_function=arg_map_function)(mock_wrapper)
    # This will be a miss, and make an underlying call.
    result = wrapped(1)
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 1)
    # This will be a miss, and make an underlying call.
    result = wrapped(2)
    self.assertEqual(result, 2)
    self.assertEqual(to_be_wrapped.call_count, 2)
    # These will be hits, and not make an underlying call.
    result = wrapped(1)
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 2)
    # wrapped(3) maps to the same key as wrapped(2), so the cached 2 returns
    result = wrapped(3)
    self.assertEqual(result, 2)
    self.assertEqual(to_be_wrapped.call_count, 2)
def put_mock(fabric_integration, monkeypatch):
    """Pytest fixture: replace fabric's ``put`` with a mock that validates
    each call against ``put.expected``, a FIFO of (args, kwargs) templates.
    The ``object`` placeholder in a template matches any value; positional
    args with a ``name`` attribute are compared by that name."""
    put = Mock()

    def _put(*args, **kw):
        try:
            expected = put.expected.pop(0)
        except IndexError:  # pragma: nocover
            # no expectation queued: fall back to an empty template
            expected = ((), {})
        eargs, ekw = expected
        assert len(args) == len(eargs)
        for arg, earg in zip(args, eargs):
            if earg is object:
                # wildcard: accept any positional value
                continue
            if hasattr(arg, 'name'):
                # file-like args are compared by their name
                assert arg.name == earg
            else:
                assert arg == earg
        # keyword sets must match exactly, values checked per key
        assert sorted(kw.keys()) == sorted(ekw.keys())
        for k in kw:
            if ekw[k] is object:
                continue
            assert kw[k] == ekw[k], "kw['%s'](%r) != ekw['%s'](%r)" % (k, kw[k], k, ekw[k])

    put.side_effect = _put
    put.expected = []
    monkeypatch.setattr('bsdploy.bootstrap_utils.put', put)
    monkeypatch.setattr('fabric.contrib.files.put', put)
    return put
def do_test_sendMessage(self, **mnKwargs):
    """Shared driver: run MailNotifier.buildMessage with all collaborators
    mocked. The fake ESMTPSenderFactory immediately fires the deferred it
    receives as positional argument 5. Returns (notifier, builds)."""
    fakeSenderFactory = Mock()
    fakeSenderFactory.side_effect = lambda *args, **kwargs: args[5].callback(True)
    self.patch(mail, 'ESMTPSenderFactory', fakeSenderFactory)
    _, builds = yield self.setupBuildResults(SUCCESS)
    mn = yield self.setupMailNotifier('*****@*****.**', **mnKwargs)
    mn.messageFormatter = Mock(spec=mn.messageFormatter)
    mn.messageFormatter.formatMessageForBuildResults.return_value = {"body": "body",
                                                                     "type": "text",
                                                                     "subject": "subject"}
    # NOTE(review): "Interrested" spelling mirrors the actual attribute
    # name on the notifier — do not "fix" it here.
    mn.findInterrestedUsersEmails = Mock(spec=mn.findInterrestedUsersEmails)
    mn.findInterrestedUsersEmails.return_value = list("<recipients>")
    mn.processRecipients = Mock(spec=mn.processRecipients)
    mn.processRecipients.return_value = list("<processedrecipients>")
    mn.createEmail = Mock(spec=mn.createEmail)
    mn.createEmail.return_value.as_string = Mock(return_value="<email>")
    yield mn.buildMessage("mybldr", builds, SUCCESS)
    defer.returnValue((mn, builds))
def test_request_cached_with_caches_despite_changing_wrapped_result(self):
    """
    Ensure that after caching a result, we always send it back, even
    if the underlying result changes.
    """
    to_be_wrapped = Mock()
    to_be_wrapped.side_effect = [1, 2, 3]
    self.assertEqual(to_be_wrapped.call_count, 0)

    def mock_wrapper(*args, **kwargs):
        """Simple wrapper to let us decorate our mock."""
        return to_be_wrapped(*args, **kwargs)

    wrapped = request_cached()(mock_wrapper)
    # miss: underlying mock is called and returns 1
    result = wrapped()
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 1)
    # hit: cached 1 returned, no additional call
    result = wrapped()
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 1)
    # direct (uncached) call consumes the next side_effect value
    direct_result = mock_wrapper()
    self.assertEqual(direct_result, 2)
    self.assertEqual(to_be_wrapped.call_count, 2)
    # the cache still returns the original 1
    result = wrapped()
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 2)
    direct_result = mock_wrapper()
    self.assertEqual(direct_result, 3)
    self.assertEqual(to_be_wrapped.call_count, 3)
def testReset(self):
    """reset_mock clears call-tracking state but keeps configuration
    (name, parent, spec, side_effect, return_value, child mocks).

    NOTE(review): accesses private attributes `_name`, `_parent`,
    `_methods` and `_children` — these belong to an older mock release
    (modern unittest.mock names them `_mock_name` etc.); confirm the
    pinned mock version before porting."""
    parent = Mock()
    spec = ["something"]
    mock = Mock(name="child", parent=parent, spec=spec)
    mock(sentinel.Something, something=sentinel.SomethingElse)
    something = mock.something
    mock.something()
    mock.side_effect = sentinel.SideEffect
    return_value = mock.return_value
    return_value()
    mock.reset_mock()
    # configuration survives the reset
    self.assertEqual(mock._name, "child", "name incorrectly reset")
    self.assertEqual(mock._parent, parent, "parent incorrectly reset")
    self.assertEqual(mock._methods, spec, "methods incorrectly reset")
    # call bookkeeping is cleared
    self.assertFalse(mock.called, "called not reset")
    self.assertEqual(mock.call_count, 0, "call_count not reset")
    self.assertEqual(mock.call_args, None, "call_args not reset")
    self.assertEqual(mock.call_args_list, [], "call_args_list not reset")
    self.assertEqual(mock.method_calls, [],
                     "method_calls not initialised correctly: %r != %r" % (mock.method_calls, []))
    self.assertEqual(mock.side_effect, sentinel.SideEffect,
                     "side_effect incorrectly reset")
    self.assertEqual(mock.return_value, return_value,
                     "return_value incorrectly reset")
    self.assertFalse(return_value.called, "return value mock not reset")
    self.assertEqual(mock._children, {'something': something},
                     "children reset incorrectly")
    self.assertEqual(mock.something, something, "children incorrectly cleared")
    self.assertFalse(mock.something.called, "child not reset")
def test_listdir_with_stat_success(self):
    """listdir_with_stat returns (name, stat) pairs; the '.' entry is
    excluded from the result (only 2 of 3 dirents appear)."""
    mock_glfs_opendir = Mock()
    mock_glfs_opendir.return_value = 2
    dirent1 = api.Dirent()
    dirent1.d_name = b"mockfile"
    dirent1.d_reclen = 8
    stat1 = api.Stat()
    stat1.st_nlink = 1
    dirent2 = api.Dirent()
    dirent2.d_name = b"mockdir"
    dirent2.d_reclen = 7
    stat2 = api.Stat()
    stat2.st_nlink = 2
    dirent3 = api.Dirent()
    dirent3.d_name = b"."
    dirent3.d_reclen = 1
    stat3 = api.Stat()
    # NOTE(review): `n_link` is presumably a typo for `st_nlink`; harmless
    # here because the '.' entry never reaches the assertions below.
    stat3.n_link = 2
    mock_Dir_next = Mock()
    # iterator yields three entries, then signals exhaustion
    mock_Dir_next.side_effect = [(dirent1, stat1), (dirent2, stat2),
                                 (dirent3, stat3), StopIteration]
    with patch("gluster.gfapi.api.glfs_opendir", mock_glfs_opendir):
        # patch both the py3 (__next__) and py2 (next) iterator hooks
        with patch("gluster.gfapi.Dir.__next__", mock_Dir_next):
            with patch("gluster.gfapi.Dir.next", mock_Dir_next):
                d = self.vol.listdir_with_stat("testdir")
                self.assertEqual(len(d), 2)
                self.assertEqual(d[0][0], 'mockfile')
                self.assertEqual(d[0][1].st_nlink, 1)
                self.assertEqual(d[1][0], 'mockdir')
                self.assertEqual(d[1][1].st_nlink, 2)
def test__create(self):
    """_create returns the raw response data when return_raw is True,
    otherwise wraps it in resource_class (under the completion cache)."""
    manager = base.Manager()
    manager.api = Mock()
    manager.api.client = Mock()
    response_key = "response_key"
    data_ = "test-data"
    body_ = {response_key: data_}
    url_ = "test_url_post"
    manager.api.client.post = Mock(return_value=(url_, body_))
    # raw path: the value under response_key comes back unchanged
    return_raw = True
    r = manager._create(url_, body_, response_key, return_raw)
    self.assertEqual(data_, r)
    return_raw = False

    @contextlib.contextmanager
    def completion_cache_mock(*arg, **kwargs):
        # no-op stand-in for the completion-cache context manager
        yield

    mock = Mock()
    mock.side_effect = completion_cache_mock
    manager.completion_cache = mock
    # wrapped path: the data is fed through resource_class
    manager.resource_class = Mock(return_value="test-class")
    r = manager._create(url_, body_, response_key, return_raw)
    self.assertEqual("test-class", r)
def setUp(self):
    """Build a Manager whose client's post/get return canned (url, body)
    pairs and whose completion_cache is a no-op context manager."""
    super(ManagerListTest, self).setUp()

    @contextlib.contextmanager
    def completion_cache_mock(*arg, **kwargs):
        # no-op stand-in for the completion-cache context manager
        yield

    self.manager = base.Manager()
    self.manager.api = Mock()
    self.manager.api.client = Mock()
    self.response_key = "response_key"
    # canned POST response
    self.data_p = ["p1", "p2"]
    self.body_p = {self.response_key: self.data_p}
    self.url_p = "test_url_post"
    self.manager.api.client.post = Mock(return_value=(self.url_p, self.body_p))
    # canned GET response
    self.data_g = ["g1", "g2", "g3"]
    self.body_g = {self.response_key: self.data_g}
    self.url_g = "test_url_get"
    self.manager.api.client.get = Mock(return_value=(self.url_g, self.body_g))
    mock = Mock()
    mock.side_effect = completion_cache_mock
    self.manager.completion_cache = mock
def test_incoming_with_add(self):
    """Resolving a list of resource URIs adds the underlying models to
    the target's related manager in one add() call."""

    class MockResource(ModelResource):
        key = Mock()
        model_class = mock_orm.Model
        fields = [
            AttributeField(attribute='bar', type=int),
        ]

    field = URIListResourceField(attribute='foos', resource_class=MockResource)
    source_dict = {
        'foos': ['uri://resources/1', 'uri://resources/2']
    }
    target_object = mock_orm.Mock()
    related_manager = mock_orm.Manager()
    # relation starts empty, so both resolved models should be added
    related_manager.all = Mock(return_value=mock_orm.QuerySet())
    target_object.foos = related_manager
    ctx = mock_context()
    foo1_model = Mock()
    foo2_model = Mock()
    mock_resources = Mock()
    resource1 = MockResource(foo1_model)
    resource1.key = 1
    resource2 = MockResource(foo2_model)
    resource2.key = 2
    # each URI resolves to the next resource in order
    mock_resources.side_effect = [resource1, resource2]
    ctx.resolve_resource_uri = mock_resources
    field.handle_incoming(ctx, source_dict, target_object)
    related_manager.add.assert_called_with(foo1_model, foo2_model)
def test_callback_multiple_calls(self):
    """The patched method stays intercepted until the callback returns
    True; every invocation is reported with its args and result."""

    class Echo(object):
        count = 0

        def upper(self, arg):
            self.count += 1
            return "{}-{}".format(arg.upper(), self.count)

    instance = Echo()
    text = "hello"

    cb = Mock()
    # first call: keep waiting; second call: stop intercepting
    cb.side_effect = [False, True]

    with wait_for_call(instance, 'upper', cb):
        first = instance.upper(text)
        assert first == "HELLO-1"
        second = instance.upper(text)
        assert second == "HELLO-2"

    assert cb.called
    expected = [
        call((text,), {}, first, None),
        call((text,), {}, second, None),
    ]
    assert cb.call_args_list == expected
def test_request_cached_with_request_cache_getter(self):
    """
    Ensure that calling a decorated function uses request_cache_getter
    if supplied.
    """
    to_be_wrapped = Mock()
    to_be_wrapped.side_effect = [1, 2, 3]
    self.assertEqual(to_be_wrapped.call_count, 0)

    def mock_wrapper(*args, **kwargs):
        """Simple wrapper to let us decorate our mock."""
        return to_be_wrapped(*args, **kwargs)

    # always route caching to the namespaced 'test' cache
    request_cache_getter = lambda args, kwargs: RequestCache('test')
    wrapped = request_cached(request_cache_getter=request_cache_getter)(mock_wrapper)
    # This will be a miss, and make an underlying call.
    result = wrapped(1)
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 1)
    # This will be a miss, and make an underlying call.
    result = wrapped(2)
    self.assertEqual(result, 2)
    self.assertEqual(to_be_wrapped.call_count, 2)
    # These will be hits, and not make an underlying call.
    result = wrapped(1)
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 2)
    # Ensure the appropriate request cache was used
    self.assertFalse(RequestCache().data)
    self.assertTrue(RequestCache('test').data)
def test_delay(self):
    """A task that always fails is retried with the configured delay, so
    the total wait covers the initial attempt plus every retry."""
    import time

    delay_seconds = 1  # delay in seconds
    max_retries = 3

    failing_callback = Mock()
    failing_callback.side_effect = Exception()

    queue = MemoryTaskQueue(failing_callback, max_retries=max_retries,
                            delay=delay_seconds)
    queue.put('hello')

    started = time.time()
    queue.wait()
    elapsed = time.time() - started

    assert int(elapsed) == (max_retries + 1) * delay_seconds
def test_absolute_http_nested_req_file_in_local(self, finder, monkeypatch):
    """
    Test a nested req file url in a local req file
    """
    local_req_file = '/path/req_file.txt'

    def fake_parse(*args, **kwargs):
        return iter([])

    # Stub out the recursive parse so we only observe what URL it receives.
    parse_stub = Mock()
    parse_stub.side_effect = fake_parse
    monkeypatch.setattr(
        pip._internal.req.req_file, 'parse_requirements', parse_stub)

    list(process_line(
        "-r http://me.com/me/reqs.txt", local_req_file, 1, finder=finder))

    first_call = parse_stub.mock_calls[0]
    # The absolute URL is passed through unchanged.
    assert first_call[1][0] == 'http://me.com/me/reqs.txt'
def test_retry_boto_call_pauses_when_request_limit_hit(self, mock_sleep):
    """_retry_boto_call sleeps once, then retries, after a Throttling error."""
    mock_func = Mock()
    # First call raises a throttling ClientError; the retry succeeds.
    mock_func.side_effect = [
        ClientError(
            {
                "Error": {
                    "Code": "Throttling",
                    "Message": "Request limit hit"
                }
            },
            sentinel.operation),
        sentinel.response
    ]
    # The attribute function.__name__ is required by the decorator @wraps.
    mock_func.__name__ = "mock_func"

    _retry_boto_call(mock_func)()

    # Exactly one one-second pause between the failure and the retry.
    mock_sleep.assert_called_once_with(1)
def test_send_error_other(self):
    """A generic exception from urlopen must not defer analytics tracking.

    NOTE(review): send() still reports success here — presumably so that
    unexpected errors are not retried; confirm this is intentional.
    """
    message = WebhookRequest(
        MockNotification(webhook_message_data={'data': 'value'}),
        'https://www.thebluealliance.com',
        'secret')

    error_mock = Mock()
    error_mock.side_effect = Exception('testing')

    with patch.object(urllib2, 'urlopen', error_mock) as mock_urlopen, patch.object(
            message, 'defer_track_notification') as mock_track:
        success = message.send()

    mock_urlopen.assert_called_once()
    mock_track.assert_not_called()
    self.assertTrue(success)
def test_start_timezone(
    self,
    mock_purge: Mock,
    mock_get_sessions: Mock,
    mock_docket: Mock,
    mock_tracker: Mock,
    mock_create_session: Mock,
    mock_update_phase: Mock,
    mock_region: Mock,
    mock_supported: Mock,
    mock_environment: Mock,
) -> None:
    """Tests that the start operation chains together the correct calls."""
    mock_docket.return_value = None
    mock_tracker.return_value = None
    mock_get_sessions.return_value = iter([])
    mock_purge.return_value = None
    mock_environment.return_value = "production"
    mock_scraper = create_autospec(BaseScraper)
    mock_region.return_value = fake_region(environment="production",
                                           scraper=mock_scraper)
    # Presumably _MockSupported narrows "all" to us_wy for this timezone.
    mock_supported.side_effect = _MockSupported

    region = "all"
    scrape_type = constants.ScrapeType.BACKGROUND
    scrape_key = ScrapeKey("us_wy", scrape_type)
    request_args = {
        "region": region,
        "scrape_type": scrape_type.value,
        "timezone": "America/Los_Angeles",
    }
    headers = {"X-Appengine-Cron": "test-cron"}
    response = self.client.get("/start",
                               query_string=request_args,
                               headers=headers)
    assert response.status_code == 200

    # The start flow: purge the docket, load it, track it, create a session,
    # advance the phase, then kick off the region's scraper.
    mock_purge.assert_called_with(scrape_key, "scraper_batch")
    mock_docket.assert_called_with(scrape_key, "", "")
    mock_tracker.assert_called_with(scrape_key)
    mock_create_session.assert_called_with(scrape_key)
    mock_update_phase.assert_called_with(mock_create_session.return_value,
                                         scrape_phase.ScrapePhase.SCRAPE)
    mock_region.assert_called_with("us_wy")
    mock_scraper.start_scrape.assert_called()
    mock_supported.assert_called_with(
        stripes=[], timezone=pytz.timezone("America/Los_Angeles"))
def testExistsIsNyUpload(
    self, mock_get_all_tx: Mock, mock_open: Mock, mock_get: Mock
) -> None:
    """A PDF already present in the historical bucket is still written to
    the upload bucket when scraping New York."""
    historical_path = build_path(HISTORICAL_BUCKET, "new_york", EXISTING_PDF_NAME)
    upload_path = build_path(UPLOAD_BUCKET, "new_york", EXISTING_PDF_NAME)

    # Make the info call return an older modified time than the server time.
    self.fs.test_add_path(historical_path, local_path=None)
    mock_get_all_tx.return_value = {EXISTING_TEST_URL}
    mock_get.side_effect = _MockGet

    headers = {"X-Appengine-Cron": "test-cron"}
    response = self.client.get("/scrape_state?state=new_york", headers=headers)
    self.assertEqual(response.status_code, 200)

    # Both the pre-existing historical path and the new upload path exist.
    self.assertListEqual(self.fs.all_paths, [historical_path, upload_path])
    mock_open.assert_called_with(ANY, "wb")
    mock_get.assert_called_with(EXISTING_TEST_URL, verify=True)
def test_convert_slack_workspace_messages(self, mock_get_total_messages_and_usermessages: mock.Mock,
                                          mock_get_all_messages: mock.Mock,
                                          mock_allocate_ids: mock.Mock,
                                          mock_message: mock.Mock) -> None:
    """convert_slack_workspace_messages passes through the message and
    usermessage lists produced by the (mocked) message converter."""
    added_channels = {'random': 1, 'general': 2}
    zerver_message = [{'id': 1}, {'id': 5}]

    realm = {'zerver_subscription': []}  # type: Dict[str, Any]
    user_list = []  # type: List[Dict[str, Any]]
    zerver_usermessage = [{'id': 3}, {'id': 5}, {'id': 6}, {'id': 9}]

    # A single converter call yields the [messages, usermessages] pair.
    mock_message.side_effect = [[zerver_message, zerver_usermessage]]

    message_json = convert_slack_workspace_messages('./random_path', user_list, 2, {}, {},
                                                    added_channels, realm)
    self.assertEqual(message_json['zerver_message'], zerver_message)
    self.assertEqual(message_json['zerver_usermessage'], zerver_usermessage)
def test_query_large_result_set(conn_cnx, db_parameters, ingest_data):
    """[s3] Gets Large Result set.

    Runs the same query twice, checks both passes return identical rows,
    and verifies each chunk-related telemetry metric is logged once per
    query (two logs in total per metric).
    """
    sql = "select * from {name} order by 1".format(name=db_parameters["name"])
    with conn_cnx(
        user=db_parameters["user"],
        account=db_parameters["account"],
        password=db_parameters["password"],
    ) as cnx:
        # Capture telemetry instead of batching/sending it.
        telemetry_data = []
        add_log_mock = Mock()
        add_log_mock.side_effect = lambda datum: telemetry_data.append(datum)
        cnx._telemetry.add_log_to_batch = add_log_mock

        # First pass over the large result set.
        result2 = [rec for rec in cnx.cursor().execute(sql)]
        num_rows = len(result2)
        assert result2[0][0] == ingest_data[0]
        assert result2[num_rows - 1][8] == ingest_data[1]

        # Second pass must yield the same rows.
        result999 = [rec for rec in cnx.cursor().execute(sql)]
        num_rows = len(result999)
        assert result999[0][0] == ingest_data[0]
        assert result999[num_rows - 1][8] == ingest_data[1]

        assert len(result2) == len(
            result999), "result length is different: result2, and result999"
        for i, (x, y) in enumerate(zip(result2, result999)):
            assert x == y, "element {}".format(i)

        # verify that the expected telemetry metrics were logged
        expected = [
            TelemetryField.TIME_CONSUME_FIRST_RESULT,
            TelemetryField.TIME_CONSUME_LAST_RESULT,
            TelemetryField.TIME_PARSING_CHUNKS,
            TelemetryField.TIME_DOWNLOADING_CHUNKS,
        ]
        for field in expected:
            occurrences = sum(
                1 for x in telemetry_data if x.message["type"] == field
            )
            # Fixed: the old failure message said "three" although the
            # assertion checks for two logs (one per executed query).
            assert occurrences == 2, (
                "Expected two telemetry logs (one per query) "
                "for log type {}".format(field))
def test_transform_photo(self):
    """The serialized photo field exposes one URL per supported dimension."""
    user = UserFactory.create(userprofile={'timezone': 'Europe/Athens'})

    # Echo the requested dimensions back as the "URL".
    photo_url_stub = Mock()
    photo_url_stub.side_effect = lambda dimensions: dimensions
    user.userprofile.get_photo_url = photo_url_stub

    serialized = UserProfileDetailedSerializer(user.userprofile).data

    expected_photo = {
        'value': '300x300',
        '150x150': '150x150',
        '300x300': '300x300',
        '500x500': '500x500',
        'privacy': 'Mozillians',
    }
    eq_(serialized['photo'], expected_photo)
def test_suggest_int(storage_init_func):
    # type: (Callable[[], storages.BaseStorage]) -> None
    """Trial._suggest returns the cached value for a previously drawn param."""
    mock = Mock()
    mock.side_effect = [1, 2, 3]
    sampler = samplers.RandomSampler()
    with patch.object(sampler, "sample_independent", mock) as mock_object:
        study = create_study(storage_init_func(), sampler=sampler)
        trial = Trial(study, study._storage.create_new_trial(study._study_id))
        distribution = IntUniformDistribution(low=0, high=3)

        assert trial._suggest("x", distribution) == 1  # Test suggesting a param.
        assert trial._suggest("x", distribution) == 1  # Test suggesting the same param.
        assert trial._suggest("y", distribution) == 3  # Test suggesting a different param.
        assert trial.params == {"x": 1, "y": 3}
        # The sampler runs on every _suggest call: the repeated "x" consumes
        # the value 2 but the cached 1 wins, hence 3 sampler invocations.
        assert mock_object.call_count == 3
def test_nocontention_and_no_lock_delete(self):
    """Acquiring an uncontended lock that expires before release must not
    issue a redis delete."""
    mock_redis = Mock(spec=redis.Redis)
    mock_time = Mock()
    # time.time() values are consumed from the END of this list: zeros
    # during acquisition, then 35 on exit — past the 30s expiry.
    vals = [35, 0, 0, 0]
    mock_time.side_effect = lambda: vals.pop()

    @patch('retools.global_connection._redis', mock_redis)
    @patch('time.time', mock_time)
    def test_it():
        lock = self._makeOne()
        with lock('somekey', expires=30):
            val = 2 + 4
    test_it()

    method_names = [x[0] for x in mock_redis.method_calls]
    eq_(method_names[0], 'setnx')
    # The lock expired on its own, so no explicit delete is performed.
    assert 'delete' not in method_names
def test_transform_photo(self):
    """The serialized photo field contains one URL per supported dimension."""
    def _get_url(dimensions):
        # Echo the requested dimensions back as the "URL".
        return dimensions

    user = UserFactory.create(userprofile={'timezone': 'Europe/Athens'})
    user.userprofile._groups = Group.objects.none()
    context = {'request': self.factory.get('/')}

    get_photo_url_mock = Mock()
    get_photo_url_mock.side_effect = _get_url
    user.userprofile.get_photo_url = get_photo_url_mock

    serializer = UserProfileDetailedSerializer(user.userprofile, context=context)
    photo = {'value': '300x300',
             '150x150': '150x150',
             '300x300': '300x300',
             '500x500': '500x500',
             'privacy': 'Mozillians'}
    eq_(serializer.data['photo'], photo)
def test_slack_import_to_existing_database(
        self, mock_get_slack_api_data: mock.Mock,
        mock_build_avatar_url: mock.Mock,
        mock_build_avatar: mock.Mock,
        mock_process_uploads: mock.Mock,
        mock_attachment: mock.Mock) -> None:
    """Convert a slack export and import it into an existing database."""
    test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                  "slack_fixtures")
    test_slack_zip_file = os.path.join(test_slack_dir, "test_slack_importer.zip")
    test_slack_unzipped_file = os.path.join(test_slack_dir, "test_slack_importer")

    test_realm_subdomain = 'test-slack-import'
    output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-slack-importer-data")
    token = 'valid-token'

    # If the test fails, the 'output_dir' would not be deleted and hence it would give an
    # error when we run the tests next time, as 'do_convert_data' expects an empty 'output_dir'
    # hence we remove it before running 'do_convert_data'
    rm_tree(output_dir)
    # Also the unzipped data file should be removed if the test fails at 'do_convert_data'
    rm_tree(test_slack_unzipped_file)

    user_data_fixture = os.path.join(settings.DEPLOY_ROOT, "zerver", "fixtures",
                                     "slack_fixtures", "user_data.json")
    # Fixed: close the fixture file instead of leaking the handle.
    with open(user_data_fixture) as user_data_file:
        mock_get_slack_api_data.side_effect = [
            ujson.load(user_data_file)['members'], {}
        ]

    do_convert_data(test_slack_zip_file, test_realm_subdomain, output_dir, token)
    self.assertTrue(os.path.exists(output_dir))
    self.assertTrue(os.path.exists(output_dir + '/realm.json'))

    # test import of the converted slack data into an existing database
    do_import_realm(output_dir)
    # NOTE(review): assertTrue treats its second argument as a failure
    # message, so this only checks the name is truthy; assertEqual was
    # probably intended — confirm before tightening.
    self.assertTrue(get_realm(test_realm_subdomain).name, test_realm_subdomain)
    Realm.objects.filter(name=test_realm_subdomain).delete()

    remove_folder(output_dir)
    # remove tar file created in 'do_convert_data' function
    os.remove(output_dir + '.tar.gz')
    self.assertFalse(os.path.exists(output_dir))
def test_check_is_runnable(self, raw_input, ADBHost):
    """check_is_runnable passes with a device attached and raises
    LauncherNotRunnable when none is found or ADBHost errors out."""
    raw_input.return_value = 'y'
    devices = Mock(return_value=True)
    ADBHost.return_value = Mock(devices=devices)
    # this won't raise errors
    launchers.FennecLauncher.check_is_runnable()

    # exception raised if there is no device
    raw_input.return_value = 'y'
    devices.return_value = False
    self.assertRaises(LauncherNotRunnable,
                      launchers.FennecLauncher.check_is_runnable)

    # or if ADBHost().devices() raise an unexpected IOError
    devices.side_effect = ADBError()
    self.assertRaises(LauncherNotRunnable,
                      launchers.FennecLauncher.check_is_runnable)
def test_snapshot_duplicate_failure(self):
    """Creating a snapshot whose identifier already exists surfaces the
    DBClusterSnapshotAlreadyExistsFault raised by the boto3 client."""
    os.environ["Region"] = "us-west-2"

    factory_patch = patch('cluster_snapshot_function.boto3.client')
    mock_factory_boto_client = factory_patch.start()
    # Fixed: stop the patch after the test so later tests see real boto3.
    self.addCleanup(factory_patch.stop)

    mock_response = Mock(name='response')
    mock_factory_boto_client.return_value = mock_response
    duplicate_error = "Identifier:database-1 \nDBClusterSnapshotAlreadyExistsFault"
    mock_factory_boto_client.side_effect = Exception(duplicate_error)
    mock_response.side_effect = Exception(duplicate_error)

    event = create_event()
    # Fixed: the old bare try/except silently PASSED when no exception was
    # raised; assertRaises makes a missing exception a test failure.
    with self.assertRaises(Exception) as ctx:
        cluster_snapshot_function.lambda_create_cluster_snapshot(event, {})
    self.assertEqual(str(ctx.exception), duplicate_error)
def test_scandir_success(self):
    """scandir yields DirEntry objects for regular entries and skips '.'."""
    mock_glfs_opendir = Mock()
    mock_glfs_opendir.return_value = 2

    # A regular file entry (st_mode 33188 == 0o100644).
    dirent1 = api.Dirent()
    dirent1.d_name = b"mockfile"
    dirent1.d_reclen = 8
    stat1 = api.Stat()
    stat1.st_nlink = 1
    stat1.st_mode = 33188

    # A directory entry (st_mode 16877 == 0o40755).
    dirent2 = api.Dirent()
    dirent2.d_name = b"mockdir"
    dirent2.d_reclen = 7
    stat2 = api.Stat()
    stat2.st_nlink = 2
    stat2.st_mode = 16877

    # The '.' entry, which scandir is expected to filter out.
    dirent3 = api.Dirent()
    dirent3.d_name = b"."
    dirent3.d_reclen = 1
    stat3 = api.Stat()
    # NOTE(review): 'n_link' looks like a typo for 'st_nlink' — harmless
    # here since '.' is skipped before its stat is inspected.
    stat3.n_link = 2
    stat3.st_mode = 16877

    mock_Dir_next = Mock()
    mock_Dir_next.side_effect = [(dirent1, stat1), (dirent2, stat2),
                                 (dirent3, stat3), StopIteration]

    with patch("gluster.gfapi.api.glfs_opendir", mock_glfs_opendir):
        # Patch both the Python 3 and Python 2 iterator protocol methods.
        with patch("gluster.gfapi.Dir.__next__", mock_Dir_next):
            with patch("gluster.gfapi.Dir.next", mock_Dir_next):
                i = 0
                for entry in self.vol.scandir("testdir"):
                    self.assertTrue(isinstance(entry, DirEntry))
                    if entry.name == 'mockfile':
                        self.assertEqual(entry.path, 'testdir/mockfile')
                        self.assertTrue(entry.is_file())
                        self.assertFalse(entry.is_dir())
                        self.assertEqual(entry.stat().st_nlink, 1)
                    elif entry.name == 'mockdir':
                        self.assertEqual(entry.path, 'testdir/mockdir')
                        self.assertTrue(entry.is_dir())
                        self.assertFalse(entry.is_file())
                        self.assertEqual(entry.stat().st_nlink, 2)
                    else:
                        self.fail("Unexpected entry")
                    i = i + 1
                # Only the two real entries were yielded; '.' was skipped.
                self.assertEqual(i, 2)
def test_uploadHistogram(self):
    """histogram_append uploads via the web API and records the last
    timestamp of the appended data."""
    # XDR payload for the partitions response — presumably version 1 with
    # zero partition entries; confirm against the sensorcloud protocol.
    packer = xdrlib.Packer()
    packer.pack_int(1)
    packer.pack_int(0)
    noPartitions = Mock()
    noPartitions.status_code = 200
    noPartitions.raw = packer.get_buffer()

    request = Mock()
    # Scripted responses: auth, then created, then the partitions query.
    request.side_effect = [authRequest(), created(), noPartitions]
    sensorcloud.webrequest.Requests.Request = request

    device = sensorcloud.Device("FAKE", "fake")
    sensor = device.sensor("sensor")
    channel = sensor.channel("channel")
    channel.histogram_append(sensorcloud.SampleRate.hertz(10),
                             [sensorcloud.Histogram(12345, 0.0, 1.0, [10.5, 4328.3])])

    # The histogram's timestamp is remembered as the channel's last write.
    self.assertEqual(channel.last_timestamp_nanoseconds, 12345)
def test_suggest_discrete_uniform(storage_init_func):
    # type: (typing.Callable[[], storages.BaseStorage]) -> None
    """Trial._suggest returns the cached value for a previously drawn param."""
    mock = Mock()
    mock.side_effect = [1., 2., 3.]
    sampler = samplers.RandomSampler()
    with patch.object(sampler, 'sample_independent', mock) as mock_object:
        study = create_study(storage_init_func(), sampler=sampler)
        trial = Trial(study, study.storage.create_new_trial_id(study.study_id))
        distribution = distributions.DiscreteUniformDistribution(low=0., high=3., q=1.)

        assert trial._suggest('x', distribution) == 1.  # Test suggesting a param.
        assert trial._suggest('x', distribution) == 1.  # Test suggesting the same param.
        assert trial._suggest('y', distribution) == 3.  # Test suggesting a different param.
        assert trial.params == {'x': 1., 'y': 3.}
        # The sampler runs on every _suggest call: the repeated 'x' consumes
        # the value 2. but the cached 1. wins, hence 3 sampler invocations.
        assert mock_object.call_count == 3
def test_stop_timezone(
    self,
    mock_sessions: Mock,
    mock_close: Mock,
    mock_phase: Mock,
    mock_task_manager: Mock,
    mock_region: Mock,
    mock_supported: Mock,
) -> None:
    """Stopping with a timezone filter only stops matching regions and
    advances their sessions to the PERSIST phase."""
    session = sessions.ScrapeSession.new(
        key=None,
        region="us_ut",
        scrape_type=constants.ScrapeType.BACKGROUND,
        phase=scrape_phase.ScrapePhase.SCRAPE,
    )
    mock_sessions.return_value = session
    mock_close.return_value = [session]
    mock_scraper = create_autospec(BaseScraper)
    mock_region.return_value = fake_region(scraper=mock_scraper)
    # Presumably _MockSupported narrows "all" to us_ut for this timezone.
    mock_supported.side_effect = _MockSupported

    request_args = {
        "region": "all",
        "scrape_type": "all",
        "timezone": "America/New_York",
        "respect_is_stoppable": "false",
    }
    headers = {"X-Appengine-Cron": "test-cron"}
    response = self.client.get("/stop",
                               query_string=request_args,
                               headers=headers)
    assert response.status_code == 200

    # Both scrape types are stopped for the matching region.
    mock_sessions.assert_has_calls([
        call(ScrapeKey("us_ut", constants.ScrapeType.BACKGROUND)),
        call(ScrapeKey("us_ut", constants.ScrapeType.SNAPSHOT)),
    ])
    mock_phase.assert_has_calls(
        [call(session, scrape_phase.ScrapePhase.PERSIST)] * 2)
    mock_region.assert_has_calls([call("us_ut")])
    mock_scraper.stop_scrape.assert_called_with(
        constants.ScrapeType.SNAPSHOT, "false")
    mock_supported.assert_called_with(
        stripes=[], timezone=pytz.timezone("America/New_York"))
    mock_task_manager.return_value.create_scraper_phase_task.assert_called_with(
        region_code="us_ut", url="/read_and_persist")
def test_restore_failure(self):
    """Restoring a cluster from a missing instance surfaces the
    DBInstanceNotFound error raised by the boto3 client."""
    os.environ["Region"] = "us-west-2"

    factory_patch = patch('cluster_restore_function.boto3.client')
    mock_factory_boto_client = factory_patch.start()
    # Fixed: stop the patch after the test so later tests see real boto3.
    self.addCleanup(factory_patch.stop)

    mock_response = Mock(name='response')
    mock_factory_boto_client.return_value = mock_response
    not_found_error = "DBInstanceIdentifier:database-1 \nDBInstanceNotFound"
    mock_factory_boto_client.side_effect = Exception(not_found_error)
    mock_response.side_effect = Exception(not_found_error)

    event = create_event()
    # Fixed: the old bare try/except silently PASSED when no exception was
    # raised; assertRaises makes a missing exception a test failure.
    with self.assertRaises(Exception) as ctx:
        cluster_restore_function.lambda_restore_dbcluster(event, {})
    self.assertEqual(str(ctx.exception), not_found_error)
def testCaNoExistsUpload200( self, mock_get_all_ca: Mock, mock_open: Mock, mock_post: Mock ) -> None: upload_path = build_path(UPLOAD_BUCKET, "california", EXISTING_CA_NAME) # Make the info call return an older modified time than the server time. mock_get_all_ca.return_value = [(EXISTING_TEST_URL_CA, CA_POST_DATA)] mock_post.side_effect = _MockGet headers = {"X-Appengine-Cron": "test-cron"} response = self.client.get("/scrape_state?state=california", headers=headers) self.assertEqual(response.status_code, 200) self.assertListEqual(self.fs.all_paths, [upload_path]) mock_open.assert_called_with(ANY, "wb") mock_post.assert_called_with( EXISTING_TEST_URL_CA, data=CA_POST_DATA, verify=True )
def test_slack_import_to_existing_database(self, mock_get_slack_api_data: mock.Mock,
                                           mock_build_avatar_url: mock.Mock,
                                           mock_build_avatar: mock.Mock,
                                           mock_process_uploads: mock.Mock,
                                           mock_attachment: mock.Mock) -> None:
    """Convert a slack export and import it into an existing database,
    then check the realm and its RealmAuditLog entries."""
    test_slack_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "tests", "fixtures",
                                  "slack_fixtures")
    test_slack_zip_file = os.path.join(test_slack_dir, "test_slack_importer.zip")
    test_slack_unzipped_file = os.path.join(test_slack_dir, "test_slack_importer")
    test_realm_subdomain = 'test-slack-import'
    output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-slack-importer-data")
    token = 'valid-token'

    # If the test fails, the 'output_dir' would not be deleted and hence it would give an
    # error when we run the tests next time, as 'do_convert_data' expects an empty 'output_dir'
    # hence we remove it before running 'do_convert_data'
    self.rm_tree(output_dir)
    # Also the unzipped data file should be removed if the test fails at 'do_convert_data'
    self.rm_tree(test_slack_unzipped_file)

    user_data_fixture = ujson.loads(self.fixture_data('user_data.json', type='slack_fixtures'))
    # First API call returns the member list; the second returns nothing.
    mock_get_slack_api_data.side_effect = [user_data_fixture['members'], {}]

    do_convert_data(test_slack_zip_file, output_dir, token)
    self.assertTrue(os.path.exists(output_dir))
    self.assertTrue(os.path.exists(output_dir + '/realm.json'))

    # test import of the converted slack data into an existing database
    with self.settings(BILLING_ENABLED=False):
        do_import_realm(output_dir, test_realm_subdomain)
    realm = get_realm(test_realm_subdomain)
    # NOTE(review): assertTrue takes its second argument as a failure
    # message, so this only checks the name is truthy; assertEqual was
    # probably intended.
    self.assertTrue(realm.name, test_realm_subdomain)

    # test RealmAuditLog
    realmauditlog = RealmAuditLog.objects.filter(realm=realm)
    realmauditlog_event_type = {log.event_type for log in realmauditlog}
    self.assertEqual(realmauditlog_event_type, {RealmAuditLog.SUBSCRIPTION_CREATED,
                                                RealmAuditLog.REALM_PLAN_TYPE_CHANGED})

    Realm.objects.filter(name=test_realm_subdomain).delete()
    remove_folder(output_dir)
    # remove tar file created in 'do_convert_data' function
    os.remove(output_dir + '.tar.gz')
    self.assertFalse(os.path.exists(output_dir))
def test_request_cached_with_changing_kwargs(self):
    """
    Ensure that calling a decorated function with different keyword arguments
    will not use a cached value invoked by a previous call with different
    arguments.
    """
    RequestCache.clear_all_namespaces()
    to_be_wrapped = Mock()
    to_be_wrapped.side_effect = [1, 2, 3, 4, 5, 6]
    self.assertEqual(to_be_wrapped.call_count, 0)

    def mock_wrapper(*args, **kwargs):
        """Simple wrapper to let us decorate our mock."""
        return to_be_wrapped(*args, **kwargs)

    wrapped = request_cached(mock_wrapper)

    # This will be a miss, and make an underlying call.
    result = wrapped(1, foo=1)
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 1)

    # This will be a miss, and make an underlying call.
    result = wrapped(2, foo=2)
    self.assertEqual(result, 2)
    self.assertEqual(to_be_wrapped.call_count, 2)

    # This is a bypass of the decorator: the cache is not consulted.
    direct_result = mock_wrapper(3, foo=3)
    self.assertEqual(direct_result, 3)
    self.assertEqual(to_be_wrapped.call_count, 3)

    # These will be hits, and not make an underlying call.
    result = wrapped(1, foo=1)
    self.assertEqual(result, 1)
    self.assertEqual(to_be_wrapped.call_count, 3)

    result = wrapped(2, foo=2)
    self.assertEqual(result, 2)
    self.assertEqual(to_be_wrapped.call_count, 3)

    # Since we're changing foo, this will be a miss.
    result = wrapped(2, foo=5)
    self.assertEqual(result, 4)
    self.assertEqual(to_be_wrapped.call_count, 4)
def test__authenticate(self): authObj = auth.Authenticator(Mock(), auth.KeyStoneV2Authenticator, Mock(), Mock(), Mock(), Mock()) # test response code 200 resp = Mock() resp.status = 200 body = "test_body" auth.ServiceCatalog._load = Mock(return_value=1) authObj.client._time_request = Mock(return_value=(resp, body)) sc = authObj._authenticate(Mock(), Mock()) self.assertEqual(body, sc.catalog) # test AmbiguousEndpoints exception auth.ServiceCatalog.__init__ = \ Mock(side_effect=exceptions.AmbiguousEndpoints) self.assertRaises(exceptions.AmbiguousEndpoints, authObj._authenticate, Mock(), Mock()) # test handling KeyError and raising AuthorizationFailure exception auth.ServiceCatalog.__init__ = Mock(side_effect=KeyError) self.assertRaises(exceptions.AuthorizationFailure, authObj._authenticate, Mock(), Mock()) # test EndpointNotFound exception mock = Mock(side_effect=exceptions.EndpointNotFound) auth.ServiceCatalog.__init__ = mock self.assertRaises(exceptions.EndpointNotFound, authObj._authenticate, Mock(), Mock()) mock.side_effect = None # test response code 305 resp.__getitem__ = Mock(return_value='loc') resp.status = 305 body = "test_body" authObj.client._time_request = Mock(return_value=(resp, body)) l = authObj._authenticate(Mock(), Mock()) self.assertEqual('loc', l) # test any response code other than 200 and 305 resp.status = 404 exceptions.from_response = Mock(side_effect=ValueError) self.assertRaises(ValueError, authObj._authenticate, Mock(), Mock())
def test_202_accepted(self):
    """POST is first accepted for processing (202); once the scripted
    statuses run out, the response falls back to 303 See Other."""
    # response statuses — popped one per request by MyResponse.status.
    statuses = [202]

    def side_effect():
        return statuses.pop()
    statuses_mock = Mock()
    statuses_mock.side_effect = side_effect
    # FIXME fix the pop crazy behaviour

    class MyResponse(Mock):
        @property
        def status(_self):
            # Consume scripted statuses; empty list means "complete" (303).
            try:
                return statuses.pop()
            except:
                return 303

    response = MyResponse()
    self.getresponse_mock.return_value = response

    # getheader: serve a fixed header set, defaulting to a sentinel.
    def getheader(header):
        headers = {
            'content-type': 'application/json; charset=UTF-8',
            'content-location': 'http://localhost/resource/123456/status',
            'location': 'http://localhost/resource/123456',
        }
        return headers.get(header, sentinel.DEFAULT)
    response.getheader.side_effect = getheader

    # read: the body reported while the request is still pending.
    expected_response = json.dumps(
        dict(
            state='pending',
            message='Your request has been accepted for processing.'
        )
    )
    response.read.return_value = expected_response

    obj, resp = self.api.collection.post(attr='value')
    logging.info(obj)
def test_update_previews_apply_throws_exception(
        self, apply_filter_mock: mock.Mock,
        update_preview_image_mock: mock.Mock):
    """Even when applying the filter raises, the previews are cleared and
    the preview image update still runs."""
    apply_filter_mock.side_effect = Exception

    stack_mock = mock.Mock()
    stack_presenter = mock.Mock()
    stack_mock.presenter = stack_presenter
    stack_presenter.get_image.return_value = generate_images()
    self.presenter.stack = stack_mock

    self.presenter.do_update_previews()

    stack_presenter.get_image.assert_called_once_with(
        self.presenter.model.preview_image_idx)
    self.view.clear_previews.assert_called_once()
    update_preview_image_mock.assert_called_once()
    apply_filter_mock.assert_called_once()
def test_send_fail_deadline_error(self):
    """A deadline-exceeded error from urlopen must not defer tracking.

    NOTE(review): send() still reports success here — presumably so that
    timeouts are not retried; confirm this is intentional.
    """
    message = WebhookRequest(
        MockNotification(webhook_message_data={'data': 'value'}),
        'https://www.thebluealliance.com',
        'secret')

    error_mock = Mock()
    error_mock.side_effect = Exception(
        'Deadline exceeded while waiting for HTTP response from URL: https://thebluealliance.com'
    )

    with patch.object(urllib2, 'urlopen', error_mock) as mock_urlopen, patch.object(
            message, 'defer_track_notification') as mock_track:
        success = message.send()

    mock_urlopen.assert_called_once()
    mock_track.assert_not_called()
    self.assertTrue(success)
def run_mock(fabric_integration, monkeypatch):
    """Fixture: replace the fabric ``run`` helpers with a scripted mock.

    Tests append ``(command, kwargs, result)`` tuples to ``run.expected``;
    each invocation asserts it matches the next scripted entry and returns
    the scripted result.
    """
    run = Mock()
    run.expected = []

    def scripted_run(command, **kwargs):
        if run.expected:
            cmd, kw, result = run.expected.pop(0)
        else:  # pragma: nocover
            cmd, kw, result = '', '', ''
        assert command == cmd
        assert kwargs == kw
        return result

    run.side_effect = scripted_run
    monkeypatch.setattr('bsdploy.bootstrap_utils.run', run)
    monkeypatch.setattr('fabric.contrib.files.run', run)
    return run
def yesno_mock(monkeypatch):
    """Fixture: replace the interactive ``yesno`` prompt with a scripted mock.

    Tests append ``(question, answer)`` tuples to ``yesno.expected``; each
    call asserts the question matches and returns the scripted answer.
    """
    yesno = Mock()
    def _yesno(question):
        try:
            expected = yesno.expected.pop(0)
        except IndexError:  # pragma: nocover
            expected = '', False
        cmd, result = expected
        assert question == cmd
        # Python 2 print statement: echoes the prompt into the test output.
        print question
        return result
    yesno.side_effect = _yesno
    yesno.expected = []
    monkeypatch.setattr('bsdploy.bootstrap_utils.yesno', yesno)
    monkeypatch.setattr('ploy.common.yesno', yesno)
    return yesno
def local_mock(fabric_integration, monkeypatch):
    """Fixture: replace ``bootstrap_utils.local`` with a scripted mock.

    Tests append ``(command, kwargs, result)`` tuples to ``local.expected``;
    each invocation asserts it matches the next scripted entry and returns
    the scripted result.
    """
    from mock import Mock
    local = Mock()
    local.expected = []

    def scripted_local(command, **kwargs):
        if local.expected:
            cmd, kw, result = local.expected.pop(0)
        else:  # pragma: nocover
            cmd, kw, result = '', '', ''
        assert command == cmd
        assert kwargs == kw
        return result

    local.side_effect = scripted_local
    monkeypatch.setattr('bsdploy.bootstrap_utils.local', local)
    return local