def test_login(self):
    """End-to-end OpenID login flow with a fully mocked consumer.

    First posts an OpenID identity and checks we are redirected to the
    provider; then simulates a successful callback (mocked SReg/AX
    responses) and verifies a user is created from the AX first name.
    """
    resp = self.client.post(reverse(views.login),
                            dict(openid='http://me.yahoo.com'))
    # the provider redirect should point at Yahoo's OpenID endpoint
    assert 'login.yahooapis' in resp['location']
    with patch('allauth.socialaccount.providers'
               '.openid.views._openid_consumer') as consumer_mock:
        client = Mock()
        complete = Mock()
        consumer_mock.return_value = client
        client.complete = complete
        complete_response = Mock()
        complete.return_value = complete_response
        complete_response.status = consumer.SUCCESS
        complete_response.identity_url = 'http://dummy/john/'
        with patch('allauth.socialaccount.providers'
                   '.openid.utils.SRegResponse') as sr_mock:
            with patch('allauth.socialaccount.providers'
                       '.openid.utils.FetchResponse') as fr_mock:
                sreg_mock = Mock()
                ax_mock = Mock()
                sr_mock.fromSuccessResponse = sreg_mock
                fr_mock.fromSuccessResponse = ax_mock
                # SReg yields nothing; AX supplies the first name
                sreg_mock.return_value = {}
                ax_mock.return_value = {AXAttribute.PERSON_FIRST_NAME:
                                        ['raymond']}
                resp = self.client.post(reverse('openid_callback'))
                self.assertEqual('http://testserver/accounts/profile/',
                                 resp['location'])
                # raises DoesNotExist if the user was not created
                get_user_model().objects.get(first_name='raymond')
def test_init_thresholds(self):
    """Constructor must forward warning/critical thresholds to each service.

    NOTE(review): the last two asserts read ``self.mock_version`` and
    ``self.cls`` while this test configures local ``mock_version``/``cls``;
    presumably those attributes come from the fixture's setup — verify.
    """
    mock_svc1 = Mock(spec_set=_AwsService)
    mock_svc2 = Mock(spec_set=_AwsService)
    mock_foo = Mock(spec_set=_AwsService)
    mock_bar = Mock(spec_set=_AwsService)
    mock_foo.return_value = mock_svc1
    mock_bar.return_value = mock_svc2
    svcs = {'SvcFoo': mock_foo, 'SvcBar': mock_bar}
    with patch.dict('awslimitchecker.checker._services',
                    values=svcs, clear=True):
        with patch.multiple(
                'awslimitchecker.checker',
                logger=DEFAULT,
                _get_version_info=DEFAULT,
                autospec=True,
        ) as mocks:
            mock_version = mocks['_get_version_info']
            mock_version.return_value = self.mock_ver_info
            cls = AwsLimitChecker(
                warning_threshold=5,
                critical_threshold=22,
            )
    # dict should be of _AwsService instances
    assert cls.services == {
        'SvcFoo': mock_svc1,
        'SvcBar': mock_svc2
    }
    # _AwsService instances should exist, but have no other calls
    assert mock_foo.mock_calls == [call(5, 22, None, None, None)]
    assert mock_bar.mock_calls == [call(5, 22, None, None, None)]
    assert mock_svc1.mock_calls == []
    assert mock_svc2.mock_calls == []
    assert self.mock_version.mock_calls == [call()]
    assert self.cls.vinfo == self.mock_ver_info
def test_canInstall(self, mock_canInstall):
    """canInstall() requires the superclass check AND fully_specified().

    Exercises the three branches: superclass failure, fully_specified
    failure (which must log an error), and full success.
    """
    posix = self.get_obj()
    entry = lxml.etree.Element("Path", name="test", type="file")
    # first, test superclass canInstall failure
    mock_canInstall.return_value = False
    self.assertFalse(posix.canInstall(entry))
    mock_canInstall.assert_called_with(posix, entry)
    # next, test fully_specified failure
    posix.logger.error.reset_mock()
    mock_canInstall.reset_mock()
    mock_canInstall.return_value = True
    mock_fully_spec = Mock()
    mock_fully_spec.return_value = False
    # install the fake fully_specified on the handler for this entry type
    posix._handlers[entry.get("type")].fully_specified = \
        mock_fully_spec
    self.assertFalse(posix.canInstall(entry))
    mock_canInstall.assert_called_with(posix, entry)
    mock_fully_spec.assert_called_with(entry)
    self.assertTrue(posix.logger.error.called)
    # finally, test success
    posix.logger.error.reset_mock()
    mock_canInstall.reset_mock()
    mock_fully_spec.reset_mock()
    mock_fully_spec.return_value = True
    self.assertTrue(posix.canInstall(entry))
    mock_canInstall.assert_called_with(posix, entry)
    mock_fully_spec.assert_called_with(entry)
    self.assertFalse(posix.logger.error.called)
def setup_interval_thread(self, **kwargs):
    """
    Build an IntervalThread whose abstract hooks are replaced by mocks.

    Each constructor argument may be overridden via a keyword argument of
    the same name; anything not supplied falls back to the corresponding
    attribute on the test fixture.  ``mock_get_data`` / ``mock_send_data``
    override the default mock implementations of the two hooks.
    """
    ctor_args = [kwargs.get(name, getattr(self, name))
                 for name in ('logger', 'config', 'source', 'dest',
                              'terminate_event', 'interval', 'oneshot')]
    interval_thread = IntervalThread(*ctor_args)

    get_data = kwargs.get('mock_get_data', None)
    if get_data is None:
        get_data = Mock(return_value=self._get_data_return)

    send_data = kwargs.get('mock_send_data', None)
    if send_data is None:
        send_data = Mock(return_value=self._send_data_return)

    # Splice the (possibly mocked) hook implementations onto the thread.
    interval_thread._get_data = get_data
    interval_thread._send_data = send_data
    return interval_thread
def test_actualizar(self): ''' Prueba el metodo actualizar. ''' # preparo datos self.actualizador.version_actual = 'a' self.actualizador.version_disponible = 'b' mock_descargar = Mock() mock_descargar.return_value = [ { 'nombre': 'foo', 'descripcion': 'bar' }, { 'nombre': 'bar', 'descripcion': 'bar' }, ] self.actualizador.descargar_actualizacion = mock_descargar mock_aplicar = Mock() mock_aplicar.return_value = True self.actualizador.aplicar_actualizacion = mock_aplicar # llamo metodo a probar self.actualizador.actualizar() # verifico que todo este bien mock_descargar.assert_called_once() mock_aplicar.assert_called() assert mock_aplicar.call_count == 2 assert self.actualizador.version_actual == 'b'
def test_get_all_with_clauses(get_query_clauses):
    """
    Test :py:meth:`~.BaseRallyModel.get_all` when clauses are supplied.

    Verifies that ``get_all`` builds the query via ``get_query_clauses``,
    feeds the result to ``get_all_results_for_query``, and returns whatever
    ``convert_from_query_result`` produces.
    """
    DummyClass = get_inherited_class_object()
    get_query_clauses.return_value = "mock_query"

    results_mock = Mock(return_value="mock_results")
    convert_mock = Mock(return_value="mock_conversion")
    DummyClass.get_all_results_for_query = results_mock
    DummyClass.convert_from_query_result = convert_mock

    response = DummyClass.get_all("clauses")

    assert_equal(get_query_clauses.call_args[0][0], "clauses")
    assert_equal(results_mock.call_args[0][0], "mock_query")
    assert_equal(convert_mock.call_args[0][0], "mock_results")
    assert_equal(response, "mock_conversion")
def test_request_handler_returns_process_stdout_when_making_response(self, lambda_output_parser_mock):
    """_request_handler must return the response built by service_response
    from the *parsed* Lambda output, and write the Lambda logs to stderr.

    Fixes: removed a redundant ``self.service.service_response`` assignment
    that was immediately superseded, and replaced the deprecated
    ``assertEquals`` alias with ``assertEqual``.
    """
    make_response_mock = Mock()
    self.service._get_current_route = Mock()
    self.service._construct_event = Mock()

    parse_output_mock = Mock()
    parse_output_mock.return_value = ("status_code", "headers", "body")
    self.service._parse_lambda_output = parse_output_mock

    lambda_logs = "logs"
    lambda_response = "response"
    is_customer_error = False
    lambda_output_parser_mock.get_lambda_output.return_value = \
        lambda_response, lambda_logs, is_customer_error

    # service_response is what _request_handler ultimately returns
    service_response_mock = Mock()
    service_response_mock.return_value = make_response_mock
    self.service.service_response = service_response_mock

    result = self.service._request_handler()

    self.assertEqual(result, make_response_mock)
    lambda_output_parser_mock.get_lambda_output.assert_called_with(ANY)

    # Make sure the parse method is called only on the returned response
    # and not on the raw data from stdout
    parse_output_mock.assert_called_with(lambda_response, ANY, ANY)

    # Make sure the logs are written to stderr
    self.stderr.write.assert_called_with(lambda_logs)
def test_get_all_without_clauses(get_query_clauses):
    """
    Test :py:meth:`~.BaseRallyModel.get_all` when no clauses are given.

    Verifies that ``get_query_clauses`` is never consulted, that
    ``get_all_results_for_query`` receives an empty query string, and that
    the return value comes from ``convert_from_query_result``.
    """
    DummyClass = get_inherited_class_object()

    results_mock = Mock(return_value="mock_results")
    convert_mock = Mock(return_value="mock_conversion")
    DummyClass.get_all_results_for_query = results_mock
    DummyClass.convert_from_query_result = convert_mock

    response = DummyClass.get_all()

    assert_false(get_query_clauses.called)
    assert_equal(results_mock.call_args[0][0], "")
    assert_equal(convert_mock.call_args[0][0], "mock_results")
    assert_equal(response, "mock_conversion")
def test_channel_pool_release_reacquire(self, bcbmock):
    """A released pooled channel is handed back out on the next request
    instead of creating a new one."""
    ncm = Mock()
    ncm.return_value = Mock(spec=BidirClientChannel)
    ncm.return_value._queue_auto_delete = False
    ncm.return_value.get_channel_id.return_value = sentinel.chid
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch = self._node.channel(BidirClientChannel)
    # return value is not a mock factory - it returns the same mock
    # instance as we declared above, so redeclare it so they get
    # unique chids
    ncm.return_value = Mock(spec=BidirClientChannel)
    ncm.return_value._queue_auto_delete = False
    ncm.return_value.get_channel_id.return_value = sentinel.chid2
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch2 = self._node.channel(BidirClientChannel)
    self.assertEquals(ch.get_channel_id(), sentinel.chid)
    self.assertEquals(ch2.get_channel_id(), sentinel.chid2)
    # return ch to the pool
    with patch('pyon.net.messaging.log'):
        self._node.on_channel_request_close(ch)
    # reacquire ch
    call_count = ncm.call_count
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch3 = self._node.channel(BidirClientChannel)
    # no new calls to the create method have been made
    self.assertEquals(ncm.call_count, call_count)
    # we got the first mocked channel back
    self.assertEquals(ch3.get_channel_id(), sentinel.chid)
def layer_vars(self, request, dummy_input_layer, DenseLayer):
    """Construct a DenseLayer with deterministic mocked initializers.

    ``request.param`` supplies ``num_leading_axes``; W/b are mocks whose
    return values are arange-based arrays of the appropriate shapes so
    downstream tests can verify exact outputs.
    """
    input_shape = dummy_input_layer.shape
    num_units = 5
    num_leading_axes = request.param
    W_shape = (np.prod(input_shape[num_leading_axes:]), num_units)
    b_shape = (num_units,)

    W = Mock(return_value=np.arange(np.prod(W_shape)).reshape(W_shape))
    b = Mock(return_value=np.arange(np.prod(b_shape)).reshape(b_shape) * 3)
    nonlinearity = Mock()

    layer = DenseLayer(
        dummy_input_layer,
        num_units=num_units,
        num_leading_axes=num_leading_axes,
        W=W,
        b=b,
        nonlinearity=nonlinearity,
    )
    return dict(
        input_shape=input_shape,
        num_units=num_units,
        num_leading_axes=num_leading_axes,
        W_shape=W_shape,
        b_shape=b_shape,
        W=W,
        b=b,
        nonlinearity=nonlinearity,
        layer=layer,
    )
def test_channel_pool(self):
    """Acquiring bidirectional channels grows the node's pool and
    pool map in lockstep."""
    ncm = Mock()
    ncm.return_value = Mock(spec=BidirClientChannel)
    ncm.return_value._queue_auto_delete = False
    ncm.return_value.get_channel_id.return_value = sentinel.chid
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch = self._node.channel(BidirClientChannel)
    # should expect to see this show up in the node's mappings
    self.assertIn(ch, self._node._bidir_pool.itervalues())
    self.assertIn(sentinel.chid, self._node._pool_map)
    self.assertEquals(len(self._node._pool_map), 1)
    self.assertEquals(len(self._node._pool_map), len(self._node._bidir_pool))
    # let's grab another one to watch our pool grow
    # return value is not a mock factory - it returns the same mock
    # instance as we declared above, so redeclare it so they get
    # unique chids
    ncm.return_value = Mock(spec=BidirClientChannel)
    ncm.return_value._queue_auto_delete = False
    ncm.return_value.get_channel_id.return_value = sentinel.chid2
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch2 = self._node.channel(BidirClientChannel)
    self.assertEquals(ch.get_channel_id(), sentinel.chid)
    self.assertEquals(ch2.get_channel_id(), sentinel.chid2)
    self.assertNotEqual(ch, ch2)
    self.assertIn(ch2, self._node._bidir_pool.itervalues())
    self.assertIn(sentinel.chid2, self._node._pool_map)
    self.assertEquals(len(self._node._pool_map), 2)
    self.assertEquals(len(self._node._pool_map), len(self._node._bidir_pool))
def test_channel_pool_release(self):
    """Releasing one of two pooled channels keeps it in the bidir pool
    but removes it from the active pool map."""
    ncm = Mock()
    ncm.return_value = Mock(spec=BidirClientChannel)
    ncm.return_value._queue_auto_delete = False
    ncm.return_value.get_channel_id.return_value = sentinel.chid
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch = self._node.channel(BidirClientChannel)
    # return value is not a mock factory - it returns the same mock
    # instance as we declared above, so redeclare it so they get
    # unique chids
    ncm.return_value = Mock(spec=BidirClientChannel)
    ncm.return_value._queue_auto_delete = False
    ncm.return_value.get_channel_id.return_value = sentinel.chid2
    with patch('pyon.net.messaging.NodeB._new_channel', ncm):
        ch2 = self._node.channel(BidirClientChannel)
    self.assertEquals(ch.get_channel_id(), sentinel.chid)
    self.assertEquals(ch2.get_channel_id(), sentinel.chid2)
    # return ch to the pool
    with patch('pyon.net.messaging.log'):
        self._node.on_channel_request_close(ch)
    # expect to have bidir pool of two, pool map of 1
    self.assertEquals(len(self._node._bidir_pool), 2)
    self.assertEquals(len(self._node._pool_map), 1)
    # ch2 still active so it should be in the pool map
    self.assertIn(sentinel.chid2, self._node._pool_map)
def test_handles(self, mock_get_regex):
    """handles() should consult the basename regex against the event's
    filename, honoring either class-level __basenames__ or an explicit
    ``basename`` argument."""
    match = Mock()
    mock_get_regex.return_value = Mock()
    mock_get_regex.return_value.match = match
    evt = Mock()
    evt.filename = "event.txt"
    if self.test_obj.__basenames__:
        match.return_value = False
        self.assertFalse(self.test_obj.handles(evt))
        mock_get_regex.assert_called_with([b for b in self.test_obj.__basenames__])
        # NOTE(review): leftover debug prints — presumably kept to aid
        # diagnosing match-call mismatches; consider removing
        print("match calls: %s" % match.call_args_list)
        print("expected: %s" % [call(evt.filename) for b in self.test_obj.__basenames__])
        match.assert_called_with(evt.filename)
        mock_get_regex.reset_mock()
        match.reset_mock()
        match.return_value = True
        self.assertTrue(self.test_obj.handles(evt))
        match.assert_called_with(evt.filename)
    else:
        # no class-level basenames: the basename must be passed explicitly
        match.return_value = False
        self.assertFalse(self.test_obj.handles(evt, basename=os.path.basename(self.path)))
        mock_get_regex.assert_called_with([os.path.basename(self.path)])
        match.assert_called_with(evt.filename)
        mock_get_regex.reset_mock()
        match.reset_mock()
        match.return_value = True
        self.assertTrue(self.test_obj.handles(evt, basename=os.path.basename(self.path)))
        mock_get_regex.assert_called_with([os.path.basename(self.path)])
        match.assert_called_with(evt.filename)
def test_rmtree_success(self):
    """rmtree() should walk the tree depth-first: remove the inner dir,
    unlink the file, then remove the root dir.

    The side_effect lists are order-sensitive: first listdir/isdir call
    is for "dir1", the second for "dir1/dir2".
    """
    dir1_list = ["dir2", "file"]
    empty_list = []
    mock_listdir = Mock()
    mock_listdir.side_effect = [dir1_list, empty_list]
    mock_isdir = Mock()
    mock_isdir.side_effect = [True, False]
    mock_unlink = Mock()
    mock_unlink.return_value = 0
    mock_rmdir = Mock()
    mock_rmdir.return_value = 0
    mock_islink = Mock()
    mock_islink.return_value = False
    with nested(patch("gluster.gfapi.Volume.listdir", mock_listdir),
                patch("gluster.gfapi.Volume.isdir", mock_isdir),
                patch("gluster.gfapi.Volume.islink", mock_islink),
                patch("gluster.gfapi.Volume.unlink", mock_unlink),
                patch("gluster.gfapi.Volume.rmdir", mock_rmdir)):
        self.vol.rmtree("dir1")
        mock_rmdir.assert_any_call("dir1/dir2")
        mock_unlink.assert_called_once_with("dir1/file")
        mock_rmdir.assert_called_with("dir1")
def test_validation(self):
    """get_districts() must reject malformed coordinates and accept any
    numeric-looking (lat, lng) pair — floats, ints, or numeric strings."""
    with patch.multiple(self.client,
                        get_data=Mock(return_value={}),
                        construct_single_location_data=Mock(return_value={})):
        # two positional scalars instead of one pair
        with self.assertRaises(TypeError):
            self.client.get_districts(12.3456, 10.432)
        # non-numeric members of the pair
        with self.assertRaises(ValueError):
            self.client.get_districts(('a', 10.432))
        with self.assertRaises(ValueError):
            self.client.get_districts((10.432, 'a'))

        acceptable = [
            (12.3456, 10.432),
            (12.3456, -10.432),
            (12, 10),
            (12, -10),
            ('12.3456', '10.432'),
            ('12.3456', '-10.432'),
            ('12', '10'),
            ('12', '-10'),
        ]
        try:
            for pair in acceptable:
                self.client.get_districts(pair)
        except (TypeError, ValueError):
            self.fail('get_districts should accept float, int, and strings that '
                      'can be converted to floats/ints')
def test_collect_uses_defaults(self, cmdgen):
    """collect() should fall back to port 161 and community 'public'
    when the device config omits them.

    Fixes: removed the unused self-returning ``auth``/``transport`` mocks,
    which were never wired into anything.
    """
    oids = {
        '1.2.3': 'foo.bar',
    }
    # We should allow assuming default values for community and port
    config = {
        'devices': {
            'mydevice': {
                'host': 'localhost',
                'oids': oids,
            }
        }
    }
    collect_snmp = Mock()
    with patch.multiple(self.collector,
                        config=config,
                        collect_snmp=collect_snmp):
        self.collector.collect()

    collect_snmp.assert_called_with(
        'mydevice', 'localhost', 161, 'public'
    )
def test_collect(self, cmdgen):
    """collect() should dispatch collect_snmp with the explicitly
    configured host, port and community.

    Fixes: removed the unused self-returning ``auth``/``transport`` mocks,
    which were never wired into anything.
    """
    oids = {
        '1.2.3': 'foo.bar',
    }
    config = {
        'devices': {
            'mydevice': {
                'host': 'localhost',
                'port': 161,
                'community': 'public',
                'oids': oids,
            }
        }
    }
    collect_snmp = Mock()
    with patch.multiple(self.collector,
                        config=config,
                        collect_snmp=collect_snmp):
        self.collector.collect()

    collect_snmp.assert_called_with(
        'mydevice', 'localhost', 161, 'public'
    )
def test_init_sts_external_id(self):
    """Constructor must forward STS account/role/region/external-id
    arguments to every service and to TrustedAdvisor.

    NOTE(review): the last two asserts read ``self.mock_version`` and
    ``self.cls`` while this test configures local ``mock_version``/``cls``;
    presumably those attributes come from the fixture's setup — verify.
    """
    mock_svc1 = Mock(spec_set=_AwsService)
    mock_svc2 = Mock(spec_set=_AwsService)
    mock_foo = Mock(spec_set=_AwsService)
    mock_bar = Mock(spec_set=_AwsService)
    mock_ta = Mock(spec_set=TrustedAdvisor)
    mock_foo.return_value = mock_svc1
    mock_bar.return_value = mock_svc2
    svcs = {'SvcFoo': mock_foo, 'SvcBar': mock_bar}
    with patch.dict('awslimitchecker.checker._services',
                    values=svcs, clear=True):
        with patch.multiple(
                'awslimitchecker.checker',
                logger=DEFAULT,
                _get_version_info=DEFAULT,
                TrustedAdvisor=DEFAULT,
                autospec=True,
        ) as mocks:
            mock_version = mocks['_get_version_info']
            mock_version.return_value = self.mock_ver_info
            mock_ta_constr = mocks['TrustedAdvisor']
            mocks['TrustedAdvisor'].return_value = mock_ta
            cls = AwsLimitChecker(
                account_id='123456789012',
                account_role='myrole',
                region='myregion',
                external_id='myextid',
                mfa_serial_number=None,
                mfa_token=None
            )
    # dict should be of _AwsService instances
    services = {
        'SvcFoo': mock_svc1,
        'SvcBar': mock_svc2
    }
    assert cls.services == services
    # _AwsService instances should exist, but have no other calls;
    # 80/99 are the default warning/critical thresholds
    assert mock_foo.mock_calls == [
        call(80, 99, '123456789012', 'myrole', 'myregion',
             'myextid', None, None)
    ]
    assert mock_bar.mock_calls == [
        call(80, 99, '123456789012', 'myrole', 'myregion',
             'myextid', None, None)
    ]
    assert mock_ta_constr.mock_calls == [
        call(
            services,
            account_id='123456789012',
            account_role='myrole',
            region='myregion',
            external_id='myextid',
            mfa_serial_number=None,
            mfa_token=None,
        )
    ]
    assert mock_svc1.mock_calls == []
    assert mock_svc2.mock_calls == []
    assert self.mock_version.mock_calls == [call()]
    assert self.cls.vinfo == self.mock_ver_info
def test_template_global_export(self, yaml_parse_mock):
    """export_global_artifacts() must run the Fn::Transform handler over
    every occurrence — in resource properties and inside plain lists —
    and splice the handler's rewritten (s3://) transforms back in."""
    parent_dir = os.path.sep
    template_dir = os.path.join(parent_dir, 'foo', 'bar')
    template_path = os.path.join(template_dir, 'path')
    template_str = self.example_yaml_template()

    resource_type1_class = Mock()
    resource_type1_instance = Mock()
    resource_type1_class.return_value = resource_type1_instance
    resource_type2_class = Mock()
    resource_type2_instance = Mock()
    resource_type2_class.return_value = resource_type2_instance
    resources_to_export = {
        "resource_type1": resource_type1_class,
        "resource_type2": resource_type2_class
    }
    # three Fn::Transform sites: two under resources, one inside a list
    properties1 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}}}
    properties2 = {"foo": "bar", "Fn::Transform": {"Name": "AWS::OtherTransform"}}
    properties_in_list = {"Fn::Transform": {"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}}}
    template_dict = {
        "Resources": {
            "Resource1": {
                "Type": "resource_type1",
                "Properties": properties1
            },
            "Resource2": {
                "Type": "resource_type2",
                "Properties": properties2,
            }
        },
        "List": ["foo", properties_in_list]
    }
    open_mock = mock.mock_open()
    include_transform_export_handler_mock = Mock()
    # handler always rewrites to the uploaded S3 location
    include_transform_export_handler_mock.return_value = {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}}
    yaml_parse_mock.return_value = template_dict

    with patch(
            "awscli.customizations.cloudformation.artifact_exporter.open",
            open_mock(read_data=template_str)) as open_mock:
        with patch.dict(GLOBAL_EXPORT_DICT, {"Fn::Transform": include_transform_export_handler_mock}):
            template_exporter = Template(
                template_path, parent_dir, self.s3_uploader_mock,
                resources_to_export)
            exported_template = template_exporter.export_global_artifacts(template_exporter.template_dict)

            # handler call order is not guaranteed, so collect all call
            # args and assert membership
            first_call_args, kwargs = include_transform_export_handler_mock.call_args_list[0]
            second_call_args, kwargs = include_transform_export_handler_mock.call_args_list[1]
            third_call_args, kwargs = include_transform_export_handler_mock.call_args_list[2]
            call_args = [first_call_args[0], second_call_args[0], third_call_args[0]]
            self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "foo.yaml"}} in call_args)
            self.assertTrue({"Name": "AWS::OtherTransform"} in call_args)
            self.assertTrue({"Name": "AWS::Include", "Parameters": {"Location": "bar.yaml"}} in call_args)
            self.assertEquals(include_transform_export_handler_mock.call_count, 3)

            # new s3 url is added to include location
            self.assertEquals(exported_template["Resources"]["Resource1"]["Properties"]["Fn::Transform"], {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
            self.assertEquals(exported_template["List"][1]["Fn::Transform"], {"Name": "AWS::Include", "Parameters": {"Location": "s3://foo"}})
def test_template_export_metadata(self, yaml_parse_mock):
    """export() must instantiate each metadata exporter with the uploader
    and invoke export() once per matching Metadata section."""
    parent_dir = os.path.sep
    template_dir = os.path.join(parent_dir, 'foo', 'bar')
    template_path = os.path.join(template_dir, 'path')
    template_str = self.example_yaml_template()

    metadata_type1_class = Mock()
    metadata_type1_class.RESOURCE_TYPE = "metadata_type1"
    metadata_type1_class.PROPERTY_NAME = "property_1"
    metadata_type1_instance = Mock()
    metadata_type1_class.return_value = metadata_type1_instance

    metadata_type2_class = Mock()
    metadata_type2_class.RESOURCE_TYPE = "metadata_type2"
    metadata_type2_class.PROPERTY_NAME = "property_2"
    metadata_type2_instance = Mock()
    metadata_type2_class.return_value = metadata_type2_instance

    metadata_to_export = [
        metadata_type1_class,
        metadata_type2_class
    ]

    template_dict = {
        "Metadata": {
            "metadata_type1": {
                "property_1": "abc"
            },
            "metadata_type2": {
                "property_2": "def"
            }
        }
    }
    open_mock = mock.mock_open()
    yaml_parse_mock.return_value = template_dict

    # Patch the file open method to return template string
    with patch(
            "awscli.customizations.cloudformation.artifact_exporter.open",
            open_mock(read_data=template_str)) as open_mock:
        template_exporter = Template(
            template_path, parent_dir, self.s3_uploader_mock,
            metadata_to_export=metadata_to_export)
        exported_template = template_exporter.export()
        self.assertEquals(exported_template, template_dict)

        open_mock.assert_called_once_with(
            make_abs_path(parent_dir, template_path), "r")

        self.assertEquals(1, yaml_parse_mock.call_count)

        metadata_type1_class.assert_called_once_with(self.s3_uploader_mock)
        metadata_type1_instance.export.assert_called_once_with(
            "metadata_type1", mock.ANY, template_dir)
        metadata_type2_class.assert_called_once_with(self.s3_uploader_mock)
        metadata_type2_instance.export.assert_called_once_with(
            "metadata_type2", mock.ANY, template_dir)
def test_makedirs_fail_exception(self): mock_glfs_mkdir = Mock() mock_glfs_mkdir.return_value = -1 mock_exists = Mock() mock_exists.return_value = False with nested(patch("gluster.gfapi.api.glfs_mkdir", mock_glfs_mkdir), patch("gluster.gfapi.Volume.exists", mock_exists)): self.assertRaises(OSError, self.vol.makedirs, "dir1/dir2", 0775)
def layer_vars(self, NINLayer_c01b, dummy_input_layer):
    """Build a NINLayer_c01b with mocked W/b initializers and a mocked
    nonlinearity, returning the pieces for inspection by tests."""
    weight_init = Mock(return_value=np.ones((5, 3)))
    bias_init = Mock(return_value=np.ones((5,)))
    nonlinearity = Mock()
    layer = NINLayer_c01b(
        dummy_input_layer,
        num_units=5,
        W=weight_init,
        b=bias_init,
        nonlinearity=nonlinearity,
    )
    return {
        "W": weight_init,
        "b": bias_init,
        "nonlinearity": nonlinearity,
        "layer": layer,
    }
def mock_hardware(monkeypatch):
    """Replace the proxy server's BMPController and WiringProbe with
    self-returning mocks so tests can inspect construction and usage."""
    bmp_controller = Mock()
    wiring_probe = Mock()
    # Constructors return the mock itself, so the "instance" is the same
    # object tests received here.
    bmp_controller.return_value = bmp_controller
    wiring_probe.return_value = wiring_probe

    monkeypatch.setattr(
        spinner.scripts.proxy_server, "BMPController", bmp_controller)
    monkeypatch.setattr(
        spinner.scripts.proxy_server, "WiringProbe", wiring_probe)

    return bmp_controller, wiring_probe
def mock_urlopen(retval, exception=None, bad_response=False):
    """Build a Mock standing in for urlopen.

    ``bad_response`` yields a malformed payload, ``exception`` makes the
    call raise, otherwise the response wraps ``[retval]`` (or ``[]`` when
    retval is None) as a StringIO.
    """
    urlopen = Mock()
    if bad_response:
        urlopen.return_value = StringIO(unicode([1, 2, 'hello']))
    elif exception:
        urlopen.side_effect = exception
    else:
        payload = [] if retval is None else [retval]
        urlopen.return_value = StringIO(unicode(payload))
    return urlopen
def test_sudo_support():
    """sudo_support must strip the leading 'sudo ' before delegating, and
    prefix 'sudo ' onto any string result of the wrapped rule."""
    fn = Mock(return_value=True, __name__='')
    wrapped = sudo_support(fn)

    # truthy result passes through; the inner rule sees the de-sudoed command
    assert wrapped(Command('sudo ls', 'out', 'err'), None)
    fn.assert_called_once_with(Command('ls', 'out', 'err'), None)

    # falsy result passes through unchanged
    fn.return_value = False
    assert not wrapped(Command('sudo ls', 'out', 'err'), None)

    # string results get 'sudo ' re-prefixed only for sudo commands
    fn.return_value = 'pwd'
    assert wrapped(Command('sudo ls', 'out', 'err'), None) == 'sudo pwd'
    assert wrapped(Command('ls', 'out', 'err'), None) == 'pwd'
def layer_vars(self, dummy_input_layer):
    """Build a DenseLayer with mocked W/b initializers (ones-based arrays)
    and a mocked nonlinearity, returning the pieces for inspection."""
    from lasagne.layers.dense import DenseLayer
    weight_init = Mock(return_value=np.ones((12, 3)))
    bias_init = Mock(return_value=np.ones((3,)) * 3)
    nonlinearity = Mock()
    layer = DenseLayer(
        dummy_input_layer,
        num_units=3,
        W=weight_init,
        b=bias_init,
        nonlinearity=nonlinearity,
    )
    return {
        "W": weight_init,
        "b": bias_init,
        "nonlinearity": nonlinearity,
        "layer": layer,
    }
def test_widget(self):
    """HoneyPotWidget must render each subfield with the field's style
    kwargs and concatenate the rendered fragments in order."""
    subfield_1 = Mock(name='subfield_1', return_value=u'[first input]')
    subfield_2 = Mock(name='subfield_2', return_value=u'[second input]')

    field = Mock(name='field')
    field.style = {'style': 'display:none;'}
    field.__iter__ = Mock(return_value=iter([subfield_1, subfield_2]))

    actual = HoneyPotWidget()(field)

    subfield_1.assert_called_with(style='display:none;')
    subfield_2.assert_called_with(style='display:none;')
    self.assertEqual(actual, '[first input][second input]')
def test_sends_mail_with_message_dict(monkeypatch):
    """send_email() must build the message from get_message_dict and
    return whatever EmailMessage.send returns."""
    request = test.RequestFactory().get(reverse("contact"))

    get_message_dict = Mock(return_value={"to": ["*****@*****.**"]})
    monkeypatch.setattr(
        "contact_form_bootstrap.forms.BaseEmailFormMixin.get_message_dict",
        get_message_dict)

    send = Mock(return_value=1)
    monkeypatch.setattr("django.core.mail.message.EmailMessage.send", send)

    assert ContactForm().send_email(request) == 1
def test_find_with_postcode_errors(self):
    """ Tests boris.BikeChecker.find_with_postcode exceptions """
    get_mock = Mock(return_value=None)
    with patch.object(self.bc, 'pc', new=Mock(get=get_mock)):
        f = self.bc.find_with_postcode
        # lookup returned nothing: the postcode itself is invalid
        self.assertRaises(InvalidPostcodeException, f, None)
        # lookup succeeded but the payload lacks usable geo data
        get_mock.return_value = {'foo': 'bar'}
        self.assertRaises(InvalidDataException, f, None)
        get_mock.return_value = {'geo': {'lat': 0}}
        self.assertRaises(InvalidDataException, f, None)
def test_load(self):
    """RingBuilder.load() must handle a pickled builder object, an
    old-style dict builder, and old devs missing the 'meta' key.

    pickle.load is monkeypatched directly (restored in the finally
    block), so the statement order here is load-bearing.
    """
    rb = ring.RingBuilder(8, 3, 1)
    devs = [{'id': 0, 'region': 0, 'zone': 0, 'weight': 1,
             'ip': '127.0.0.0', 'port': 10000, 'device': 'sda1',
             'meta': 'meta0'},
            {'id': 1, 'region': 0, 'zone': 1, 'weight': 1,
             'ip': '127.0.0.1', 'port': 10001, 'device': 'sdb1',
             'meta': 'meta1'},
            {'id': 2, 'region': 0, 'zone': 2, 'weight': 2,
             'ip': '127.0.0.2', 'port': 10002, 'device': 'sdc1',
             'meta': 'meta2'},
            {'id': 3, 'region': 0, 'zone': 3, 'weight': 2,
             'ip': '127.0.0.3', 'port': 10003, 'device': 'sdd1'}]
    for d in devs:
        rb.add_dev(d)
    rb.rebalance()
    real_pickle = pickle.load
    try:
        # test a legit builder
        fake_pickle = Mock(return_value=rb)
        fake_open = Mock(return_value=None)
        pickle.load = fake_pickle
        builder = RingBuilder.load('fake.builder', open=fake_open)
        self.assertEquals(fake_pickle.call_count, 1)
        fake_open.assert_has_calls([mock_call('fake.builder', 'rb')])
        self.assertEquals(builder, rb)
        fake_pickle.reset_mock()
        fake_open.reset_mock()
        # test an old-style (dict) builder
        fake_pickle.return_value = rb.to_dict()
        pickle.load = fake_pickle
        builder = RingBuilder.load('fake.builder', open=fake_open)
        fake_open.assert_has_calls([mock_call('fake.builder', 'rb')])
        self.assertEquals(builder.devs, rb.devs)
        fake_pickle.reset_mock()
        fake_open.reset_mock()
        # test old devs but no meta
        no_meta_builder = rb
        for dev in no_meta_builder.devs:
            del(dev['meta'])
        fake_pickle.return_value = no_meta_builder
        pickle.load = fake_pickle
        builder = RingBuilder.load('fake.builder', open=fake_open)
        fake_open.assert_has_calls([mock_call('fake.builder', 'rb')])
        self.assertEquals(builder.devs, rb.devs)
        fake_pickle.reset_mock()
    finally:
        # always restore the real pickle.load
        pickle.load = real_pickle
def _mock_sched(self, ret): sched = Mock() sched.return_value = ret sched.tracking = list() sched.__repr__ = lambda self: repr(ret) return sched
raise ImportError("callback tests require Django > 1.3 for RequestFactory") from django.contrib.auth.models import User, AnonymousUser from la_facebook.access import OAuthAccess, OAuth20Token from la_facebook.callbacks.base import BaseFacebookCallback from la_facebook.models import UserAssociation factory = RequestFactory() mock_fetch_user_data = Mock() mock_fetch_user_data.return_value = { "id": "8675309", "name": "Herpa Derp", "first_name": "Herpa", "last_name": "Derp", "link": "http://www.facebook.com/herpaderp", "gender": "male", "locale": "fr_FR" } class BaseCallbackTest(TestCase): urls = 'la_facebook.tests.urls' def setUp(self): self.request = factory.get('/callback',data={'next':'dummy'}) test_user = User() test_user.username = '******' test_user.email = '*****@*****.**' test_user.save()
def test_fsync_fail_exception(self):
    """fsync() must raise OSError when glfs_fsync reports failure (-1)."""
    failing_fsync = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_fsync", failing_fsync):
        self.assertRaises(OSError, self.fd.fsync)
def test_lazylist_get():
    """Indexing a LazyList must invoke the stored factory and return its
    result; len() reports the number of factories."""
    factory = Mock(return_value=1)
    lazy = LazyList([factory] * 10)
    assert len(lazy) == 10
    assert lazy[0] == 1
def test_wait_until_call_timeout_error(self, time_sleep_mock):
    """wait_until() must raise TimeoutError when the predicate never
    becomes truthy within the timeout."""
    never_true = Mock(return_value=False)
    with pytest.raises(TimeoutError):
        util.wait_until(never_true, 5)
def test_statvfs_fail_exception(self):
    """statvfs() must raise OSError when glfs_statvfs reports failure (-1)."""
    failing_statvfs = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_statvfs", failing_statvfs):
        self.assertRaises(OSError, self.vol.statvfs, "/")
def mongo_search_returning_colors_are_wrong_ticket():
    """Patch Ticket.paged_query so any search yields exactly the
    'colors are wrong' ticket."""
    tickets = [create_colors_are_wrong_ticket()]
    paged_query = Mock(return_value=dict(tickets=tickets))
    return patch('forgetracker.tracker_main.TM.Ticket.paged_query',
                 paged_query)
def test_listxattr_fail_exception(self):
    """listxattr() must raise IOError when glfs_listxattr reports failure (-1)."""
    failing_listxattr = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_listxattr", failing_listxattr):
        self.assertRaises(IOError, self.vol.listxattr, "file.txt")
def test_stat_fail_exception(self):
    """stat() must raise OSError when glfs_stat reports failure (-1)."""
    failing_stat = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_stat", failing_stat):
        self.assertRaises(OSError, self.vol.stat, "file.txt")
def test_listdir_fail_exception(self):
    """listdir() must raise OSError when glfs_opendir yields no handle."""
    failing_opendir = Mock(return_value=None)
    with patch("gluster.gfapi.api.glfs_opendir", failing_opendir):
        self.assertRaises(OSError, self.vol.listdir, "test.txt")
def test_chown_fail_exception(self):
    """chown() must raise OSError when glfs_chown reports failure (-1)."""
    failing_chown = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_chown", failing_chown):
        self.assertRaises(OSError, self.vol.chown, "file.txt", 9, 11)
def test_portage_detect():
    """Exercise portage_detect against a mocked ``portageq`` executor.

    Covers: empty request, missing package, installed package, mixed
    installed/missing pairs (both orders), duplicate requests, and a
    package installed in multiple slots.  ``exec_fn`` is mocked so no
    real portageq binary is needed; each Mock's return_value/side_effect
    stands in for portageq's stdout lines.
    """
    # Skip silently on non-Gentoo hosts; portageq only exists there.
    if not is_gentoo():
        print('Skipping not Gentoo')
        return
    from rosdep2.platforms.gentoo import portage_detect
    # Empty request list -> empty result, exec_fn never consulted meaningfully.
    m = Mock()
    m.return_value = []
    val = portage_detect([], exec_fn=m)
    assert val == [], val
    # Test checking for a package that we do not have installed
    m = Mock(return_value=[])
    val = portage_detect(['tinyxml[stl]'], exec_fn=m)
    assert val == [], 'Result was actually: %s' % val
    m.assert_called_with(['portageq', 'match', '/', 'tinyxml[stl]'])
    # Test checking for a package that we do have installed
    m = Mock(return_value=['dev-libs/tinyxml-2.6.2-r1'])
    val = portage_detect(['tinyxml[stl]'], exec_fn=m)
    assert val == ['tinyxml[stl]'], 'Result was actually: %s' % val
    m.assert_called_with(['portageq', 'match', '/', 'tinyxml[stl]'])
    # Test checking for two packages that we have installed
    m = Mock(side_effect=[['sys-devel/gcc-4.5.3-r2'],
                          ['dev-libs/tinyxml-2.6.2-r1']])
    val = portage_detect(['tinyxml[stl]', 'gcc'], exec_fn=m)
    assert val == ['gcc', 'tinyxml[stl]'], 'Result was actually: %s' % val
    m.assert_any_call(['portageq', 'match', '/', 'tinyxml[stl]'])
    m.assert_any_call(['portageq', 'match', '/', 'gcc'])
    # Test checking for two missing packages
    m = Mock(side_effect=[[], []])
    val = portage_detect(['tinyxml[stl]', 'gcc'], exec_fn=m)
    assert val == [], 'Result was actually: %s' % val
    m.assert_any_call(['portageq', 'match', '/', 'tinyxml[stl]'])
    m.assert_any_call(['portageq', 'match', '/', 'gcc'])
    # Test checking for one missing, one installed package
    m = Mock(side_effect=[['sys-devel/gcc-4.5.3-r2'], []])
    val = portage_detect(['tinyxml[stl]', 'gcc'], exec_fn=m)
    assert val == ['gcc'], 'Result was actually: %s' % val
    m.assert_any_call(['portageq', 'match', '/', 'tinyxml[stl]'])
    m.assert_any_call(['portageq', 'match', '/', 'gcc'])
    # Test checking for one installed, one missing package (reverse order)
    m = Mock(side_effect=[[], ['dev-libs/tinyxml-2.6.2-r1']])
    val = portage_detect(['tinyxml[stl]', 'gcc'], exec_fn=m)
    assert val == ['tinyxml[stl]'], 'Result was actually: %s' % val
    m.assert_any_call(['portageq', 'match', '/', 'tinyxml[stl]'])
    m.assert_any_call(['portageq', 'match', '/', 'gcc'])
    # Test duplicates (requesting the same package twice)
    # TODO what's the desired behavior here
    m = Mock(side_effect=[['dev-libs/tinyxml-2.6.2-r1'],
                          ['dev-libs/tinyxml-2.6.2-r1']])
    val = portage_detect(['tinyxml[stl]', 'tinyxml[stl]'], exec_fn=m)
    assert val == ['tinyxml[stl]', 'tinyxml[stl]'], \
        'Result was actually: %s' % val
    m.assert_any_call(['portageq', 'match', '/', 'tinyxml[stl]'])
    # and a second of the same, but any_call won't show that.
    # Test packages with multiple slot
    m = Mock(
        side_effect=[['dev-lang/python-2.7.2-r3', 'dev-lang/python-3.2.2']])
    val = portage_detect(['python'], exec_fn=m)
    assert val == ['python'], 'Result was actually: %s' % val
    m.assert_any_call(['portageq', 'match', '/', 'python'])
def setUp(self):
    """Wire the sync command to a canned Jobvite XML payload (one_position)
    so tests never hit the network."""
    self.command = syncjobvite.Command()
    self.command._get_jobvite_xml = Mock(return_value=one_position)
def test_open_direct_fail_exception(self):
    """open() must raise OSError when glfs_open returns NULL (None)."""
    null_open = Mock(return_value=None)
    with patch("gluster.api.client.glfs_open", null_open):
        with self.assertRaises(OSError):
            self.vol.open("file.txt", os.O_RDONLY)
def test_users_to_zerver_userprofile(
        self, mock_get_model_id: mock.Mock,
        mock_get_data_file: mock.Mock) -> None:
    """Slack users must map to Zulip user profiles: the primary owner is
    imported first (lowest id), admin/bot flags carry over, and the data
    file read is mocked via ``mock_get_data_file``."""
    # Three Slack users: a plain user, the primary owner (admin), and a bot.
    user_data = [{
        "id": "U08RGD1RD",
        "name": "john",
        "deleted": False,
        "real_name": "John Doe",
        "profile": {
            "image_32": "",
            "email": "*****@*****.**"
        }
    }, {
        "id": "U0CBK5KAT",
        "is_admin": True,
        "is_bot": False,
        "is_owner": True,
        "is_primary_owner": True,
        'name': 'Jane',
        "real_name": "Jane Doe",
        "deleted": False,
        "profile": {
            "image_32": "https:\/\/secure.gravatar.com\/avatar\/random.png",
            "email": "*****@*****.**"
        }
    }, {
        "id": "U09TYF5Sk",
        "name": "Bot",
        "real_name": "Bot",
        "is_bot": True,
        "deleted": False,
        "profile": {
            "image_32": "https:\/\/secure.gravatar.com\/avatar\/random1.png",
            "email": "*****@*****.**"
        }
    }]
    mock_get_data_file.return_value = user_data
    # As user with slack_id 'U0CBK5KAT' is the primary owner, that user should be imported first
    # and hence has zulip_id = 1
    test_added_users = {'U08RGD1RD': 2, 'U0CBK5KAT': 1, 'U09TYF5Sk': 3}
    slack_data_dir = './random_path'
    timestamp = int(timezone_now().timestamp())
    zerver_userprofile, added_users = users_to_zerver_userprofile(
        slack_data_dir, 1, timestamp, 'test_domain')
    # test that the primary owner should always be imported first
    self.assertDictEqual(added_users, test_added_users)
    self.assertEqual(zerver_userprofile[1]['id'],
                     test_added_users['U0CBK5KAT'])
    self.assertEqual(len(zerver_userprofile), 3)
    # Owner profile: admin, active, not staff.
    self.assertEqual(zerver_userprofile[1]['id'], 1)
    self.assertEqual(zerver_userprofile[1]['is_realm_admin'], True)
    self.assertEqual(zerver_userprofile[1]['is_staff'], False)
    self.assertEqual(zerver_userprofile[1]['is_active'], True)
    # Plain user: neither staff nor bot.
    self.assertEqual(zerver_userprofile[0]['is_staff'], False)
    self.assertEqual(zerver_userprofile[0]['is_bot'], False)
    self.assertEqual(zerver_userprofile[0]['enable_desktop_notifications'],
                     True)
    # Bot user gets bot_type 1.
    self.assertEqual(zerver_userprofile[2]['bot_type'], 1)
def test_populate(self, load_mock):
    """Ns1Provider.populate: auth/API errors propagate, a missing zone
    yields an empty zone (exists=False), and existing zones produce the
    expected records while unsupported record types are skipped.
    ``load_mock`` stands in for the NS1 zone-load API call."""
    provider = Ns1Provider('test', 'api-key')

    # Bad auth
    load_mock.side_effect = AuthException('unauthorized')
    zone = Zone('unit.tests.', [])
    with self.assertRaises(AuthException) as ctx:
        provider.populate(zone)
    self.assertEquals(load_mock.side_effect, ctx.exception)

    # General error
    load_mock.reset_mock()
    load_mock.side_effect = ResourceException('boom')
    zone = Zone('unit.tests.', [])
    with self.assertRaises(ResourceException) as ctx:
        provider.populate(zone)
    self.assertEquals(load_mock.side_effect, ctx.exception)
    self.assertEquals(('unit.tests', ), load_mock.call_args[0])

    # Non-existent zone doesn't populate anything
    load_mock.reset_mock()
    load_mock.side_effect = \
        ResourceException('server error: zone not found')
    zone = Zone('unit.tests.', [])
    exists = provider.populate(zone)
    self.assertEquals(set(), zone.records)
    self.assertEquals(('unit.tests', ), load_mock.call_args[0])
    self.assertFalse(exists)

    # Existing zone w/o records
    load_mock.reset_mock()
    nsone_zone = DummyZone([])
    load_mock.side_effect = [nsone_zone]
    # Canned search result: one geo-targeted A record with per-answer
    # province/state/country/region metadata.
    zone_search = Mock()
    zone_search.return_value = [
        {
            "domain": "geo.unit.tests",
            "zone": "unit.tests",
            "type": "A",
            "answers": [
                {'answer': ['1.1.1.1'], 'meta': {}},
                {'answer': ['1.2.3.4'],
                 'meta': {'ca_province': ['ON']}},
                {'answer': ['2.3.4.5'],
                 'meta': {'us_state': ['NY']}},
                {'answer': ['3.4.5.6'],
                 'meta': {'country': ['US']}},
                {'answer': ['4.5.6.7'],
                 'meta': {'iso_region_code': ['NA-US-WA']}},
            ],
            'ttl': 34,
        },
    ]
    nsone_zone.search = zone_search
    zone = Zone('unit.tests.', [])
    provider.populate(zone)
    self.assertEquals(1, len(zone.records))
    self.assertEquals(('unit.tests', ), load_mock.call_args[0])

    # Existing zone w/records
    load_mock.reset_mock()
    nsone_zone = DummyZone(self.nsone_records)
    load_mock.side_effect = [nsone_zone]
    zone_search = Mock()
    zone_search.return_value = [
        {
            "domain": "geo.unit.tests",
            "zone": "unit.tests",
            "type": "A",
            "answers": [
                {'answer': ['1.1.1.1'], 'meta': {}},
                {'answer': ['1.2.3.4'],
                 'meta': {'ca_province': ['ON']}},
                {'answer': ['2.3.4.5'],
                 'meta': {'us_state': ['NY']}},
                {'answer': ['3.4.5.6'],
                 'meta': {'country': ['US']}},
                {'answer': ['4.5.6.7'],
                 'meta': {'iso_region_code': ['NA-US-WA']}},
            ],
            'ttl': 34,
        },
    ]
    nsone_zone.search = zone_search
    zone = Zone('unit.tests.', [])
    provider.populate(zone)
    self.assertEquals(self.expected, zone.records)
    self.assertEquals(('unit.tests', ), load_mock.call_args[0])

    # Test skipping unsupported record type
    load_mock.reset_mock()
    nsone_zone = DummyZone(self.nsone_records + [{
        'type': 'UNSUPPORTED',
        'ttl': 42,
        'short_answers': ['unsupported'],
        'domain': 'unsupported.unit.tests.',
    }])
    load_mock.side_effect = [nsone_zone]
    zone_search = Mock()
    zone_search.return_value = [
        {
            "domain": "geo.unit.tests",
            "zone": "unit.tests",
            "type": "A",
            "answers": [
                {'answer': ['1.1.1.1'], 'meta': {}},
                {'answer': ['1.2.3.4'],
                 'meta': {'ca_province': ['ON']}},
                {'answer': ['2.3.4.5'],
                 'meta': {'us_state': ['NY']}},
                {'answer': ['3.4.5.6'],
                 'meta': {'country': ['US']}},
                {'answer': ['4.5.6.7'],
                 'meta': {'iso_region_code': ['NA-US-WA']}},
            ],
            'ttl': 34,
        },
    ]
    nsone_zone.search = zone_search
    zone = Zone('unit.tests.', [])
    provider.populate(zone)
    # UNSUPPORTED record must not change the expected record set.
    self.assertEquals(self.expected, zone.records)
    self.assertEquals(('unit.tests', ), load_mock.call_args[0])
import subprocess import re from mock import Mock, patch, create_autospec from yahoo_panoptes.framework.context import PanoptesContext from yahoo_panoptes.framework.enrichment import PanoptesEnrichmentCache, PanoptesEnrichmentCacheKeyValueStore from yahoo_panoptes.framework.plugins.context import PanoptesPluginWithEnrichmentContext, PanoptesPluginContext from yahoo_panoptes.plugins.polling.utilities.polling_status import DEVICE_METRICS_STATES from yahoo_panoptes.framework.resources import PanoptesResource from yahoo_panoptes.framework.utilities.helpers import ordered from yahoo_panoptes.framework.utilities.secrets import PanoptesSecretsStore from tests.mock_redis import PanoptesMockRedis mock_time = Mock() mock_time.return_value = 1512629517.03121 snmp_simulator = None def setup_module_default(plugin_pwd, snmp_sim_listen=u'127.0.0.1:10161', snmp_sim_data_dir=u'data/recording'): global snmp_simulator snmp_sim_data_dir = os.path.join(plugin_pwd, snmp_sim_data_dir) try: snmp_simulator = subprocess.Popen([ 'snmpsimd.py', '--data-dir={}'.format(snmp_sim_data_dir), '--agent-udpv4-endpoint={}'.format(snmp_sim_listen), '--logging-method=null'
def make_fake_connection(self, platform_information=None):
    """Build a self-returning Mock connection whose remote_module's
    platform_information() yields *platform_information*."""
    fake_conn = Mock()
    # Calling the connection factory hands back the connection itself.
    fake_conn.return_value = fake_conn
    fake_conn.remote_module.platform_information = Mock(
        return_value=platform_information)
    return fake_conn
def test_sync(self, load_mock, create_mock):
    """Ns1Provider.apply: plan building, error propagation on load/create,
    zone creation for a missing zone, and update/delete with rate-limit
    retries.  ``load_mock``/``create_mock`` stand in for the NS1 zone
    load/create API calls."""
    provider = Ns1Provider('test', 'api-key')
    desired = Zone('unit.tests.', [])
    for r in self.expected:
        desired.add_record(r)
    plan = provider.plan(desired)
    # everything except the root NS
    expected_n = len(self.expected) - 1
    self.assertEquals(expected_n, len(plan.changes))
    self.assertTrue(plan.exists)

    # Fails, general error
    load_mock.reset_mock()
    create_mock.reset_mock()
    load_mock.side_effect = ResourceException('boom')
    with self.assertRaises(ResourceException) as ctx:
        provider.apply(plan)
    self.assertEquals(load_mock.side_effect, ctx.exception)

    # Fails, bad auth
    load_mock.reset_mock()
    create_mock.reset_mock()
    load_mock.side_effect = \
        ResourceException('server error: zone not found')
    create_mock.side_effect = AuthException('unauthorized')
    with self.assertRaises(AuthException) as ctx:
        provider.apply(plan)
    self.assertEquals(create_mock.side_effect, ctx.exception)

    # non-existent zone, create
    load_mock.reset_mock()
    create_mock.reset_mock()
    load_mock.side_effect = \
        ResourceException('server error: zone not found')
    # ugh, need a mock zone with a mock prop since we're using getattr, we
    # can actually control side effects on `meth` with that.
    mock_zone = Mock()
    mock_zone.add_SRV = Mock()
    # First add_SRV call is rate-limited, the retry succeeds.
    mock_zone.add_SRV.side_effect = [
        RateLimitException('boo', period=0),
        None,
    ]
    create_mock.side_effect = [mock_zone]
    got_n = provider.apply(plan)
    self.assertEquals(expected_n, got_n)

    # Update & delete
    load_mock.reset_mock()
    create_mock.reset_mock()
    # Existing zone gains an extra A record ('delete-me') and a changed
    # answer on the first record, forcing one Update and one Delete.
    nsone_zone = DummyZone(self.nsone_records + [{
        'type': 'A',
        'ttl': 42,
        'short_answers': ['9.9.9.9'],
        'domain': 'delete-me.unit.tests.',
    }])
    nsone_zone.data['records'][0]['short_answers'][0] = '2.2.2.2'
    nsone_zone.loadRecord = Mock()
    zone_search = Mock()
    zone_search.return_value = [
        {
            "domain": "geo.unit.tests",
            "zone": "unit.tests",
            "type": "A",
            "answers": [
                {'answer': ['1.1.1.1'], 'meta': {}},
                {'answer': ['1.2.3.4'],
                 'meta': {'ca_province': ['ON']}},
                {'answer': ['2.3.4.5'],
                 'meta': {'us_state': ['NY']}},
                {'answer': ['3.4.5.6'],
                 'meta': {'country': ['US']}},
                {'answer': ['4.5.6.7'],
                 'meta': {'iso_region_code': ['NA-US-WA']}},
            ],
            'ttl': 34,
        },
    ]
    nsone_zone.search = zone_search
    load_mock.side_effect = [nsone_zone, nsone_zone]
    plan = provider.plan(desired)
    self.assertEquals(3, len(plan.changes))
    self.assertIsInstance(plan.changes[0], Update)
    self.assertIsInstance(plan.changes[2], Delete)
    # ugh, we need a mock record that can be returned from loadRecord for
    # the update and delete targets, we can add our side effects to that to
    # trigger rate limit handling
    mock_record = Mock()
    mock_record.update.side_effect = [
        RateLimitException('one', period=0),
        None,
        None,
    ]
    mock_record.delete.side_effect = [
        RateLimitException('two', period=0),
        None,
        None,
    ]
    nsone_zone.loadRecord.side_effect = [
        mock_record,
        mock_record,
        mock_record
    ]
    got_n = provider.apply(plan)
    self.assertEquals(3, got_n)
    nsone_zone.loadRecord.assert_has_calls([
        call('unit.tests', u'A'),
        call('geo', u'A'),
        call('delete-me', u'A'),
    ])
    # First update happens twice (rate-limit retry), then the geo update
    # with its filter chain, then two deletes (retry again).
    mock_record.assert_has_calls([
        call.update(answers=[{'answer': [u'1.2.3.4'], 'meta': {}}],
                    filters=[],
                    ttl=32),
        call.update(answers=[{u'answer': [u'1.2.3.4'], u'meta': {}}],
                    filters=[],
                    ttl=32),
        call.update(answers=[
            {u'answer': [u'101.102.103.104'], u'meta': {}},
            {u'answer': [u'101.102.103.105'], u'meta': {}},
            {
                u'answer': [u'201.202.203.204'],
                u'meta': {u'iso_region_code': [u'NA-US-NY']},
            },
        ], filters=[
            {u'filter': u'shuffle', u'config': {}},
            {u'filter': u'geotarget_country', u'config': {}},
            {u'filter': u'select_first_n', u'config': {u'N': 1}},
        ], ttl=34),
        call.delete(),
        call.delete()
    ])
def test_read_fail_exception(self):
    """read() must raise OSError when glfs_read reports failure (-1)."""
    failing_read = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_read", failing_read):
        with self.assertRaises(OSError):
            self.fd.read(5)
""" Firefox for iOS and Firefox Aurora for Android don't have the /all/ page. Also, Firefox for Android doesn't have the ESR channel. """ resp = self.client.get(self._get_url('ios')) self.assertEqual(resp.status_code, 404) resp = self.client.get(self._get_url('android', 'aurora')) self.assertEqual(resp.status_code, 404) resp = self.client.get(self._get_url('android', 'organizations')) self.assertEqual(resp.status_code, 404) none_mock = Mock() none_mock.return_value = None @patch.object(fx_views.WhatsnewView, 'redirect_to', none_mock) @patch('bedrock.firefox.views.l10n_utils.render', return_value=HttpResponse()) class TestWhatsNew(TestCase): def setUp(self): self.view = fx_views.WhatsnewView.as_view() self.rf = RequestFactory(HTTP_USER_AGENT='Firefox') @override_settings(DEV=True) def test_can_post(self, render_mock): """Home page must accept post for newsletter signup.""" req = self.rf.post('/en-US/firefox/whatsnew/') self.view(req) # would return 405 before calling render otherwise
def test_init_sts_external_id(self):
    """AwsLimitChecker constructor must forward the STS assume-role
    settings (account id/role, region, external id, MFA args) to every
    registered service class and to the TrustedAdvisor constructor."""
    mock_svc1 = Mock(spec_set=_AwsService)
    mock_svc2 = Mock(spec_set=_AwsService)
    mock_foo = Mock(spec_set=_AwsService)
    mock_bar = Mock(spec_set=_AwsService)
    mock_ta = Mock(spec_set=TrustedAdvisor)
    mock_foo.return_value = mock_svc1
    mock_bar.return_value = mock_svc2
    svcs = {'SvcFoo': mock_foo, 'SvcBar': mock_bar}
    # Substitute the service registry and patch out logging/version lookup
    # and the TrustedAdvisor class for the duration of construction.
    with patch.dict('awslimitchecker.checker._services',
                    values=svcs, clear=True):
        with patch.multiple(
            'awslimitchecker.checker',
            logger=DEFAULT,
            _get_version_info=DEFAULT,
            TrustedAdvisor=DEFAULT,
            autospec=True,
        ) as mocks:
            mock_version = mocks['_get_version_info']
            mock_version.return_value = self.mock_ver_info
            mock_ta_constr = mocks['TrustedAdvisor']
            mocks['TrustedAdvisor'].return_value = mock_ta
            cls = AwsLimitChecker(
                account_id='123456789012',
                account_role='myrole',
                region='myregion',
                external_id='myextid',
                mfa_serial_number=None,
                mfa_token=None
            )
    # dict should be of _AwsService instances
    services = {
        'SvcFoo': mock_svc1,
        'SvcBar': mock_svc2
    }
    assert cls.services == services
    # _AwsService instances should exist, but have no other calls
    assert mock_foo.mock_calls == [
        call(80, 99, None, '123456789012', 'myrole', 'myregion',
             'myextid', None, None)
    ]
    assert mock_bar.mock_calls == [
        call(80, 99, None, '123456789012', 'myrole', 'myregion',
             'myextid', None, None)
    ]
    assert mock_ta_constr.mock_calls == [
        call(
            services,
            account_id='123456789012',
            account_role='myrole',
            region='myregion',
            external_id='myextid',
            mfa_serial_number=None,
            mfa_token=None,
            profile_name=None
        )
    ]
    assert mock_svc1.mock_calls == []
    assert mock_svc2.mock_calls == []
    assert self.mock_version.mock_calls == [call()]
    assert self.cls.vinfo == self.mock_ver_info
def test_users_to_zerver_userprofile(self,
                                     mock_get_data_file: mock.Mock) -> None:
    """Slack users (including custom profile fields) must map to Zulip
    user profiles: the primary owner is imported first (lowest id),
    custom profile fields/values are extracted, avatars are collected,
    and bot/admin flags carry over."""
    custom_profile_field_user1 = {"Xf06054BBB": {"value": "random1"},
                                  "Xf023DSCdd": {"value": "employee"}}
    custom_profile_field_user2 = {"Xf06054BBB": {"value": "random2"},
                                  "Xf023DSCdd": {"value": "employer"}}
    # Three Slack users: a plain user, the primary owner (admin), and a bot.
    user_data = [{"id": "U08RGD1RD",
                  "team_id": "T5YFFM2QY",
                  "name": "john",
                  "deleted": False,
                  "real_name": "John Doe",
                  "profile": {"image_32": "", "email": "*****@*****.**",
                              "avatar_hash": "hash",
                              "fields": custom_profile_field_user1}},
                 {"id": "U0CBK5KAT",
                  "team_id": "T5YFFM2QY",
                  "is_admin": True,
                  "is_bot": False,
                  "is_owner": True,
                  "is_primary_owner": True,
                  'name': 'Jane',
                  "real_name": "Jane Doe",
                  "deleted": False,
                  "profile": {"image_32": "https:\/\/secure.gravatar.com\/avatar\/random.png",
                              "fields": custom_profile_field_user2,
                              "email": "*****@*****.**",
                              "avatar_hash": "hash"}},
                 {"id": "U09TYF5Sk",
                  "team_id": "T5YFFM2QY",
                  "name": "Bot",
                  "real_name": "Bot",
                  "is_bot": True,
                  "deleted": False,
                  "profile": {"image_32": "https:\/\/secure.gravatar.com\/avatar\/random1.png",
                              "email": "*****@*****.**",
                              "avatar_hash": "hash"}}]
    mock_get_data_file.return_value = user_data
    # As user with slack_id 'U0CBK5KAT' is the primary owner, that user
    # should be imported first and hence gets the lowest zulip id (0 here).
    test_added_users = {'U08RGD1RD': 1, 'U0CBK5KAT': 0, 'U09TYF5Sk': 2}
    slack_data_dir = './random_path'
    timestamp = int(timezone_now().timestamp())
    mock_get_data_file.return_value = user_data
    zerver_userprofile, avatar_list, added_users, customprofilefield, \
        customprofilefield_value = users_to_zerver_userprofile(
            slack_data_dir, user_data, 1, timestamp, 'test_domain')
    # Test custom profile fields
    self.assertEqual(customprofilefield[0]['field_type'], 1)
    self.assertEqual(customprofilefield[1]['name'], 'slack custom field 2')
    self.assertEqual(len(customprofilefield_value), 4)
    self.assertEqual(customprofilefield_value[0]['field'], 0)
    self.assertEqual(customprofilefield_value[0]['user_profile'], 1)
    self.assertEqual(customprofilefield_value[2]['user_profile'], 0)
    # test that the primary owner should always be imported first
    self.assertDictEqual(added_users, test_added_users)
    self.assertEqual(len(avatar_list), 3)
    self.assertEqual(zerver_userprofile[1]['id'],
                     test_added_users['U0CBK5KAT'])
    self.assertEqual(len(zerver_userprofile), 3)
    # Owner profile: admin, active, not staff.
    self.assertEqual(zerver_userprofile[1]['id'], 0)
    self.assertEqual(zerver_userprofile[1]['is_realm_admin'], True)
    self.assertEqual(zerver_userprofile[1]['is_staff'], False)
    self.assertEqual(zerver_userprofile[1]['is_active'], True)
    # Plain user: neither staff nor bot.
    self.assertEqual(zerver_userprofile[0]['is_staff'], False)
    self.assertEqual(zerver_userprofile[0]['is_bot'], False)
    self.assertEqual(zerver_userprofile[0]['enable_desktop_notifications'],
                     True)
    # Bot user: bot_type 1 and uploaded ('U') avatar source.
    self.assertEqual(zerver_userprofile[2]['bot_type'], 1)
    self.assertEqual(zerver_userprofile[2]['avatar_source'], 'U')
import datetime import json import re import requests from mock import patch, Mock from speeches.tests import InstanceTestCase from speeches.importers.import_akomantoso import ImportAkomaNtoso from speeches.models import Speech, Speaker, Section from speeches.importers import import_akomantoso, import_popolo m = Mock() m.return_value = open( 'speeches/fixtures/test_inputs/Debate_Bungeni_1995-10-31.xml', 'rb') @patch.object(import_akomantoso, 'urlopen', m) class AkomaNtosoImportTestCase(InstanceTestCase): def setUp(self): super(AkomaNtosoImportTestCase, self).setUp() self.importer = ImportAkomaNtoso(instance=self.instance, commit=True) def _list_sections(self): # Make mapping {section name: list of its speeches' texts} sections = {section.id: [] for section in Section.objects.all()} for speech in Speech.objects.all(): if speech.section_id: sections[speech.section_id].append(speech.text) return { section.title: sections[section.id] for section in Section.objects.all()
def test_rmdir_fail_exception(self):
    """rmdir() must raise OSError when glfs_rmdir reports failure (-1)."""
    failing_rmdir = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_rmdir", failing_rmdir):
        with self.assertRaises(OSError):
            self.vol.rmdir("testdir")
def test_setup_listener(self):
    """RecvChannel.setup_listener must declare the exchange/queue and bind
    once, be idempotent on a second call, honor an explicit binding, and
    fall back to the declared (anonymous) queue name as the binding when
    neither queue nor binding is given."""
    # sub in mocks for _declare_exchange, _declare_queue, _bind
    mxp = Mock()
    mdq = Mock()
    # _declare_queue returns the server-assigned (anonymous) queue name.
    mdq.return_value = sentinel.anon_queue
    mb = Mock()

    def create_channel():
        # Fresh channel wired to the shared declare/bind mocks.
        ch = RecvChannel()
        ch._declare_exchange = mxp
        ch._declare_queue = mdq
        ch._bind = mb
        return ch

    ch = create_channel()
    self.assertFalse(ch._setup_listener_called)

    # call setup listener, defining xp, queue, binding
    ch.setup_listener(
        NameTrio(sentinel.xp, sentinel.queue, sentinel.binding))

    self.assertTrue(hasattr(ch, '_recv_name'))
    self.assertTrue(hasattr(ch._recv_name, 'exchange'))
    self.assertTrue(hasattr(ch._recv_name, 'queue'))
    self.assertEquals(ch._recv_name.exchange, sentinel.xp)
    self.assertEquals(ch._recv_name.queue, sentinel.queue)

    mxp.assert_called_once_with(sentinel.xp)
    mdq.assert_called_once_with(sentinel.queue)
    mb.assert_called_once_with(sentinel.binding)

    # you can only call setup_listener once
    self.assertTrue(ch._setup_listener_called)

    # calling it again does nothing, does not touch anything
    ch.setup_listener(NameTrio(sentinel.xp2, sentinel.queue2))

    self.assertTrue(hasattr(ch._recv_name, 'exchange'))
    self.assertTrue(hasattr(ch._recv_name, 'queue'))
    self.assertEquals(ch._recv_name.exchange, sentinel.xp)
    self.assertEquals(ch._recv_name.queue, sentinel.queue)
    mxp.assert_called_once_with(sentinel.xp)
    mdq.assert_called_once_with(sentinel.queue)
    mb.assert_called_once_with(sentinel.binding)

    # call setup listener, passing a custom bind this time
    ch = create_channel()
    ch.setup_listener(NameTrio(sentinel.xp2, sentinel.queue2),
                      binding=sentinel.binding)

    mxp.assert_called_with(sentinel.xp2)
    mdq.assert_called_with(sentinel.queue2)
    mb.assert_called_with(sentinel.binding)

    # call setup_listener, use anonymous queue name and no binding (will
    # get return value we set above)
    ch = create_channel()
    ch.setup_listener(NameTrio(sentinel.xp3))

    mxp.assert_called_with(sentinel.xp3)
    mdq.assert_called_with(None)
    mb.assert_called_with(sentinel.anon_queue)

    # call setup_listener with anon queue name but with binding
    ch = create_channel()
    ch.setup_listener(NameTrio(sentinel.xp4), binding=sentinel.binding2)

    mxp.assert_called_with(sentinel.xp4)
    mdq.assert_called_with(None)
    mb.assert_called_with(sentinel.binding2)
def test_unlink_fail_exception(self):
    """unlink() must raise OSError when glfs_unlink reports failure (-1)."""
    failing_unlink = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_unlink", failing_unlink):
        with self.assertRaises(OSError):
            self.vol.unlink("file.txt")
def test_channels_to_zerver_stream(self,
                                   mock_get_data_file: mock.Mock) -> None:
    """Slack channels must map to Zulip streams/recipients/subscriptions:
    archived channels become deactivated streams, purposes become
    descriptions, and per-channel ids line up across the parallel output
    lists returned by channels_to_zerver_stream."""
    added_users = {"U061A1R2R": 1, "U061A3E0G": 8, "U061A5N1G": 7,
                   "U064KUGRJ": 5}
    zerver_userprofile = [{'id': 1}, {'id': 8}, {'id': 7}, {'id': 5}]
    realm_id = 3
    # Four channels: one archived ('random'), three active with varying
    # member lists, topics and purposes.
    channel_data = [{'id': "C061A0WJG", 'name': 'random',
                     'created': '1433558319', 'is_general': False,
                     'members': ['U061A1R2R', 'U061A5N1G'],
                     'is_archived': True,
                     'topic': {'value': 'random'},
                     'purpose': {'value': 'no purpose'}},
                    {'id': "C061A0YJG", 'name': 'general',
                     'created': '1433559319', 'is_general': False,
                     'is_archived': False,
                     'members': ['U061A1R2R', 'U061A5N1G', 'U064KUGRJ'],
                     'topic': {'value': 'general'},
                     'purpose': {'value': 'general'}},
                    {'id': "C061A0YJP", 'name': 'general1',
                     'created': '1433559319', 'is_general': False,
                     'is_archived': False,
                     'members': ['U061A1R2R'],
                     'topic': {'value': 'general channel'},
                     'purpose': {'value': 'For everyone'}},
                    {'id': "C061A0HJG", 'name': 'feedback',
                     'created': '1433558359', 'is_general': False,
                     'members': ['U061A3E0G'],
                     'is_archived': False,
                     'topic': {'value': ''},
                     'purpose': {'value': ''}}]
    mock_get_data_file.return_value = channel_data
    channel_to_zerver_stream_output = channels_to_zerver_stream(
        './random_path', realm_id, added_users, zerver_userprofile)
    zerver_defaultstream = channel_to_zerver_stream_output[0]
    zerver_stream = channel_to_zerver_stream_output[1]
    added_channels = channel_to_zerver_stream_output[2]
    zerver_subscription = channel_to_zerver_stream_output[3]
    zerver_recipient = channel_to_zerver_stream_output[4]
    added_recipient = channel_to_zerver_stream_output[5]
    test_added_channels = {'feedback': 3, 'general': 1, 'general1': 2,
                           'random': 0}
    test_added_recipient = {'feedback': 3, 'general': 1, 'general1': 2,
                            'random': 0}
    # zerver defaultstream already tested in helper functions
    self.assertEqual(zerver_defaultstream,
                     [{'id': 0, 'realm': 3, 'stream': 0},
                      {'id': 1, 'realm': 3, 'stream': 1}])
    self.assertDictEqual(test_added_channels, added_channels)
    self.assertDictEqual(test_added_recipient, added_recipient)
    # functioning of zerver subscriptions are already tested in the helper functions
    # This is to check the concatenation of the output lists from the helper functions
    # subscriptions for stream
    self.assertEqual(zerver_subscription[3]['recipient'], 1)
    self.assertEqual(zerver_subscription[5]['recipient'], 2)
    # subscription for users
    self.assertEqual(zerver_subscription[6]['recipient'], 3)
    self.assertEqual(zerver_subscription[7]['user_profile'], 1)
    # recipients for stream
    self.assertEqual(zerver_recipient[1]['id'],
                     zerver_subscription[3]['recipient'])
    self.assertEqual(zerver_recipient[2]['type_id'],
                     zerver_stream[2]['id'])
    self.assertEqual(zerver_recipient[0]['type'], 2)
    # recipients for users (already tested in helped function)
    self.assertEqual(zerver_recipient[3]['type'], 2)
    self.assertEqual(zerver_recipient[4]['type'], 1)
    # stream mapping
    self.assertEqual(zerver_stream[0]['name'], channel_data[0]['name'])
    self.assertEqual(zerver_stream[0]['deactivated'],
                     channel_data[0]['is_archived'])
    self.assertEqual(zerver_stream[0]['description'], 'no purpose')
    self.assertEqual(zerver_stream[0]['invite_only'], False)
    self.assertEqual(zerver_stream[0]['realm'], realm_id)
    self.assertEqual(zerver_stream[2]['id'],
                     test_added_channels[zerver_stream[2]['name']])
def test_write_fail_exception(self):
    """write() must raise OSError when glfs_write reports failure (-1)."""
    failing_write = Mock(return_value=-1)
    with patch("gluster.gfapi.api.glfs_write", failing_write):
        with self.assertRaises(OSError):
            self.fd.write("hello")
def test_chmod_fail_exception(self):
    """chmod() must raise OSError when glfs_chmod reports failure (-1).

    Fixes two defects in the original:
    - patched "glusterfs.gfapi.api.glfs_chmod" while every sibling test
      patches "gluster.gfapi.api...", so the mock targeted a nonexistent
      module and never intercepted the real call;
    - used the Python-2 octal literal 0600, a syntax error on Python 3;
      replaced with the equivalent 0o600.
    """
    mock_glfs_chmod = Mock()
    mock_glfs_chmod.return_value = -1
    with patch("gluster.gfapi.api.glfs_chmod", mock_glfs_chmod):
        self.assertRaises(OSError, self.vol.chmod, "file.txt", 0o600)