def popen(monkeypatch):
    """Fixture: patch ``lizzy.apps.common.Popen`` with a self-returning mock.

    The mock acts as both the Popen class and the process object: calling it
    returns itself, it reports exit code 0, and ``communicate()`` yields
    canned stdout/stderr bytes so tests can assert on the invocation.
    """
    process_mock = MagicMock()
    # Calling the patched Popen yields the same mock, so attributes set
    # here are also visible on the resulting "process" object.
    process_mock.return_value = process_mock
    process_mock.returncode = 0
    process_mock.communicate.return_value = b'{"stream": "stdout"}', b'stderr'
    monkeypatch.setattr('lizzy.apps.common.Popen', process_mock)
    return process_mock
def gsssc(definition, monkeypatch, event_loop):
    """Fixture: build a GoSmartSimulationServerComponent with mocked deps.

    Filesystem calls, the database, and the definition-fetch coroutine are
    all replaced by mocks; ``known_guid`` maps to *definition*, any other
    guid resolves to ``False``.
    """
    server_id = 'test-000000'
    database = MagicMock()
    publish_cb = MagicMock()
    use_observant = False
    ignore_development = True
    # Neutralize directory creation/changing done during construction.
    mkdir = MagicMock()
    monkeypatch.setattr("os.mkdir", mkdir)
    mkdir.return_value = True
    chdir = MagicMock()
    monkeypatch.setattr("os.chdir", chdir)
    chdir.return_value = True
    gssa.server.use_observant = False
    gssa.server.GoSmartSimulationServerComponent._write_identity = MagicMock()
    gsssc = gssa.server.GoSmartSimulationServerComponent(
        server_id,
        database,
        publish_cb,
        ignore_development,
        use_observant
    )
    # In theory, this is redundant but ensures that we are not thinking
    # about the thread callback timing
    gsssc._db = MagicMock()
    # magic_coro() presumably returns (mock, coroutine wrapper) — the mock
    # records calls while the coroutine is what the component awaits.
    fd, fd_coro = magic_coro()
    fd.side_effect = lambda g, resync=True, allow_many=False: (g, (definition if g == known_guid else False))
    gsssc._fetch_definition = fd_coro
    gsssc._fetch_definition_test = fd
    gsssc.current[known_guid] = definition
    return gsssc
def test_respawn_auto_scaling_group(monkeypatch):
    """Respawn an ASG behind an ELB: the outdated instance gets rotated out.

    The group already references launch configuration 'lc-2' while its only
    instance still runs 'lc-1'; the ELB reports both the old and replacement
    instances healthy so the rotation can complete.
    """
    # Instance still on the old launch configuration 'lc-1'.
    inst = {'InstanceId': 'myinst-1', 'LaunchConfigurationName': 'lc-1', 'LifecycleState': 'InService'}
    group = {'LaunchConfigurationName': 'lc-2',
             'Instances': [inst],
             'MinSize': 1,
             'MaxSize': 1,
             'DesiredCapacity': 1,
             'LoadBalancerNames': ['myelb']}
    groups = {'AutoScalingGroups': [group]}
    instance_states = [{'InstanceId': 'myinst-1', 'State': 'InService'},
                       {'InstanceId': 'myinst-2', 'State': 'InService'}]
    asg = MagicMock()
    asg.describe_auto_scaling_groups.return_value = groups

    def terminate_instance(InstanceId, **kwargs):
        # Mimic AWS: terminating removes the instance from the ELB's view.
        for i in range(len(instance_states)):
            if instance_states[i]['InstanceId'] == InstanceId:
                del instance_states[i]
                break

    asg.terminate_instance_in_auto_scaling_group = terminate_instance
    elb = MagicMock()
    elb.describe_instance_health.return_value = {'InstanceStates': instance_states}
    services = {'autoscaling': asg, 'elb': elb}

    def client(service, region):
        # Every boto3 client must be requested for the expected region.
        assert region == 'myregion'
        return services[service]

    monkeypatch.setattr('boto3.client', client)
    monkeypatch.setattr('time.sleep', lambda s: s)  # don't actually wait
    respawn_auto_scaling_group('myasg', 'myregion')
def test_render_definition(monkeypatch, popen):
    """render_definition builds the expected ``senza print`` command line.

    NamedTemporaryFile is mocked so the definition is "written" to a file
    whose name is deterministically 'lizzy.yaml'.
    """
    senza = Senza('region')
    senza.logger = MagicMock()
    mock_named_tempfile = MagicMock()
    mock_tempfile = MagicMock()
    mock_tempfile.name = 'lizzy.yaml'
    mock_named_tempfile.__enter__.return_value = mock_tempfile
    mock_named_tempfile.return_value = mock_named_tempfile
    monkeypatch.setattr('tempfile.NamedTemporaryFile', mock_named_tempfile)
    senza.render_definition('yaml content', 'version42', 'imgversion22',
                            ['Param1=app', 'SecondParam=3'])
    cmd = 'senza print --region region -o json --force lizzy.yaml version42 ' \
          'imgversion22 Param1=app SecondParam=3'
    # -1 is subprocess.PIPE for both stdout and stderr.
    popen.assert_called_with(cmd.split(" "), stdout=-1, stderr=-1)
    assert not senza.logger.error.called
    # A missing stack version is also accepted without logging an error.
    senza.render_definition('yaml content', None, 'imgversion22',
                            ['Param1=app', 'SecondParam=3'])
    assert not senza.logger.error.called
    # test error case
    popen.side_effect = ExecutionError('', '')
    with pytest.raises(SenzaRenderError):
        senza.render_definition('yaml content', 'version42', 'imgversion22',
                                ['Param1=app', 'SecondParam=3'])
def test_onRequestAnnounce(gsssc, monkeypatch, definition):
    """onRequestAnnounce publishes one announce event per stored simulation."""
    random_guid = known_guid
    status1 = MagicMock()
    gsssc.onRequestIdentify = MagicMock()
    status1.return_value = 'unstable'
    gsssc._db.all = MagicMock()
    simulations = [{
        'exit_code': 'tsttmp133',
        'status': 'tsttmp134',
        'percentage': 0.3,
        'guid': random_guid,
        'directory': 'home',
        'timestamp': 'zerohour',
        'validation': 'invalid'
    }]
    gsssc._db.all.return_value = simulations
    # makeError is patched so the published status is the fixed 'unstable'.
    monkeypatch.setattr('gssa.error.makeError', status1)
    gsssc.publish = MagicMock()
    gsssc.server_id = 123
    gsssc.onRequestAnnounce()
    yield from wait()  # let the announce coroutine run on the event loop
    gsssc.publish.assert_called_with(u'com.gosmartsimulation.announce', 123,
                                     random_guid, (0.3, 'unstable'), 'home',
                                     'zerohour', 'invalid')
    # Sentinel assertion marking that the test body ran to completion.
    result000 = 1983
    assert (result000 == 1983)
def test_config_polling_intensity(self, discovery, get_platform):
    """Test polling intensity."""
    mock_platform = MagicMock()
    get_platform.return_value = mock_platform
    mock_device = MagicMock()
    mock_device.name = 'test_device'
    mock_platform.get_device.return_value = mock_device
    self.node.values = {
        self.primary.value_id: self.primary,
        self.secondary.value_id: self.secondary,
    }
    # Device-level config requests a polling intensity of 123.
    self.device_config = {self.entity_id: {
        zwave.CONF_POLLING_INTENSITY: 123,
    }}
    values = zwave.ZWaveDeviceEntityValues(
        hass=self.hass,
        schema=self.mock_schema,
        primary_value=self.primary,
        zwave_config=self.zwave_config,
        device_config=self.device_config,
        registry=self.registry
    )
    values._check_entity_ready()
    self.hass.block_till_done()
    assert discovery.async_load_platform.called
    # The configured intensity must be applied exactly once to the primary value.
    assert self.primary.enable_poll.called
    assert len(self.primary.enable_poll.mock_calls) == 1
    assert self.primary.enable_poll.mock_calls[0][1][0] == 123
def test_respawn_auto_scaling_group_without_elb(monkeypatch):
    """Respawn an ASG that has no ELB: health comes from the ASG itself."""
    inst = {'InstanceId': 'myinst-1', 'LaunchConfigurationName': 'lc-1', 'LifecycleState': 'InService'}
    instances = [inst]
    group = {'AutoScalingGroupName': 'myasg',
             'LaunchConfigurationName': 'lc-2',
             'Instances': instances,
             'MinSize': 1,
             'MaxSize': 1,
             'DesiredCapacity': 1,
             'LoadBalancerNames': []}
    groups = {'AutoScalingGroups': [group]}
    asg = MagicMock()
    asg.describe_auto_scaling_groups.return_value = groups

    def update_group(**kwargs):
        # Scaling up spawns a replacement on the new launch config 'lc-2'.
        instances.append({'InstanceId': 'myinst-2',
                          'LaunchConfigurationName': 'lc-2',
                          'LifecycleState': 'InService'})

    def terminate_instance(InstanceId, **kwargs):
        # Mimic AWS: terminating removes the instance from the group.
        for i in range(len(instances)):
            if instances[i]['InstanceId'] == InstanceId:
                del instances[i]
                break

    asg.update_auto_scaling_group = update_group
    asg.terminate_instance_in_auto_scaling_group = terminate_instance
    services = {'autoscaling': asg}

    def client(service, *args):
        return services[service]

    monkeypatch.setattr('boto3.client', client)
    monkeypatch.setattr('time.sleep', lambda s: s)  # don't actually wait
    respawn_auto_scaling_group('myasg', 'myregion')
def test_component_redis_cluster(monkeypatch):
    """The redis_cluster component renders a replication group + subnet group."""
    mock_string = "foo"
    configuration = {
        "Name": mock_string,
        "SecurityGroups": "",
    }
    # Deliberately long stack name ('foobar' * 5) to exercise name handling.
    info = {'StackName': 'foobar' * 5, 'StackVersion': '0.1'}
    definition = {"Resources": {}}
    args = MagicMock()
    args.region = "foo"
    mock_string_result = MagicMock()
    mock_string_result.return_value = mock_string
    monkeypatch.setattr('senza.components.redis_cluster.resolve_security_groups', mock_string_result)
    result = component_redis_cluster(definition, configuration, args, info, False, MagicMock())
    assert 'RedisReplicationGroup' in result['Resources']
    assert mock_string == result['Resources']['RedisReplicationGroup']['Properties']['SecurityGroupIds']
    assert 2 == result['Resources']['RedisReplicationGroup']['Properties']['NumCacheClusters']
    assert result['Resources']['RedisReplicationGroup']['Properties']['AutomaticFailoverEnabled']
    # All essential ElastiCache properties must be present.
    assert 'Engine' in result['Resources']['RedisReplicationGroup']['Properties']
    assert 'EngineVersion' in result['Resources']['RedisReplicationGroup']['Properties']
    assert 'CacheNodeType' in result['Resources']['RedisReplicationGroup']['Properties']
    assert 'CacheSubnetGroupName' in result['Resources']['RedisReplicationGroup']['Properties']
    assert 'CacheParameterGroupName' in result['Resources']['RedisReplicationGroup']['Properties']
    assert 'RedisSubnetGroup' in result['Resources']
    assert 'SubnetIds' in result['Resources']['RedisSubnetGroup']['Properties']
def test_component_auto_scaling_group_configurable_properties2():
    """SpotPrice is passed through when configured and omitted otherwise."""
    template = {"Resources": {}}
    config = {
        'Name': 'Foo',
        'InstanceType': 't2.micro',
        'Image': 'foo',
        'SpotPrice': 0.250,
    }
    cli_args = MagicMock()
    cli_args.region = "foo"
    stack_info = {'StackName': 'FooStack', 'StackVersion': 'FooVersion'}

    rendered = component_auto_scaling_group(template, config, cli_args,
                                            stack_info, False, MagicMock())
    assert rendered["Resources"]["FooConfig"]["Properties"]["SpotPrice"] == 0.250

    # Without SpotPrice in the configuration, the property must not appear.
    config.pop("SpotPrice")
    rendered = component_auto_scaling_group(template, config, cli_args,
                                            stack_info, False, MagicMock())
    assert "SpotPrice" not in rendered["Resources"]["FooConfig"]["Properties"]
def add_change(op, dns_name, rtype, ttl, identifier, weight):
    """Stub Route53 change submission: CREATE ops are tracked in `records`."""
    if op == 'CREATE':
        rec = MagicMock(weight=weight, identifier=identifier)
        # `name` collides with MagicMock's constructor arg, set it afterwards.
        rec.name = "myapp.example.org."
        rec.type = "CNAME"
        records[identifier] = rec
    return MagicMock(name='change')
def test_print_replace_mustache(monkeypatch):
    """``senza print`` resolves {{Arguments.*}} mustache placeholders."""
    sg = MagicMock()
    sg.name = 'app-master-mind'
    sg.id = 'sg-007'
    # Stub out every boto region connection used during rendering.
    monkeypatch.setattr('boto.cloudformation.connect_to_region', lambda x: MagicMock())
    monkeypatch.setattr('boto.ec2.connect_to_region',
                        lambda x: MagicMock(get_all_security_groups=lambda: [sg]))
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    data = {'SenzaInfo': {'StackName': 'test',
                          'Parameters': [{'ApplicationId': {'Description': 'Application ID from kio'}}]},
            'SenzaComponents': [{'Configuration': {'ServerSubnets': {'eu-west-1': ['subnet-123']},
                                                   'Type': 'Senza::Configuration'}},
                                {'AppServer': {'Image': 'AppImage',
                                               'InstanceType': 't2.micro',
                                               'SecurityGroups': ['app-{{Arguments.ApplicationId}}'],
                                               'IamRoles': ['app-{{Arguments.ApplicationId}}'],
                                               'TaupageConfig': {'runtime': 'Docker',
                                                                 'source': 'foo/bar'},
                                               'Type': 'Senza::TaupageAutoScalingGroup'}}]
            }
    runner = CliRunner()
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(data, fd)
        # 'master-mind' fills the ApplicationId argument.
        result = runner.invoke(cli, ['print', 'myapp.yaml', '--region=myregion', '123', 'master-mind'],
                               catch_exceptions=False)
        assert 'AWSTemplateFormatVersion' in result.output
        assert 'subnet-123' in result.output
        assert 'app-master-mind' in result.output
        assert 'sg-007' in result.output
def test_console(monkeypatch):
    """``senza console`` prints EC2 console output for stack/IP/instance refs."""
    stack = MagicMock(stack_name='test-1')
    inst = MagicMock()
    inst.tags = {'aws:cloudformation:stack-name': 'test-1'}
    ec2 = MagicMock()
    ec2.get_only_instances.return_value = [inst]
    ec2.get_console_output.return_value.output = b'**MAGIC-CONSOLE-OUTPUT**'
    monkeypatch.setattr('boto.ec2.connect_to_region', lambda x: ec2)
    monkeypatch.setattr('boto.cloudformation.connect_to_region',
                        lambda x: MagicMock(list_stacks=lambda stack_status_filters: [stack]))
    monkeypatch.setattr('boto.iam.connect_to_region', lambda x: MagicMock())
    runner = CliRunner()
    data = {'SenzaInfo': {'StackName': 'test'}}
    with runner.isolated_filesystem():
        with open('myapp.yaml', 'w') as fd:
            yaml.dump(data, fd)
        # Reference by definition file + stack version.
        result = runner.invoke(cli, ['console', 'myapp.yaml', '--region=myregion', '1'],
                               catch_exceptions=False)
        assert '**MAGIC-CONSOLE-OUTPUT**' in result.output
        # Unknown reference produces no output.
        result = runner.invoke(cli, ['console', 'foobar', '--region=myregion'],
                               catch_exceptions=False)
        assert '' == result.output
        # Reference by private IP address.
        result = runner.invoke(cli, ['console', '172.31.1.2', '--region=myregion'],
                               catch_exceptions=False)
        assert '**MAGIC-CONSOLE-OUTPUT**' in result.output
        # Reference by instance id.
        result = runner.invoke(cli, ['console', 'i-123', '--region=myregion'],
                               catch_exceptions=False)
        assert '**MAGIC-CONSOLE-OUTPUT**' in result.output
def record(dns_identifier, weight):
    """Build a mock Route53 CNAME record for myapp.example.org."""
    mock_record = MagicMock(
        name=dns_identifier + '-record',
        weight=weight,
        identifier=dns_identifier,
        type='CNAME',
    )
    # `name` is consumed by MagicMock's constructor, so assign it afterwards.
    mock_record.name = 'myapp.example.org.'
    return mock_record
def test_correctly_invokes_parameter_validation(self, fake_check_unknown):
    """find_one must validate query parameters against a field whitelist."""
    fake_request = MagicMock()
    fake_request.args = MultiDict()
    lookup = {'_id': 'my_item'}
    instance = self._make_one()
    instance.find_one(fake_request, **lookup)
    self.assertTrue(fake_check_unknown.called)
    args, kwargs = fake_check_unknown.call_args
    # The request itself is always the first positional argument.
    self.assertGreater(len(args), 0)
    self.assertEqual(args[0], fake_request)
    self.assertEqual(kwargs.get('allow_filtering'), False)
    expected_whitelist = sorted(['exclude_fields', 'include_fields'])
    whitelist_arg = kwargs.get('whitelist')
    if whitelist_arg is not None:
        # NOTE: the whitelist argument is converted to a list, because any
        # iterable type is valid, not just lists
        self.assertEqual(sorted(list(whitelist_arg)), expected_whitelist)
    else:
        # whitelist can also be passed as a positional argument
        self.assertGreater(len(args), 1)
        self.assertEqual(sorted(list(args[1])), expected_whitelist)
def test_downloadNextFile_subdirectory(download_project, servers, project):
    """Downloading a file with a nested path creates the local directories."""
    one = servers.getRemoteServer("http", "127.0.0.1", 8000, None)
    download_project._files_to_download = [
        {"path": "a/b/c", "md5sum": "d8e8fca2dc0f896fd7cb4cb0031ba249", "server": one}
    ]
    download_project._total_files_to_download = 4
    download_project._is_running = True
    mark_finished = MagicMock()
    download_project.finished.connect(mark_finished)
    progress = MagicMock()
    download_project.updated.connect(progress)
    download_project._downloadNextFile()
    # One of four files still pending, so not finished and 75% reported.
    assert not mark_finished.called
    progress.assert_called_once_with(75)
    assert os.path.exists(os.path.join(project.filesDir(), "a", "b", "c"))
    assert project.get.called
    args, kwargs = project.get.call_args
    assert args[0] == one
    assert args[1] == "/files/a/b/c"
    assert args[2] == download_project._downloadFileReceived
    assert kwargs["context"] is not None
    assert kwargs["downloadProgressCallback"] == download_project._downloadFileProgress
class DelayRequestBehaviourTests(TestCase):
    """Tests for DelayRequestBehaviour: sleep first, then delegate."""

    def setUp(self):
        sleep_patcher = patch('uncertainty.behaviours.sleep')
        self.sleep_mock = sleep_patcher.start()
        # BUG FIX: cleanup must stop the *patcher*, not the mock it returned.
        # The previous `self.addCleanup(self.sleep_mock.stop)` only invoked a
        # MagicMock attribute, leaving the patch active after the test.
        self.addCleanup(sleep_patcher.stop)
        self.get_response_mock = MagicMock()
        self.request_mock = MagicMock()
        self.some_behaviour = MagicMock()
        self.some_seconds = MagicMock()
        self.delay_request_behaviour = DelayRequestBehaviour(self.some_behaviour, self.some_seconds)

    def test_calls_encapsulated_behaviour(self):
        """Tests that DelayResponseBehaviour calls the encapsulated behaviour"""
        self.delay_request_behaviour(self.get_response_mock, self.request_mock)
        self.some_behaviour.assert_called_once_with(self.get_response_mock, self.request_mock)

    def test_returns_result_of_encapsulated_behaviour(self):
        """Tests that DelayResponseBehaviour returns the result of calling the encapsulated behaviour"""
        self.assertEqual(self.some_behaviour.return_value,
                         self.delay_request_behaviour(self.get_response_mock, self.request_mock))

    def test_calls_sleep(self):
        """Tests that DelayResponseBehaviour calls sleep for the given seconds"""
        self.delay_request_behaviour(self.get_response_mock, self.request_mock)
        self.sleep_mock.assert_called_once_with(self.some_seconds)

    def test_delay_is_delay_request_response_behaviour(self):
        """Tests that delay is an alias for DelayResponseBehaviour"""
        self.assertEqual(delay_request, DelayRequestBehaviour)
def test_multiple_events(self):
    """conduct_logs renders each returned log event as one output row."""
    request_headers_mock = MagicMock(return_value=self.mock_headers)
    # Two log events from the ConductR logs endpoint.
    http_method = self.respond_with(text="""[ { "timestamp":"2015-08-24T01:16:22.327Z", "host":"10.0.1.232", "message":"[WARN] [04/21/2015 12:54:30.079] [doc-renderer-cluster-1-akka.remote.default-remote-dispatcher-22] Association with remote system has failed." }, { "timestamp":"2015-08-24T01:16:25.327Z", "host":"10.0.1.232", "message":"[WARN] [04/21/2015 12:54:36.079] [doc-renderer-cluster-1-akka.remote.default-remote-dispatcher-26] Association with remote system has failed." } ]""")
    quote_method = MagicMock(return_value=self.bundle_id_urlencoded)
    stdout = MagicMock()
    input_args = MagicMock(**self.default_args)
    with patch('requests.get', http_method), \
            patch('conductr_cli.conduct_url.request_headers', request_headers_mock), \
            patch('urllib.parse.quote', quote_method):
        logging_setup.configure_logging(input_args, stdout)
        result = conduct_logs.logs(input_args)
        self.assertTrue(result)
    request_headers_mock.assert_called_with(input_args)
    http_method.assert_called_with(self.default_url, timeout=DEFAULT_HTTP_TIMEOUT, headers=self.mock_headers)
    # Both events appear, with timestamps truncated to whole seconds.
    self.assertEqual(
        strip_margin("""|TIME HOST LOG |2015-08-24T01:16:22Z 10.0.1.232 [WARN] [04/21/2015 12:54:30.079] [doc-renderer-cluster-1-akka.remote.default-remote-dispatcher-22] Association with remote system has failed. |2015-08-24T01:16:25Z 10.0.1.232 [WARN] [04/21/2015 12:54:36.079] [doc-renderer-cluster-1-akka.remote.default-remote-dispatcher-26] Association with remote system has failed. |"""),
        self.output(stdout))
def setUp(self):
    """Create the repository service under test with a stubbed GitHub connector."""
    github_connector_stub = MagicMock()
    github_connector_stub.read_all = MagicMock(return_value=[{
        "name": "scikit-aero",
        'forks_count': '2',
        'watchers_count': '3',
        'stargazers_count': '3',
    }])
    self.repo = ProjectRepositoryService(github_connector_stub)
async def test_201_run(self):
    """reset_tasks.run: errors raise in debug mode, are swallowed otherwise."""
    rc = MagicMock(spec=RestClient)
    pilots = {'a':{}}
    # try tasks error
    async def client(method, url, args=None):
        logger.info('REST: %s, %s', method, url)
        if url.startswith('/dataset_summaries'):
            return {'processing':['foo']}
        else:
            # Any non-summary request (the per-task calls) fails.
            raise Exception()
    rc.request = client
    with self.assertRaises(Exception):
        await reset_tasks.run(rc, debug=True)
    # check it normally hides the error
    await reset_tasks.run(rc, debug=False)
    # try dataset level error
    async def client(method, url, args=None):
        raise Exception()
    rc.request = client
    with self.assertRaises(Exception):
        await reset_tasks.run(rc, debug=True)
    # check it normally hides the error
    await reset_tasks.run(rc, debug=False)
def test_process_data_redirects_to_failure_on_payment_failure(self):
    """A rejected payment must redirect to the payment's failure URL."""
    self.payment.status = 'reject'
    fake_request = MagicMock()
    fake_request.GET = {}
    response = WireProvider().process_data(self.payment, fake_request)
    self.assertEqual(response['location'], self.payment.get_failure_url())
def test_requesting_scikit_aero_project(self):
    """Requesting a known project must show its computed g-index (12)."""
    fake_view = MagicMock()
    fake_repo_service = MagicMock()
    fake_repo_service.find = MagicMock(return_value=Project(2, 3, 3))
    presenter = GIndexPresenter(fake_view, fake_repo_service)
    presenter.request_gindex_for("Pybonacci", "scikit-aero")
    fake_view.show_gindex.assert_called_with(12)
def assert_mock_get_hymn(self, hymn_type, hymn_number, stored_content_path = None, query_params = tuple()): stubbed_path = GetSong.HYMN_PATH_FORMAT % (hymn_type, hymn_number) # url to stub out url = GetSong.GET_SONG_URL_FORMAT % stubbed_path stubbed_url = Utils.add_query_to_url(url, query_params) # mock out hymnal.net response # https://docs.python.org/3/library/unittest.mock.html mock_response = Mock() mock_data_format = Utils.add_query_to_url('test_data/get_song_html_{}_{}', query_params) mock_data_format += '.txt' with open(mock_data_format.format(hymn_type, hymn_number), 'r') as m: mock_response.text = m.read() # key order doesn't matter for dict equality, so compare query parameter dicts def get_url(url): parsed_url = urllib.parse.urlparse(url) params = urllib.parse.parse_qsl(parsed_url.query) assert_equal(dict(query_params), dict(params)) return mock_response # http://stackoverflow.com/questions/15753390/python-mock-requests-and-the-response with patch('requests.get', Mock(side_effect=get_url)) as n: self.assert_get_hymn(hymn_type, hymn_number, query_params, stored_content_path)
def test_get_overlays_inactive(self, mock_projector_overlays):
    """get_overlays(only_active=True) must filter out inactive overlays."""
    inactive_overlay = MagicMock()
    inactive_overlay.name = 'mock_overlay_2'
    inactive_overlay.is_active.return_value = False
    # The signal yields (receiver, overlay) pairs.
    mock_projector_overlays.send.return_value = ((None, inactive_overlay), )
    result = projector_api.get_overlays(only_active=True)
    self.assertNotEqual(result, {'mock_overlay_2': inactive_overlay})
def test_single_unhandled(self):
    """A value refused by the only callback raises, but the callback still ran."""
    handler = MagicMock(return_value=False)
    self.stack.register(handler)
    self.assertRaises(RuntimeError, self.stack, 666)
    handler.assert_called_once_with(666)
    self.stack.unregister(handler)
    # With no callbacks left, dispatching at all is a RuntimeError.
    self.assertRaises(RuntimeError, self.stack)
def test_update_projector_overlay(self, mock_ProjectorSocketHandler, mock_get_overlays):
    """update_projector_overlay pushes overlay HTML/JS to the projector socket."""
    mock_overlay = MagicMock()
    mock_overlay.name = 'mock_overlay_name'
    mock_overlay.get_projector_html.return_value = 'mock_html_code'
    mock_overlay.get_javascript.return_value = 'mock_javascript'
    mock_get_overlays.return_value = {'mock_overlay': mock_overlay}
    # Test with inactive overlay: only a None payload is sent.
    mock_overlay.is_active.return_value = False
    projector_api.update_projector_overlay(None)
    mock_ProjectorSocketHandler.send_updates.assert_called_with(
        {'overlays': {'mock_overlay_name': None}})
    # Test with active overlay
    mock_overlay.is_active.return_value = True
    projector_api.update_projector_overlay(None)
    expected_data = {'overlays': {'mock_overlay_name': {
        'html': 'mock_html_code',
        'javascript': 'mock_javascript'}}}
    mock_ProjectorSocketHandler.send_updates.assert_called_with(expected_data)
    # Test with overlay name as argument
    projector_api.update_projector_overlay('mock_overlay')
    mock_ProjectorSocketHandler.send_updates.assert_called_with(expected_data)
    # Test with overlay object as argument
    projector_api.update_projector_overlay(mock_overlay)
    mock_ProjectorSocketHandler.send_updates.assert_called_with(expected_data)
def test__post_save_receiver(self, Activity, ContentType):
    """_post_save_receiver records 'created'/'updated' activities and saves them."""
    ct = MagicMock()
    instance = MagicMock()
    instance.pk = 1
    activity = MagicMock()
    ContentType.objects.get_for_model.return_value = ct
    Activity.return_value = activity
    mediator = ActivityMediator()
    mediator.alter = MagicMock(return_value=activity)
    mediator.prepare_snapshot = MagicMock(return_value=None)
    mediator.render = MagicMock()  # it will be called by notifiers
    # created=True -> an Activity with status 'created'.
    mediator._post_save_receiver(None, instance, created=True)
    Activity.assert_called_with(content_type=ct, object_id=instance.pk, status='created')
    # user defined alternation code is called
    mediator.alter.assert_called_with(instance, activity)
    # activity save method is called
    activity.save.assert_called_with()
    # created=False -> an Activity with status 'updated'.
    mediator._post_save_receiver(None, instance, created=False)
    Activity.assert_called_with(content_type=ct, object_id=instance.pk, status='updated')
    # user defined alternation code is called
    mediator.alter.assert_called_with(instance, activity)
    # user defined snapshot preparation code is called
    mediator.prepare_snapshot.assert_called_with(instance, activity)
    # activity save method is called
    activity.save.assert_called_with()
def test__m2m_changed_receiver(self, Activity, ContentType):
    """_m2m_changed_receiver forwards m2m actions to alter/prepare_snapshot."""
    ct = MagicMock()
    instance = MagicMock()
    instance.pk = 1
    activity = MagicMock()
    ContentType.objects.get_for_model.return_value = ct
    Activity.return_value = activity
    mediator = ActivityMediator()
    mediator.alter = MagicMock(return_value=activity)
    mediator.prepare_snapshot = MagicMock(return_value=None)
    mediator.render = MagicMock()  # it will be called by notifiers
    mediator._m2m_changed_receiver(None, instance, action='pre_add', reverse=False)
    # user defined alternation code is called
    mediator.alter.assert_called_with(instance, None, action='pre_add', reverse=False)
    # user defined snapshot preparation code is called
    mediator.prepare_snapshot.assert_called_with(instance, activity, action='pre_add', reverse=False)
    # activity save method is called
    activity.save.assert_called_with()
def test_sendPkt():
    """
    Checks if the modified sleep function works right.

    Returns 0 on success, -1 when sendPkt did not sleep for delay/1000 s.
    """
    # mocking socket
    mocked_socket = MagicMock()
    mocked_socket.sendto = MagicMock(return_value=0)
    # mocking sleep
    picture_cast_todo.sleep = MagicMock(return_value=0)
    # run function
    picture_cast_todo.sendPkt(mocked_socket, 0, 0, 10)
    # check if mocked functions were called
    try:
        picture_cast_todo.sleep.assert_called_once_with(10 / 1000)
    except AssertionError:
        # BUG FIX: catch only the assertion failure; the previous bare
        # ``except:`` also swallowed unrelated errors (even SystemExit),
        # masking genuine failures in this test itself.
        print("The delay in sendPkt is not correct!")
        print(sys.exc_info())
        return -1
    return 0
def FileIO(self, name, mode):
    """Proxy for tensorflow.python.lib.io.file_io.FileIO class.

    Mocks the class if a real GCS bucket is not available for testing.

    Args:
        name: file path; paths under the GCS prefix are served from the
            in-memory ``local_objects`` store when mocking is enabled.
        mode: only 'rb' and 'wb' are supported for mocked GCS paths.

    Raises:
        IOError: reading a mocked GCS path that was never written.
        ValueError: an unsupported mode for a mocked GCS path.
    """
    self._check_started()
    if not self.mock_gcs:
        return tf_file_io.FileIO(name, mode)

    filepath = name
    if filepath.startswith(self._gcs_prefix):
        mock_fio = MagicMock()
        # Support use as a context manager: `with FileIO(...) as f`.
        mock_fio.__enter__ = Mock(return_value=mock_fio)
        if mode == 'rb':
            if filepath not in self.local_objects:
                raise IOError('{} does not exist'.format(filepath))
            self.local_objects[filepath].seek(0)
            mock_fio.read = self.local_objects[filepath].read
        elif mode == 'wb':
            self.local_objects[filepath] = BytesIO()
            mock_fio.write = self.local_objects[filepath].write
        else:
            # BUG FIX: the template was never formatted, so the literal '{}'
            # placeholder leaked into the error message.
            raise ValueError(
                '{} only supports wrapping of FileIO for `mode` '
                '"rb" or "wb"'.format(type(self).__name__))
        return mock_fio
    # Non-GCS paths fall through to the real local filesystem.
    return open(filepath, mode)
class TestEventListener(TestCase):
    """EventListener must support both plain callables and coroutines."""

    def setUp(self):
        # Counters updated by the coroutine listener below.
        self._cofunc_call_count = 0
        self._cofunc_calls = []

        @asyncio.coroutine
        def a(event):
            # Record each invocation on the enclosing test instance.
            nonlocal self
            self._cofunc_call_count += 1
            self._cofunc_calls.append(event)
            yield from asyncio.sleep(0)

        self.e = Event("event")
        # _is_coroutine=False stops EventListener treating the mock as a coroutine.
        self.func = MagicMock(_is_coroutine=False)
        self.cofunc = a
        self.func_listener = EventListener(self.func)
        self.cofunc_listener = EventListener(self.cofunc)

    def test_func(self):
        # A plain-function listener is invoked synchronously with the event.
        self.func_listener(self.e)
        self.func.assert_called_once_with(self.e)

    @sync
    @asyncio.coroutine
    def test_cofunc(self):
        # A coroutine listener must be awaited and receive the event once.
        yield from self.cofunc_listener(self.e)
        self.assertEqual(self._cofunc_call_count, 1)
        self.assertEqual(self._cofunc_calls[0], self.e)
def workflow_task():
    """Fixture: a stand-in workflow task (a plain MagicMock)."""
    return MagicMock()
class TestUserUseCases(unittest.TestCase):
    """Unit tests for UserUseCases: list, register, and status updates."""

    # Shared mocked collaborators; each test stubs the methods it needs.
    uow = MagicMock()
    pwd_encoder = MagicMock()

    def test_list_empty(self):
        # An empty repository yields an empty listing.
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        self.uow.repository.all = MagicMock(return_value=[])
        user_query = UserQuery()
        self.assertEqual([], user_usecases.list(user_query))

    def test_list_with_results(self):
        user_id = UserId(id=str(uuid.uuid4()))
        user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******'
        )
        self.uow.repository.all = MagicMock(return_value=[user_mock])
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        user_query = UserQuery()
        self.assertEqual(user_usecases.list(user_query), [user_mock])

    def test_list_with_filtered_results(self):
        user_id = UserId(id=str(uuid.uuid4()))
        user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******'
        )
        self.uow.repository.all = MagicMock(return_value=[user_mock])
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        # Free-text query filter.
        user_query = UserQuery(q='mock')
        self.assertEqual(user_usecases.list(user_query), [user_mock])

    def test_register(self):
        user_id = UserId(id=str(uuid.uuid4()))
        user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******'
        )
        # No pre-existing user with the same email/username.
        self.uow.repository.find_by_email_or_username = MagicMock(return_value=None)
        self.uow.repository.save = MagicMock(return_value=user_mock)
        self.pwd_encoder.encode = MagicMock(return_value='aaaa')
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        user_register = UserCreateCommand(
            username='******',
            email='*****@*****.**',
            password='******'
        )
        self.assertIsNotNone(user_usecases.register(user_register))

    def test_register_duplicate_username(self):
        user_id = UserId(id=str(uuid.uuid4()))
        user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******'
        )
        previous_user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******'
        )
        # An existing user with the same email/username blocks registration.
        self.uow.repository.find_by_email_or_username = MagicMock(
            return_value=previous_user_mock
        )
        self.uow.repository.save = MagicMock(return_value=user_mock)
        self.pwd_encoder.encode = MagicMock(return_value='aaaa')
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        user_register = UserCreateCommand(
            username='******',
            email='*****@*****.**',
            password='******'
        )
        self.assertRaises(
            UserAlreadyExistException,
            user_usecases.register,
            user_register
        )

    def test_update_user_status_to_blocked(self):
        user_id = UserId(id=str(uuid.uuid4()))
        user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******',
            status=UserStatus.ACTIVE,
        )
        self.uow.repository.find_by_id = MagicMock(return_value=user_mock)
        self.uow.repository.save = MagicMock(return_value=user_mock)
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        command = UpdateUserStatusCommand(
            user_id=user_id.id,
            status=UserStatus.BLOCKED.value
        )
        updated_user = user_usecases.update_status(command)
        assert updated_user.is_blocked()

    def test_update_user_status_to_active(self):
        user_id = UserId(id=str(uuid.uuid4()))
        user_mock = User(
            id=user_id,
            username='******',
            email='*****@*****.**',
            password='******',
            status=UserStatus.BLOCKED,
        )
        self.uow.repository.find_by_id = MagicMock(return_value=user_mock)
        self.uow.repository.save = MagicMock(return_value=user_mock)
        user_usecases = UserUseCases(self.uow, self.pwd_encoder)
        command = UpdateUserStatusCommand(
            user_id=user_id.id,
            status=UserStatus.ACTIVE.value
        )
        updated_user = user_usecases.update_status(command)
        assert not updated_user.is_blocked()
def __getattr__(cls, name):
    # Fallback attribute hook: any undefined attribute resolves to a fresh
    # MagicMock. NOTE(review): presumably used on a mock metaclass to stub
    # out unavailable modules during import — confirm against callers.
    return MagicMock()
def test_init(self):
    """``dockerpty.io`` the Stream object only requires a file descriptor"""
    descriptor = MagicMock()
    wrapped = io.Stream(descriptor)
    self.assertTrue(isinstance(wrapped, io.Stream))
def test_select_error(self, fake_select):
    """``dockerpty.io`` 'select' raises unexpected errors"""
    # The underlying select call blows up on first use.
    fake_select.side_effect = [RuntimeError('testing')]
    read_fd, write_fd = MagicMock(), MagicMock()
    with self.assertRaises(RuntimeError):
        io.select(read_fd, write_fd)
def test_needs_write_false(self):
    """``dockerpty.io`` Stream.needs_write Returns False when buffer is empty"""
    descriptor = MagicMock()
    freshly_created = io.Stream(descriptor)
    self.assertFalse(freshly_created.needs_write())
def decider():
    """Fixture: a stand-in decider (a plain MagicMock)."""
    return MagicMock()
def test_stoploss_order_binance(default_conf, mocker, limitratio, expected, side, trademode):
    """Binance stoploss orders: parameter passing, limit-ratio validation,
    and exchange-error translation (parametrized over side/trademode)."""
    api_mock = MagicMock()
    order_id = 'test_prod_buy_{}'.format(randint(0, 10 ** 6))
    # Spot uses stop-loss-limit orders; futures use plain 'stop'.
    order_type = 'stop_loss_limit' if trademode == TradingMode.SPOT else 'stop'
    api_mock.create_order = MagicMock(return_value={
        'id': order_id,
        'info': {
            'foo': 'bar'
        }
    })
    default_conf['dry_run'] = False
    default_conf['margin_mode'] = MarginMode.ISOLATED
    default_conf['trading_mode'] = trademode
    # Precision helpers pass values through unchanged.
    mocker.patch('freqtrade.exchange.Exchange.amount_to_precision', lambda s, x, y: y)
    mocker.patch('freqtrade.exchange.Exchange.price_to_precision', lambda s, x, y: y)
    exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')
    # A limit ratio above 1 is invalid and must be rejected.
    with pytest.raises(OperationalException):
        order = exchange.stoploss(
            pair='ETH/BTC',
            amount=1,
            stop_price=190,
            side=side,
            order_types={'stoploss_on_exchange_limit_ratio': 1.05},
            leverage=1.0
        )
    api_mock.create_order.reset_mock()
    order_types = {} if limitratio is None else {'stoploss_on_exchange_limit_ratio': limitratio}
    order = exchange.stoploss(
        pair='ETH/BTC',
        amount=1,
        stop_price=220,
        order_types=order_types,
        side=side,
        leverage=1.0
    )
    assert 'id' in order
    assert 'info' in order
    assert order['id'] == order_id
    assert api_mock.create_order.call_args_list[0][1]['symbol'] == 'ETH/BTC'
    assert api_mock.create_order.call_args_list[0][1]['type'] == order_type
    assert api_mock.create_order.call_args_list[0][1]['side'] == side
    assert api_mock.create_order.call_args_list[0][1]['amount'] == 1
    # Price should be 1% below stopprice
    assert api_mock.create_order.call_args_list[0][1]['price'] == expected
    if trademode == TradingMode.SPOT:
        params_dict = {'stopPrice': 220}
    else:
        # Futures stoplosses must only ever reduce the position.
        params_dict = {'stopPrice': 220, 'reduceOnly': True}
    assert api_mock.create_order.call_args_list[0][1]['params'] == params_dict
    # test exception handling
    with pytest.raises(DependencyException):
        api_mock.create_order = MagicMock(side_effect=ccxt.InsufficientFunds("0 balance"))
        exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')
        exchange.stoploss(
            pair='ETH/BTC',
            amount=1,
            stop_price=220,
            order_types={},
            side=side,
            leverage=1.0)
    with pytest.raises(InvalidOrderException):
        api_mock.create_order = MagicMock(
            side_effect=ccxt.InvalidOrder("binance Order would trigger immediately."))
        exchange = get_patched_exchange(mocker, default_conf, api_mock, 'binance')
        exchange.stoploss(
            pair='ETH/BTC',
            amount=1,
            stop_price=220,
            order_types={},
            side=side,
            leverage=1.0
        )
    # Generic ccxt exception translation with a single retry.
    ccxt_exceptionhandlers(mocker, default_conf, api_mock, "binance",
                           "stoploss", "create_order", retries=1,
                           pair='ETH/BTC', amount=1, stop_price=220,
                           order_types={}, side=side, leverage=1.0)
def test_auto_confirm_without_assume_yes(self):
    """Every falsy constructor value must make ask() return False."""
    for falsy_value in (False, '', 0):
        confirm = Confirm(falsy_value)
        confirm.input = MagicMock()
        # The failing value is attached as the assertion message.
        self.assertFalse(confirm.ask(), falsy_value)
def test_resolve_filter_paths(self) -> None:
    """_resolve_filter_paths combines source directories, target packages,
    and the local configuration root into the filter-path set."""
    arguments = MagicMock()
    configuration = MagicMock()
    original_directory = "/project"
    # No sources, no targets, no local root -> empty filter set.
    arguments.source_directories = []
    arguments.targets = []
    configuration.local_configuration_root = None
    filter_paths = _resolve_filter_paths(
        arguments, configuration, original_directory
    )
    self.assertEqual(filter_paths, set())
    # Source directories are used as given.
    arguments.source_directories = ["/project/a"]
    filter_paths = _resolve_filter_paths(
        arguments, configuration, original_directory
    )
    self.assertEqual(filter_paths, {"/project/a"})
    # Targets contribute their package path: '//x/y/...' -> 'x/y'.
    arguments.source_directories = ["/project/a"]
    arguments.targets = ["//x/y/..."]
    filter_paths = _resolve_filter_paths(
        arguments, configuration, original_directory
    )
    self.assertEqual(filter_paths, {"/project/a", "x/y"})
    # Explicit rule targets ('//x/y:z') contribute 'x/y' as well.
    arguments.source_directories = ["/project/local/a"]
    arguments.targets = ["//x/y:z"]
    configuration.local_configuration_root = "project/local"
    filter_paths = _resolve_filter_paths(
        arguments, configuration, original_directory
    )
    self.assertEqual(filter_paths, {"/project/local/a", "x/y"})
    # With nothing else, the local configuration root is the filter.
    arguments.source_directories = []
    arguments.targets = []
    configuration.local_configuration_root = "/project/local"
    filter_paths = _resolve_filter_paths(
        arguments, configuration, original_directory
    )
    self.assertEqual(filter_paths, {"/project/local"})
def test_no_auto_confirm_lower(self):
    """A lowercase 'y' answer is accepted as confirmation."""
    prompt = Confirm()
    prompt.input = MagicMock(return_value='y')
    self.assertTrue(prompt.ask())
def test_fill_leverage_tiers_binance(default_conf, mocker):
    """`fill_leverage_tiers` must convert the raw ccxt leverage-tier payload into
    freqtrade's internal {min, max, mmr, lev, maintAmt} structure, and its ccxt
    error handling must be exercised via `ccxt_exceptionhandlers`."""
    api_mock = MagicMock()
    # Raw payload as returned by ccxt's `fetch_leverage_tiers` for two futures pairs.
    api_mock.fetch_leverage_tiers = MagicMock(return_value={
        'ADA/BUSD': [
            {"tier": 1, "minNotional": 0, "maxNotional": 100000, "maintenanceMarginRate": 0.025, "maxLeverage": 20,
             "info": {"bracket": "1", "initialLeverage": "20", "maxNotional": "100000", "minNotional": "0", "maintMarginRatio": "0.025", "cum": "0.0"}},
            {"tier": 2, "minNotional": 100000, "maxNotional": 500000, "maintenanceMarginRate": 0.05, "maxLeverage": 10,
             "info": {"bracket": "2", "initialLeverage": "10", "maxNotional": "500000", "minNotional": "100000", "maintMarginRatio": "0.05", "cum": "2500.0"}},
            {"tier": 3, "minNotional": 500000, "maxNotional": 1000000, "maintenanceMarginRate": 0.1, "maxLeverage": 5,
             "info": {"bracket": "3", "initialLeverage": "5", "maxNotional": "1000000", "minNotional": "500000", "maintMarginRatio": "0.1", "cum": "27500.0"}},
            {"tier": 4, "minNotional": 1000000, "maxNotional": 2000000, "maintenanceMarginRate": 0.15, "maxLeverage": 3,
             "info": {"bracket": "4", "initialLeverage": "3", "maxNotional": "2000000", "minNotional": "1000000", "maintMarginRatio": "0.15", "cum": "77500.0"}},
            {"tier": 5, "minNotional": 2000000, "maxNotional": 5000000, "maintenanceMarginRate": 0.25, "maxLeverage": 2,
             "info": {"bracket": "5", "initialLeverage": "2", "maxNotional": "5000000", "minNotional": "2000000", "maintMarginRatio": "0.25", "cum": "277500.0"}},
            {"tier": 6, "minNotional": 5000000, "maxNotional": 30000000, "maintenanceMarginRate": 0.5, "maxLeverage": 1,
             "info": {"bracket": "6", "initialLeverage": "1", "maxNotional": "30000000", "minNotional": "5000000", "maintMarginRatio": "0.5", "cum": "1527500.0"}}
        ],
        "ZEC/USDT": [
            {"tier": 1, "minNotional": 0, "maxNotional": 50000, "maintenanceMarginRate": 0.01, "maxLeverage": 50,
             "info": {"bracket": "1", "initialLeverage": "50", "maxNotional": "50000", "minNotional": "0", "maintMarginRatio": "0.01", "cum": "0.0"}},
            {"tier": 2, "minNotional": 50000, "maxNotional": 150000, "maintenanceMarginRate": 0.025, "maxLeverage": 20,
             "info": {"bracket": "2", "initialLeverage": "20", "maxNotional": "150000", "minNotional": "50000", "maintMarginRatio": "0.025", "cum": "750.0"}},
            {"tier": 3, "minNotional": 150000, "maxNotional": 250000, "maintenanceMarginRate": 0.05, "maxLeverage": 10,
             "info": {"bracket": "3", "initialLeverage": "10", "maxNotional": "250000", "minNotional": "150000", "maintMarginRatio": "0.05", "cum": "4500.0"}},
            {"tier": 4, "minNotional": 250000, "maxNotional": 500000, "maintenanceMarginRate": 0.1, "maxLeverage": 5,
             "info": {"bracket": "4", "initialLeverage": "5", "maxNotional": "500000", "minNotional": "250000", "maintMarginRatio": "0.1", "cum": "17000.0"}},
            {"tier": 5, "minNotional": 500000, "maxNotional": 1000000, "maintenanceMarginRate": 0.125, "maxLeverage": 4,
             "info": {"bracket": "5", "initialLeverage": "4", "maxNotional": "1000000", "minNotional": "500000", "maintMarginRatio": "0.125", "cum": "29500.0"}},
            {"tier": 6, "minNotional": 1000000, "maxNotional": 2000000, "maintenanceMarginRate": 0.25, "maxLeverage": 2,
             "info": {"bracket": "6", "initialLeverage": "2", "maxNotional": "2000000", "minNotional": "1000000", "maintMarginRatio": "0.25", "cum": "154500.0"}},
            {"tier": 7, "minNotional": 2000000, "maxNotional": 30000000, "maintenanceMarginRate": 0.5, "maxLeverage": 1,
             "info": {"bracket": "7", "initialLeverage": "1", "maxNotional": "30000000", "minNotional": "2000000", "maintMarginRatio": "0.5", "cum": "654500.0"}}
        ],
    })
    # Futures / isolated-margin live mode is required for leverage tiers to be loaded.
    default_conf['dry_run'] = False
    default_conf['trading_mode'] = TradingMode.FUTURES
    default_conf['margin_mode'] = MarginMode.ISOLATED
    exchange = get_patched_exchange(mocker, default_conf, api_mock, id="binance")
    exchange.fill_leverage_tiers()

    # Each raw tier is reduced to the internal 5-key representation;
    # `maintAmt` is taken from the exchange-specific `info.cum` field.
    assert exchange._leverage_tiers == {
        'ADA/BUSD': [
            {"min": 0, "max": 100000, "mmr": 0.025, "lev": 20, "maintAmt": 0.0},
            {"min": 100000, "max": 500000, "mmr": 0.05, "lev": 10, "maintAmt": 2500.0},
            {"min": 500000, "max": 1000000, "mmr": 0.1, "lev": 5, "maintAmt": 27500.0},
            {"min": 1000000, "max": 2000000, "mmr": 0.15, "lev": 3, "maintAmt": 77500.0},
            {"min": 2000000, "max": 5000000, "mmr": 0.25, "lev": 2, "maintAmt": 277500.0},
            {"min": 5000000, "max": 30000000, "mmr": 0.5, "lev": 1, "maintAmt": 1527500.0}
        ],
        "ZEC/USDT": [
            {'min': 0, 'max': 50000, 'mmr': 0.01, 'lev': 50, 'maintAmt': 0.0},
            {'min': 50000, 'max': 150000, 'mmr': 0.025, 'lev': 20, 'maintAmt': 750.0},
            {'min': 150000, 'max': 250000, 'mmr': 0.05, 'lev': 10, 'maintAmt': 4500.0},
            {'min': 250000, 'max': 500000, 'mmr': 0.1, 'lev': 5, 'maintAmt': 17000.0},
            {'min': 500000, 'max': 1000000, 'mmr': 0.125, 'lev': 4, 'maintAmt': 29500.0},
            {'min': 1000000, 'max': 2000000, 'mmr': 0.25, 'lev': 2, 'maintAmt': 154500.0},
            {'min': 2000000, 'max': 30000000, 'mmr': 0.5, 'lev': 1, 'maintAmt': 654500.0},
        ]
    }

    # Fresh mock for the exception-handling pass over the same entry point.
    api_mock = MagicMock()
    api_mock.load_leverage_tiers = MagicMock()
    type(api_mock).has = PropertyMock(return_value={'fetchLeverageTiers': True})

    ccxt_exceptionhandlers(
        mocker,
        default_conf,
        api_mock,
        "binance",
        "fill_leverage_tiers",
        "fetch_leverage_tiers",
    )
def test_may_execute_sell_stoploss_on_exchange_multi(default_conf, ticker, fee, limit_buy_order, mocker) -> None:
    """
    Tests workflow of selling stoploss_on_exchange.
    Sells
    * first trade as stoploss
    * 2nd trade is kept
    * 3rd trade is sold via sell-signal
    """
    default_conf['max_open_trades'] = 3
    default_conf['exchange']['name'] = 'binance'

    stoploss = {'id': 123, 'info': {}}
    # Canned exchange response for an open stop-loss order.
    stoploss_order_open = {
        "id": "123",
        "timestamp": 1542707426845,
        "datetime": "2018-11-20T09:50:26.845Z",
        "lastTradeTimestamp": None,
        "symbol": "BTC/USDT",
        "type": "stop_loss_limit",
        "side": "sell",
        "price": 1.08801,
        "amount": 90.99181074,
        "cost": 0.0,
        "average": 0.0,
        "filled": 0.0,
        "remaining": 0.0,
        "status": "open",
        "fee": None,
        "trades": None
    }
    # Same order, but fully filled/closed - simulates the stoploss having triggered.
    stoploss_order_closed = stoploss_order_open.copy()
    stoploss_order_closed['status'] = 'closed'
    stoploss_order_closed['filled'] = stoploss_order_closed['amount']
    # Sell first trade based on stoploss, keep 2nd and 3rd trade open
    # NOTE: side_effect order maps to the order trades are processed in - do not reorder.
    stoploss_order_mock = MagicMock(
        side_effect=[stoploss_order_closed, stoploss_order_open, stoploss_order_open])
    # Sell 3rd trade (not called for the first trade)
    should_sell_mock = MagicMock(side_effect=[
        SellCheckTuple(sell_flag=False, sell_type=SellType.NONE),
        SellCheckTuple(sell_flag=True, sell_type=SellType.SELL_SIGNAL)]
    )
    cancel_order_mock = MagicMock()
    mocker.patch('freqtrade.exchange.Binance.stoploss', stoploss)
    mocker.patch.multiple(
        'freqtrade.exchange.Exchange',
        fetch_ticker=ticker,
        get_fee=fee,
        amount_to_precision=lambda s, x, y: y,
        price_to_precision=lambda s, x, y: y,
        fetch_stoploss_order=stoploss_order_mock,
        cancel_stoploss_order=cancel_order_mock,
    )
    mocker.patch.multiple(
        'freqtrade.freqtradebot.FreqtradeBot',
        create_stoploss_order=MagicMock(return_value=True),
        _notify_sell=MagicMock(),
    )
    mocker.patch("freqtrade.strategy.interface.IStrategy.should_sell", should_sell_mock)
    wallets_mock = mocker.patch("freqtrade.wallets.Wallets.update", MagicMock())
    mocker.patch("freqtrade.wallets.Wallets.get_free", MagicMock(return_value=1000))

    freqtrade = get_patched_freqtradebot(mocker, default_conf)
    freqtrade.strategy.order_types['stoploss_on_exchange'] = True
    # Switch ordertype to market to close trade immediately
    freqtrade.strategy.order_types['sell'] = 'market'
    freqtrade.strategy.confirm_trade_entry = MagicMock(return_value=True)
    freqtrade.strategy.confirm_trade_exit = MagicMock(return_value=True)
    patch_get_signal(freqtrade)

    # Create some test data
    freqtrade.enter_positions()
    assert freqtrade.strategy.confirm_trade_entry.call_count == 3
    freqtrade.strategy.confirm_trade_entry.reset_mock()
    assert freqtrade.strategy.confirm_trade_exit.call_count == 0
    wallets_mock.reset_mock()

    Trade.session = MagicMock()
    trades = Trade.query.all()
    # Make sure stoploss-order is open and trade is bought (since we mock update_trade_state)
    for trade in trades:
        trade.stoploss_order_id = 3
        trade.open_order_id = None

    n = freqtrade.exit_positions(trades)
    # Two trades exit: one via stoploss-hit, one via sell-signal.
    assert n == 2
    assert should_sell_mock.call_count == 2
    assert freqtrade.strategy.confirm_trade_entry.call_count == 0
    assert freqtrade.strategy.confirm_trade_exit.call_count == 1
    freqtrade.strategy.confirm_trade_exit.reset_mock()

    # Only order for 3rd trade needs to be cancelled
    assert cancel_order_mock.call_count == 1
    # Wallets must be updated between stoploss cancellation and selling, and will be updated again
    # during update_trade_state
    assert wallets_mock.call_count == 4

    trade = trades[0]
    assert trade.sell_reason == SellType.STOPLOSS_ON_EXCHANGE.value
    assert not trade.is_open
    trade = trades[1]
    assert not trade.sell_reason
    assert trade.is_open
    trade = trades[2]
    assert trade.sell_reason == SellType.SELL_SIGNAL.value
    assert not trade.is_open
def test_no_auto_confirm_when_something_else(self):
    """Any answer other than an explicit yes is rejected."""
    answers = ('x', 0, 1, '', None, False)
    # side_effect hands out one answer per ask() call, in order.
    prompt = Confirm()
    prompt.input = MagicMock(side_effect=answers)
    for answer in answers:
        self.assertFalse(prompt.ask(), answer)
def valid_service_account_patcher():
    """Patch all service-account validation helpers with "valid" defaults.

    Each helper is patched in both modules that reference it (`access_utils`
    and `validity`) using a single shared mock, so tests can inspect or
    override one mock and affect both call sites.  Yields a dict of the mocks
    keyed by helper name; stops every patch on teardown.
    """
    valid_type_mock = MagicMock()
    external_access_mock = MagicMock()
    from_google_project_mock = MagicMock()
    get_policy_mock = MagicMock()

    patch_targets = [
        ("fence.resources.google.access_utils.is_valid_service_account_type", valid_type_mock),
        ("fence.resources.google.validity.is_valid_service_account_type", valid_type_mock),
        ("fence.resources.google.access_utils.service_account_has_external_access", external_access_mock),
        ("fence.resources.google.validity.service_account_has_external_access", external_access_mock),
        ("fence.resources.google.access_utils.is_service_account_from_google_project", from_google_project_mock),
        ("fence.resources.google.validity.is_service_account_from_google_project", from_google_project_mock),
        ("fence.resources.google.access_utils.get_service_account_policy", get_policy_mock),
        ("fence.resources.google.validity.get_service_account_policy", get_policy_mock),
    ]
    patches = [patch(target, mock) for target, mock in patch_targets]

    # Defaults describe a well-behaved service account.
    valid_type_mock.return_value = True
    external_access_mock.return_value = False
    from_google_project_mock.return_value = True
    get_policy_mock.return_value = None

    for patched_function in patches:
        patched_function.start()

    yield {
        "is_valid_service_account_type": (valid_type_mock),
        "service_account_has_external_access": (external_access_mock),
        "is_service_account_from_google_project": (from_google_project_mock),
        "get_service_account_policy": (get_policy_mock),
    }

    for patched_function in patches:
        patched_function.stop()
def test_forcebuy_last_unlimited(default_conf, ticker, fee, limit_buy_order, mocker, balance_ratio, result1) -> None:
    """
    Tests workflow unlimited stake-amount
    Buy 4 trades, forcebuy a 5th trade
    Sell one trade, calculated stake amount should now be lower than before since
    one trade was sold at a loss.
    """
    default_conf['max_open_trades'] = 5
    default_conf['forcebuy_enable'] = True
    default_conf['stake_amount'] = 'unlimited'
    default_conf['tradable_balance_ratio'] = balance_ratio
    default_conf['dry_run_wallet'] = 1000
    default_conf['exchange']['name'] = 'binance'
    default_conf['telegram']['enabled'] = True
    mocker.patch('freqtrade.rpc.telegram.Telegram', MagicMock())
    mocker.patch.multiple(
        'freqtrade.exchange.Exchange',
        fetch_ticker=ticker,
        get_fee=fee,
        amount_to_precision=lambda s, x, y: y,
        price_to_precision=lambda s, x, y: y,
    )
    mocker.patch.multiple(
        'freqtrade.freqtradebot.FreqtradeBot',
        create_stoploss_order=MagicMock(return_value=True),
        _notify_sell=MagicMock(),
    )
    # One SellCheckTuple per open trade on the exit pass; only the 2nd trade
    # produces a sell-signal.  NOTE: side_effect order matters - do not reorder.
    should_sell_mock = MagicMock(side_effect=[
        SellCheckTuple(sell_flag=False, sell_type=SellType.NONE),
        SellCheckTuple(sell_flag=True, sell_type=SellType.SELL_SIGNAL),
        SellCheckTuple(sell_flag=False, sell_type=SellType.NONE),
        SellCheckTuple(sell_flag=False, sell_type=SellType.NONE),
        SellCheckTuple(sell_flag=None, sell_type=SellType.NONE)]
    )
    mocker.patch("freqtrade.strategy.interface.IStrategy.should_sell", should_sell_mock)

    freqtrade = get_patched_freqtradebot(mocker, default_conf)
    rpc = RPC(freqtrade)
    freqtrade.strategy.order_types['stoploss_on_exchange'] = True
    # Switch ordertype to market to close trade immediately
    freqtrade.strategy.order_types['sell'] = 'market'
    patch_get_signal(freqtrade)

    # Create 4 trades
    n = freqtrade.enter_positions()
    assert n == 4

    trades = Trade.query.all()
    assert len(trades) == 4
    assert freqtrade.wallets.get_trade_stake_amount(
        'XRP/BTC', freqtrade.get_free_open_trades()) == result1

    # Force-buy a 5th trade through the RPC layer.
    rpc._rpc_forcebuy('TKN/BTC', None)

    trades = Trade.query.all()
    assert len(trades) == 5

    for trade in trades:
        assert trade.stake_amount == result1
        # Reset trade open order id's
        trade.open_order_id = None
    trades = Trade.get_open_trades()
    assert len(trades) == 5
    bals = freqtrade.wallets.get_all_balances()

    n = freqtrade.exit_positions(trades)
    assert n == 1
    trades = Trade.get_open_trades()
    # One trade sold
    assert len(trades) == 4
    # stake-amount should now be reduced, since one trade was sold at a loss.
    assert freqtrade.wallets.get_trade_stake_amount(
        'XRP/BTC', freqtrade.get_free_open_trades()) < result1
    # Validate that balance of sold trade is not in dry-run balances anymore.
    bals2 = freqtrade.wallets.get_all_balances()
    assert bals != bals2
    assert len(bals) == 6
    assert len(bals2) == 5
    assert 'LTC' in bals
    assert 'LTC' not in bals2
def netdisco_mock():
    """Temporarily replace the netdisco discovery module with a stub.

    The patch is active only while the fixture is held; it is undone when the
    generator resumes and the ``with`` block exits.
    """
    fake_discovery_module = MagicMock()
    with patch.dict("sys.modules", {"netdisco.discovery": fake_discovery_module}):
        yield
def valid_google_project_patcher():
    """Patch all Google-project validation helpers with "valid project" defaults.

    Helpers used by both `access_utils` and `validity` are patched in both
    modules with one shared mock each; a few helpers only exist in `validity`.
    Yields a dict of the mocks keyed by helper name; stops every patch on
    teardown.
    """
    get_project_number_mock = MagicMock()
    parent_org_mock = MagicMock()
    valid_membership_mock = MagicMock()
    get_users_from_members_mock = MagicMock()
    remove_white_listed_accounts_mock = MagicMock()
    users_have_access_mock = MagicMock()
    get_registered_service_accounts_with_access_mock = MagicMock()
    project_access_mock = MagicMock()
    project_service_accounts_mock = MagicMock()
    user_has_access_mock = MagicMock()

    patch_targets = [
        ("fence.resources.google.access_utils.get_google_project_number", get_project_number_mock),
        ("fence.resources.google.validity.get_google_project_number", get_project_number_mock),
        ("fence.resources.google.access_utils.get_google_project_parent_org", parent_org_mock),
        ("fence.resources.google.validity.get_google_project_parent_org", parent_org_mock),
        ("fence.resources.google.access_utils.get_google_project_valid_users_and_service_accounts", valid_membership_mock),
        ("fence.resources.google.validity.get_google_project_valid_users_and_service_accounts", valid_membership_mock),
        ("fence.resources.google.access_utils.get_users_from_google_members", get_users_from_members_mock),
        ("fence.resources.google.validity.get_users_from_google_members", get_users_from_members_mock),
        ("fence.resources.google.access_utils.remove_white_listed_service_account_ids", remove_white_listed_accounts_mock),
        ("fence.resources.google.validity.remove_white_listed_service_account_ids", remove_white_listed_accounts_mock),
        ("fence.resources.google.access_utils.do_all_users_have_access_to_project", users_have_access_mock),
        ("fence.resources.google.validity.do_all_users_have_access_to_project", users_have_access_mock),
        # validity-only helpers:
        ("fence.resources.google.validity.get_registered_service_accounts_with_access", get_registered_service_accounts_with_access_mock),
        ("fence.resources.google.validity.get_project_access_from_service_accounts", project_access_mock),
        ("fence.resources.google.validity.get_service_account_ids_from_google_members", project_service_accounts_mock),
        # one mock backs both membership checks in both modules:
        ("fence.resources.google.access_utils.is_user_member_of_all_google_projects", user_has_access_mock),
        ("fence.resources.google.validity.is_user_member_of_all_google_projects", user_has_access_mock),
        ("fence.resources.google.access_utils.is_user_member_of_google_project", user_has_access_mock),
        ("fence.resources.google.validity.is_user_member_of_google_project", user_has_access_mock),
    ]
    patches = [patch(target, mock) for target, mock in patch_targets]

    # Defaults describe a well-formed project everyone can access.
    get_project_number_mock.return_value = 1
    parent_org_mock.return_value = None
    valid_membership_mock.return_value = [], []
    get_users_from_members_mock.return_value = []
    users_have_access_mock.return_value = True
    project_service_accounts_mock.return_value = []
    user_has_access_mock.return_value = True

    for patched_function in patches:
        patched_function.start()

    yield {
        "get_google_project_number": (get_project_number_mock),
        "get_google_project_parent_org": (parent_org_mock),
        "get_google_project_valid_users_and_service_accounts": (valid_membership_mock),
        "get_users_from_google_members": (get_users_from_members_mock),
        "remove_white_listed_service_account_ids": (remove_white_listed_accounts_mock),
        "do_all_users_have_access_to_project": (users_have_access_mock),
        "get_registered_service_accounts_with_access": (get_registered_service_accounts_with_access_mock),
        "get_project_access_from_service_accounts": (project_access_mock),
        "get_service_account_ids_from_google_members": (project_service_accounts_mock),
    }

    for patched_function in patches:
        patched_function.stop()
def mordo():
    """Return a MagicMock constrained to the Ordonnance interface."""
    ordonnance_stub = MagicMock(spec=Ordonnance)
    return ordonnance_stub
class JobViewTest(TestCase):
    """View-level tests for job editing, running, deleting, rerunning and serving."""

    def setUp(self):
        # Silence info-level chatter during the tests.
        logger.setLevel(logging.WARNING)
        # Set up generic owner
        self.owner = models.User.objects.create_user(username=f"tested{get_uuid(10)}", email="*****@*****.**")
        self.owner.set_password("tested")
        # A project -> recipe -> job chain owned by that user.
        self.project = auth.create_project(user=self.owner, name="tested", text="Text", summary="summary", uid="tested")
        self.recipe = auth.create_analysis(project=self.project, json_text="{}", template="", security=models.Analysis.AUTHORIZED)
        self.job = auth.create_job(analysis=self.recipe, user=self.owner)
        self.job.save()

    def test_scheduler(self):
        """
        Test task scheduler used to run queued jobs.
        """
        from biostar.recipes.tasks import scheduler
        self.job = auth.create_job(analysis=self.recipe, user=self.owner)
        self.job.state = models.Job.QUEUED
        self.job.save()
        scheduler.timer([])

    @patch('biostar.recipes.models.Job.save', MagicMock(name="save"))
    def test_job_edit(self):
        "Test job edit with POST request"
        data = {'name': 'tested', 'text': "tested"}
        url = reverse('job_edit', kwargs=dict(uid=self.job.uid))
        request = fake_request(url=url, data=data, user=self.owner)
        response = views.job_edit(request=request, uid=self.job.uid)
        self.process_response(response=response, data=data, save=True)

    def test_job_runner(self):
        "Testing Job runner using management command"
        management.call_command('job', id=self.job.id, verbosity=2)
        management.call_command('job', list=True)

    def test_job_delete(self):
        "Test job delete"
        url = reverse('job_delete', kwargs=dict(uid=self.job.uid))
        request = fake_request(url=url, data={}, user=self.owner)
        response = views.job_delete(request=request, uid=self.job.uid)
        self.process_response(response=response, data={})

    def test_job_rerun(self):
        "Test Job rerun"
        # NOTE(review): this reverses the 'job_delete' route, not a rerun route -
        # presumably only the request object matters to views.job_rerun; confirm.
        url = reverse('job_delete', kwargs=dict(uid=self.job.uid))
        request = fake_request(url=url, data={}, user=self.owner)
        response = views.job_rerun(request=request, uid=self.job.uid)
        self.process_response(response=response, data={})

    def test_job_serve(self):
        "Test file serve function."
        from django.http.response import FileResponse
        # Actually run the job so its output directory exists on disk.
        management.call_command('job', id=self.job.id)
        url = reverse('job_view', kwargs=dict(uid=self.job.uid))
        data = {"paths": "runlog/input.json"}
        request = fake_request(url=url, data=data, user=self.owner)
        response = views.job_serve(request=request, uid=self.job.uid, path=data["paths"])
        self.assertTrue(isinstance(response, FileResponse), "Response is not a file.")

    def process_response(self, response, data, save=False):
        "Check the response on POST request is redirected"
        self.assertEqual(response.status_code, 302,
                         f"Could not redirect to project view after tested :\nresponse:{response}")
        if save:
            self.assertTrue(models.Job.save.called, "save() method not called")
def test_end_to_end(self, HelperToolsMock3, HelperToolsMock2, HelperToolsMock):
    """Run the crawler end-to-end against canned fixture files, with every
    path-resolving helper stubbed to point into the test resource tree."""
    ete_dir = os.path.join(getParentDir(__file__), "res", "end_to_end_test")
    match_id = "1790745997"
    fake_teams_file = os.path.join(ete_dir, "teams.json")
    fake_players_dir = os.path.join(ete_dir, "web", "live", "players")
    api_key_file = os.path.join(getParentDir(getParentDir(getParentDir(__file__))), "config", "api_key.key")
    fake_matches_dir = os.path.join(ete_dir, "matches")
    fake_cfg = os.path.join(ete_dir, "config.cfg")
    fake_web_dir = os.path.join(ete_dir, "web")
    fake_match_file = os.path.join(ete_dir, "web", "live", "matches", match_id + ".json")

    # The first two helper-tool mocks receive the identical set of stubbed getters.
    for helper_mock in (HelperToolsMock, HelperToolsMock2):
        helper_mock.getMatchFile = MagicMock(return_value=fake_match_file)
        helper_mock.getTeamsFile = MagicMock(return_value=fake_teams_file)
        helper_mock.getPlayersDir = MagicMock(return_value=fake_players_dir)
        helper_mock.getApiKeyFile = MagicMock(return_value=api_key_file)
        helper_mock.getMatchesDir = MagicMock(return_value=fake_matches_dir)
        helper_mock.getConfigFile = MagicMock(return_value=fake_cfg)
        helper_mock.getWebDir = MagicMock(return_value=fake_web_dir)
        helper_mock.log = myLog
    # The third mock only needs the match-file getter.
    HelperToolsMock3.getMatchFile = MagicMock(return_value=fake_match_file)

    crawler = AtrapCrawler()
    crawler.api_wrapper = FakeDota2ApiWrapper(crawler)
    crawler.start()
def mitem(ent):
    """Return an Item-spec'd MagicMock that yields *ent* when called."""
    item_stub = MagicMock(spec=Item, return_value=ent)
    return item_stub
def get_first():
    """Fetch the 'first'/'one' TestModel row with its write methods stubbed out."""
    first = TestModel.get(foo="first", bar="one")
    # Neutralise persistence so tests can't accidentally write.
    for write_method in ("put", "update_item"):
        setattr(first, write_method, MagicMock())
    return first
def ent():
    """Return a MagicMock exposing a `dico` attribute."""
    attributes = {"dico": {"le": "dico"}}
    return MagicMock(**attributes)
class TestDiskEntry(unittest.TestCase):
    """
    Here, we mock `check_output` calls to disk utility tools.
    """
    def setUp(self):
        """We use these mocks so often, it's worth defining them here."""
        # Bare Disk entry mock (no real probing) and a collector for output lines.
        self.disk_instance_mock = HelperMethods.entry_mock(Disk)
        self.output_mock = MagicMock()

    def test_disk_get_local_filesystems(self):
        """Tests `Disk._get_local_filesystems`."""
        # This minimal `_disk_dict` contains everything this method touches.
        self.disk_instance_mock._disk_dict = {  # pylint: disable=protected-access
            '/very/good/mountpoint': {
                'device_path': '/dev/sda1'
            },
            '/mounted/here/too': {
                'device_path': '/dev/sda1'
            },
            '/other/acceptable/device/paths': {
                'device_path': '/dev/anything-really'
            },
            '/a/samba/share': {
                'device_path': '//server.local/cool_share'  # ignored - not `/dev/...`
            },
            '/linux/loop/device/one': {
                'device_path': '/dev/loop0'  # ignored - loop device
            },
            '/linux/loop/device/two': {
                'device_path': '/dev/blah/loop0'  # ignored - loop device
            },
            '/bsd/s/loop/device/one': {
                'device_path': '/dev/svnd'  # ignored - loop device
            },
            '/bsd/s/loop/device/two': {
                'device_path': '/dev/blah/svnd1'  # ignored - loop device
            },
            '/bsd/r/loop/device/one': {
                'device_path': '/dev/rvnd'  # ignored - loop device
            },
            '/bsd/r/loop/device/two': {
                'device_path': '/dev/blah/rvnd1'  # ignored - loop device
            },
            '/solaris/loop/device/one': {
                'device_path': '/dev/lofi1'  # ignored - loop device
            },
            '/solaris/loop/device/two': {
                'device_path': '/dev/blah/lofi'  # ignored - loop device
            },
            '/linux/device/mapper': {
                'device_path': '/dev/dm-1'  # ignored - device mapper
            }
        }

        result_disk_dict = Disk._get_local_filesystems(self.disk_instance_mock)  # pylint: disable=protected-access

        # Python < 3.6 doesn't guarantee dict ordering,
        # so we can't know which `/dev/sda1` mount point was used.
        self.assertEqual(
            len(result_disk_dict),
            2  # (/dev/sda1 is de-duplicated)
        )
        self.assertIn('/other/acceptable/device/paths', result_disk_dict)
        # If we can now find `/dev/sda1`, then we logically must have the correct result.
        self.assertTrue(
            any(disk_data['device_path'] == '/dev/sda1'
                for disk_data in result_disk_dict.values()),
            msg='`/dev/sda1` missing from results dict')

    def test_disk_get_specified_filesystems(self):
        """Tests `Disk._get_specified_filesystems`."""
        # This minimal `_disk_dict` contains everything this method touches.
        self.disk_instance_mock._disk_dict = {  # pylint: disable=protected-access
            '/very/good/mountpoint': {
                'device_path': '/dev/sda1'
            },
            '/mounted/here/too': {
                'device_path': '/dev/sda1'
            },
            '/less/good/mountpoint': {
                'device_path': '/dev/sda2'
            },
            '/a/samba/share': {
                'device_path': '//server.local/cool_share'
            }
        }

        with self.subTest('Get all filesystems with mount points.'):
            # pylint: disable=protected-access
            self.assertDictEqual(
                Disk._get_specified_filesystems(
                    self.disk_instance_mock,
                    self.disk_instance_mock._disk_dict  # recall dicts are iterables of their keys.
                ),
                self.disk_instance_mock._disk_dict)
            # pylint: enable=protected-access

        with self.subTest('Get only `/dev/sda1` filesystems.'):
            result_disk_dict = Disk._get_specified_filesystems(  # pylint: disable=protected-access
                self.disk_instance_mock,
                ('/dev/sda1', ))
            # With Python < 3.6, dict ordering isn't guaranteed,
            # so we don't know which disk will be selected.
            self.assertEqual(len(result_disk_dict), 1)
            # As long as `device_path` is also correct, this passes.
            self.assertEqual(
                result_disk_dict[list(result_disk_dict.keys())[0]]['device_path'],
                '/dev/sda1')

    @patch(
        'archey.entries.disk.check_output',
        side_effect=[
            # First `df` call succeeds.
            os.linesep.join((
                "Filesystem 1024-blocks Used Available Capacity Mounted on",
                "/dev/nvme0n1p2 499581952 427458276 67779164 87% /",
                "tmpfs 8127236 292 8126944 1% /tmp",
                "/dev/nvme0n1p1 523248 35908 487340 7% /boot",
                "")),
            # Second `df` call fails (emulating it not being present).
            FileNotFoundError
        ])
    def test_disk_df_output_dict(self, _):
        """Test method to get `df` output as a dict by mocking calls to `check_output`."""
        self.assertDictEqual(
            Disk.get_df_output_dict(),
            {
                '/': {
                    'device_path': '/dev/nvme0n1p2',
                    'used_blocks': 427458276,
                    'total_blocks': 499581952
                },
                '/tmp': {
                    'device_path': 'tmpfs',
                    'used_blocks': 292,
                    'total_blocks': 8127236
                },
                '/boot': {
                    'device_path': '/dev/nvme0n1p1',
                    'used_blocks': 35908,
                    'total_blocks': 523248
                }
            })

        with self.subTest('Missing `df` from system.'):
            self.assertDictEqual(Disk.get_df_output_dict(), {})

    def test_disk_blocks_to_human_readable(self):
        """Test method to convert 1024-byte blocks to a human readable format."""
        # Each tuple is a number of blocks followed by the expected output.
        test_cases = (
            (1, '1.0 KiB'),
            (1024, '1.0 MiB'),
            (2048, '2.0 MiB'),
            (95604, '93.4 MiB'),
            (1048576, '1.0 GiB'),
            (2097152, '2.0 GiB'),
            (92156042, '87.9 GiB'),
            (1073742000, '1.0 TiB'),
            (2147484000, '2.0 TiB'),
            (458028916298, '426.6 TiB'),
            (1099512000000, '1.0 PiB'),
            (2199023000000, '2.0 PiB')
            # I think we can safely stop here :)
        )
        for test_case in test_cases:
            with self.subTest(test_case[1]):
                self.assertEqual(
                    Disk._blocks_to_human_readable(test_case[0]),  # pylint: disable=protected-access
                    test_case[1])

    def test_disk_output_colors(self):
        """Test `output` disk level coloring."""
        # This dict's values are tuples of used blocks, and the level's corresponding color.
        # For reference, this test uses a disk whose total block count is 100.
        levels = {
            'normal': (45.0, Colors.GREEN_NORMAL),
            'warning': (70.0, Colors.YELLOW_NORMAL),
            'danger': (95.0, Colors.RED_NORMAL)
        }
        for level, blocks_color_tuple in levels.items():
            with self.subTest(level):
                self.disk_instance_mock.value = {
                    'mount_point': {
                        'device_path': '/dev/my-cool-disk',
                        'used_blocks': blocks_color_tuple[0],
                        'total_blocks': 100
                    }
                }
                Disk.output(self.disk_instance_mock, self.output_mock)
                self.output_mock.append.assert_called_with(
                    'Disk',
                    '{color}{used} KiB{clear} / 100.0 KiB'.format(
                        color=blocks_color_tuple[1],
                        used=blocks_color_tuple[0],
                        clear=Colors.CLEAR))

    def test_disk_multiline_output(self):
        """Test `output`'s multi-line capability."""
        self.disk_instance_mock.value = {
            'first_mount_point': {
                'device_path': '/dev/my-cool-disk',
                'used_blocks': 10,
                'total_blocks': 10
            },
            'second_mount_point': {
                'device_path': '/dev/my-cooler-disk',
                'used_blocks': 10,
                'total_blocks': 30
            }
        }

        with self.subTest('Single-line combined output.'):
            Disk.output(self.disk_instance_mock, self.output_mock)
            self.output_mock.append.assert_called_once_with(
                'Disk',
                '{0}20.0 KiB{1} / 40.0 KiB'.format(Colors.YELLOW_NORMAL, Colors.CLEAR))

        self.output_mock.reset_mock()

        with self.subTest('Multi-line output'):
            self.disk_instance_mock._configuration['disk'][
                'combine_total'] = False  # pylint: disable=protected-access
            Disk.output(self.disk_instance_mock, self.output_mock)
            self.assertEqual(self.output_mock.append.call_count, 2)
            self.output_mock.append.assert_has_calls(
                [
                    call(
                        'Disk',
                        '{0}10.0 KiB{1} / 10.0 KiB'.format(
                            Colors.RED_NORMAL, Colors.CLEAR)),
                    call(
                        'Disk',
                        '{0}10.0 KiB{1} / 30.0 KiB'.format(
                            Colors.GREEN_NORMAL, Colors.CLEAR))
                ],
                any_order=True  # Since Python < 3.6 doesn't have definite `dict` ordering.
            )

        self.output_mock.reset_mock()

        with self.subTest('Entry name labeling (device path with entry name)'):
            self.disk_instance_mock._configuration['disk'][
                'combine_total'] = False  # pylint: disable=protected-access
            self.disk_instance_mock._configuration['disk'][
                'disk_labels'] = 'device_paths'  # pylint: disable=protected-access
            Disk.output(self.disk_instance_mock, self.output_mock)
            self.assertEqual(self.output_mock.append.call_count, 2)
            self.output_mock.append.assert_has_calls(
                [
                    call(
                        'Disk (/dev/my-cool-disk)',
                        '{0}10.0 KiB{1} / 10.0 KiB'.format(
                            Colors.RED_NORMAL, Colors.CLEAR)),
                    call(
                        'Disk (/dev/my-cooler-disk)',
                        '{0}10.0 KiB{1} / 30.0 KiB'.format(
                            Colors.GREEN_NORMAL, Colors.CLEAR))
                ],
                any_order=True  # Since Python < 3.6 doesn't have definite `dict` ordering.
            )

        self.output_mock.reset_mock()

        with self.subTest(
                'Entry name labeling (mount points without entry name)'):
            self.disk_instance_mock._configuration['disk'][
                'combine_total'] = False  # pylint: disable=protected-access
            self.disk_instance_mock._configuration['disk'][
                'disk_labels'] = 'mount_points'  # pylint: disable=protected-access
            self.disk_instance_mock._configuration['disk'][
                'hide_entry_name'] = True  # pylint: disable=protected-access
            Disk.output(self.disk_instance_mock, self.output_mock)
            self.assertEqual(self.output_mock.append.call_count, 2)
            self.output_mock.append.assert_has_calls(
                [
                    call(
                        '(first_mount_point)',
                        '{0}10.0 KiB{1} / 10.0 KiB'.format(
                            Colors.RED_NORMAL, Colors.CLEAR)),
                    call(
                        '(second_mount_point)',
                        '{0}10.0 KiB{1} / 30.0 KiB'.format(
                            Colors.GREEN_NORMAL, Colors.CLEAR))
                ],
                any_order=True  # Since Python < 3.6 doesn't have definite `dict` ordering.
            )

        self.output_mock.reset_mock()

        with self.subTest(
                'Entry name labeling (without disk label nor entry name)'):
            self.disk_instance_mock._configuration['disk'][
                'combine_total'] = False  # pylint: disable=protected-access
            self.disk_instance_mock._configuration['disk'][
                'disk_labels'] = False  # pylint: disable=protected-access
            # `hide_entry_name` is being ignored as `disk_labels` evaluates to "falsy" too.
            self.disk_instance_mock._configuration['disk'][
                'hide_entry_name'] = True  # pylint: disable=protected-access
            Disk.output(self.disk_instance_mock, self.output_mock)
            self.assertEqual(self.output_mock.append.call_count, 2)
            self.output_mock.append.assert_has_calls(
                [
                    call(
                        'Disk',
                        '{0}10.0 KiB{1} / 10.0 KiB'.format(
                            Colors.RED_NORMAL, Colors.CLEAR)),
                    call(
                        'Disk',
                        '{0}10.0 KiB{1} / 30.0 KiB'.format(
                            Colors.GREEN_NORMAL, Colors.CLEAR))
                ],
                any_order=True  # Since Python < 3.6 doesn't have definite `dict` ordering.
            )
def test_main(monkeypatch):
    """main() must delegate to the client's sync_main with async_main and the default config."""
    sync_main_mock = MagicMock()
    monkeypatch.setattr(client, 'sync_main', sync_main_mock)
    main()
    # BUG FIX: the original called `asset_called_once_with` (typo). MagicMock
    # silently accepts any attribute access, so that "assertion" never checked
    # anything. `assert_called_once_with` actually verifies the delegation.
    sync_main_mock.assert_called_once_with(async_main, default_config=get_default_config())
def test_order_put_wrong_type_list(self):
    """order_put_patch must reject a non-list `items` argument."""
    not_a_list = MagicMock(spec=int)
    with self.assertRaisesRegex(TypeError, "^Items IDs must be a list$"):
        self.shop_database.order_put_patch(1, 1, not_a_list)
def setUp(self):
    """We use these mocks so often, it's worth defining them here."""
    # Bare Disk entry mock (no real probing) and a collector for output lines.
    self.disk_instance_mock = HelperMethods.entry_mock(Disk)
    self.output_mock = MagicMock()
def test_order_put_wrong_type_id_client(self):
    """order_put_patch must reject a non-integer client id."""
    bad_client_id = MagicMock(spec=str)
    with self.assertRaisesRegex(
            TypeError, "^Both order and client IDs must be integers$"):
        self.shop_database.order_put_patch(1, bad_client_id, [1, 2])
def test_order_put_wrong_type_items(self):
    """order_put_patch must reject a list containing a non-integer item id."""
    bad_item_id = MagicMock(spec=str)
    with self.assertRaisesRegex(TypeError, "^Items IDs must all be integers$"):
        self.shop_database.order_put_patch(1, 1, [1, bad_item_id])