async def test_delete_categorie(self, get_repository_mock,
                                verify_if_exists_and_is_not_deleted_mock):
    # Arrange
    categorie_mock = Mock()
    repository_mock = Mock()
    repository_mock.get_by_id.side_effect = [categorie_mock]
    get_repository_mock.side_effect = [repository_mock]
    verify_if_exists_and_is_not_deleted_mock.return_value = True

    # Action
    await delete_categorie('1')

    # Asserts
    get_repository_mock_calls = get_repository_mock.mock_calls
    self.assertEqual(len(get_repository_mock_calls), 1)
    get_repository_mock.assert_has_calls([call()])

    repository_mock_calls = repository_mock.mock_calls
    self.assertEqual(len(repository_mock_calls), 2)
    repository_mock.assert_has_calls(
        [call.get_by_id(Categorie, '1'), call.save(categorie_mock)])

    categorie_mock_calls = categorie_mock.mock_calls
    self.assertEqual(len(categorie_mock_calls), 1)
    categorie_mock.assert_has_calls([call.delete()])

    verify_if_exists_and_is_not_deleted_mock_calls = verify_if_exists_and_is_not_deleted_mock.mock_calls
    self.assertEqual(len(verify_if_exists_and_is_not_deleted_mock_calls), 1)
    verify_if_exists_and_is_not_deleted_mock.assert_has_calls(
        [call(categorie_mock)])
async def test_get_categorie_by_id(
    self,
    get_repository_mock,
    verify_if_exists_and_is_not_deleted_mock,
    categorie_response_mock,
):
    # Arrange
    repository_mock = Mock()
    repository_mock.get_by_id.return_value = 'categorie'
    get_repository_mock.side_effect = [repository_mock]
    verify_if_exists_and_is_not_deleted_mock.return_value = True
    categorie_response_mock.from_domain.return_value = 'response'

    # Action
    response = await get_categorie_by_id('1')

    # Asserts
    self.assertEqual(response, 'response')

    get_repository_mock_calls = get_repository_mock.mock_calls
    self.assertEqual(len(get_repository_mock_calls), 1)
    get_repository_mock.assert_has_calls([call()])

    repository_mock_calls = repository_mock.mock_calls
    self.assertEqual(len(repository_mock_calls), 1)
    repository_mock.assert_has_calls([call.get_by_id(Categorie, '1')])

    verify_if_exists_and_is_not_deleted_mock_calls = verify_if_exists_and_is_not_deleted_mock.mock_calls
    self.assertEqual(len(verify_if_exists_and_is_not_deleted_mock_calls), 1)
    verify_if_exists_and_is_not_deleted_mock.assert_has_calls(
        [call('categorie')])
async def test(self, release_mock, stack_parse_mock):
    release_mock.return_value = "100.0.0"  # so we ignore the kernel check

    collecter = perf.MemoryEvents(self.time, None)
    await collecter.collect()

    self.create_mock.assert_has_calls([
        asynctest.call(
            "perf record -ag -o " + perf.MemoryEvents._PERF_FILE_NAME +
            " -e '{mem-loads,mem-stores}' sleep " + str(self.time),
            stderr=self.pipe_mock),
        asynctest.call().communicate(),
        asynctest.call("perf script -i " + perf.MemoryEvents._PERF_FILE_NAME,
                       stdout=self.pipe_mock, stderr=self.pipe_mock),
        asynctest.call().communicate()
    ])

    # self.log_mock.error.assert_has_calls([
    #     asynctest.call('test_err1'),
    #     asynctest.call('test_err2')
    # ])

    self.os_mock.remove.assert_called_once_with(
        self.os_mock.getcwd.return_value + "/" +
        perf.MemoryEvents._PERF_FILE_NAME)

    self.strio_mock.assert_called_once_with('test_out2')
    stack_parse_mock.assert_called_once_with(self.strio_mock('test_out2'))
    stack_parse_mock.return_value.stack_collapse.assert_called_once_with()
async def test_logfile(self, mock_book, mock_connect):
    mock_connect.return_value.aenter.receive_str = CoroutineMock()
    mock_connect.return_value.aenter.send_json = CoroutineMock()
    message_expected = {
        "type": "done",
        "side": "sell",
        "order_id": "4eef1226-4b38-422c-a5b1-56def7107f9a",
        "reason": "canceled",
        "product_id": "ETH-USD",
        "price": "2601.76000000",
        "remaining_size": "3.09000000",
        "sequence": 2,
        "time": "2017-06-25T11:23:14.775000Z"
    }
    mock_connect.return_value.aenter.receive_str.side_effect = [
        json.dumps(message_expected)
    ]
    product_id = 'ETH-USD'
    book = {'bids': [], 'asks': [], 'sequence': 1}
    mock_book.return_value = book

    calls = [call(f'B {product_id} {json.dumps(book)}\n')]
    with patch('aiofiles.open',
               new_callable=AsyncContextManagerMock) as mock_open:
        mock_open.return_value.aenter.write = CoroutineMock()
        mock_write = mock_open.return_value.aenter.write

        async with gdax.orderbook.OrderBook(
                [product_id], trade_log_file_path='trades.txt') as orderbook:
            mock_write.assert_has_calls(calls)

            await orderbook.handle_message()
            calls.append(call(f'W {json.dumps(message_expected)}\n'))
            mock_write.assert_has_calls(calls)
async def test_get_file_content_calls(self, mock_get_pr_links,
                                      mock_validate_pr_link,
                                      mock_get_single_content):
    manager = PullRequestsGithubManager("valid-pat")

    mock_get_pr_links.return_value = {
        'base_url': 'http://base_url.com',
        'head_url': 'http://head_url.com',
        'commit_id': '123sha'
    }
    mock_validate_pr_link.side_effect = [
        'http://base_url_download.com', 'http://head_url_download.com'
    ]
    mock_get_single_content.side_effect = ['base content', 'head content']

    result = await manager.get_file_content("valid-prid", "valid-filename")

    mock_get_pr_links.assert_called_once_with("valid-prid", "valid-filename")

    assert mock_validate_pr_link.call_count == 2
    mock_validate_pr_link.assert_has_calls(
        [call("http://base_url.com"), call("http://head_url.com")],
        any_order=True)

    assert mock_get_single_content.call_count == 2
    mock_get_single_content.assert_has_calls([
        call("http://base_url_download.com"),
        call("http://head_url_download.com")
    ], any_order=True)

    assert result == {
        'base_content': 'base content',
        'head_content': 'head content',
        'commit_id': '123sha'
    }
async def test_heartbeat(self, mock_book, mock_connect):
    mock_connect.return_value.aenter.send_json = CoroutineMock()
    mock_connect.return_value.aenter.receive_str = CoroutineMock()
    mock_book.return_value = {'bids': [], 'asks': [], 'sequence': 1}
    message_expected = {
        "type": "heartbeat",
        "last_trade_id": 17393422,
        "product_id": "ETH-USD",
        "sequence": 2,
        "time": "2017-06-25T11:23:14.838000Z"
    }
    mock_connect.return_value.aenter.receive_str.side_effect = [
        json.dumps(message_expected),
    ]

    product_ids = ['ETH-USD']
    async with gdax.orderbook.OrderBook(product_ids,
                                        use_heartbeat=True) as orderbook:
        subscribe_msg = {'type': 'subscribe', 'product_ids': product_ids}
        heartbeat_msg = {'type': 'heartbeat', 'on': True}
        calls = [call(subscribe_msg), call(heartbeat_msg)]
        mock_connect.return_value.aenter.send_json.assert_has_calls(calls)

        message = await orderbook.handle_message()
        assert message == message_expected
async def test_poster_polls_and_posts(mocker):
    event1 = K8sEvent(type='type1', reason='reason1', message='message1', ref=REF1)
    event2 = K8sEvent(type='type2', reason='reason2', message='message2', ref=REF2)
    event_queue = asyncio.Queue()
    event_queue.put_nowait(event1)
    event_queue.put_nowait(event2)

    # A way to cancel the `while True` cycle when we need it (ASAP).
    def _cancel(*args, **kwargs):
        if post_event.call_count >= 2:
            raise asyncio.CancelledError()

    post_event = mocker.patch('kopf.clients.events.post_event', side_effect=_cancel)

    backbone = Backbone()
    await backbone.fill(resources=[EVENTS])

    # A way to cancel the `while True` cycle by timing, even if the routines are not called.
    with pytest.raises(asyncio.CancelledError):
        async with async_timeout.timeout(0.5):
            await poster(event_queue=event_queue, backbone=backbone)

    assert post_event.call_count == 2
    assert post_event.await_count == 2
    post_event.assert_has_calls([
        call(ref=REF1, type='type1', reason='reason1', message='message1'),
        call(ref=REF2, type='type2', reason='reason2', message='message2'),
    ])
def test_generate_events_empty_dict(self, output_mock):
    """ Test _generate_events with no dict - tests failure to find port mapping """
    tracer = object.__new__(ebpf.TCPTracer)
    tracer.options = None

    data = StringIO(
        "header1\n"
        "time type pid comm ip s_addr d_addr s_port d_port size netns\n"
        "1 A 2 comm1 4 127. 127. 3 4 1 5\n"
        "6 B 7 comm2 4 127. 127. 4 3 1 5\n"
        "1 A 2 comm3 4 x x 3 4 1 5\n"
    )

    result = list(tracer._generate_events(data, {}))
    self.assertEqual([], result)

    expected_errors = [
        asynctest.call(
            text="IPC: Could not find destination port PID/comm. "
                 "Check log for details.",
            description="Could not find destination port PID/comm: "
                        "Time: 1 Type: A Source PID: 2 "
                        "Source comm: comm1 Source port : 3 "
                        "Dest port: 4 Net namespace: 5"),
        asynctest.call(
            text="IPC: Could not find destination port PID/comm. "
                 "Check log for details.",
            description="Could not find destination port PID/comm: "
                        "Time: 6 Type: B Source PID: 7 "
                        "Source comm: comm2 Source port : 4 "
                        "Dest port: 3 Net namespace: 5")
    ]

    output_mock.error_.assert_has_calls(expected_errors)
async def test_partial_writes(temp_file, loop):
    ctx = asynctest.Mock(caio.AbstractContext)
    ctx.loop = loop
    ctx.fdsync = asynctest.CoroutineMock(return_value=None)
    ctx.write = asynctest.CoroutineMock(side_effect=asyncio.InvalidStateError)

    async with AIOFile(temp_file, 'w', context=ctx) as afp:
        # 1
        return_iter = iter((3, 4))
        ctx.write.side_effect = lambda *_, **__: next(return_iter)
        await afp.write('aiofile', offset=0)

        # 2
        return_iter = iter((12, 1, 6))
        ctx.write.side_effect = lambda *_, **__: next(return_iter)
        await afp.write('test_partial_writes', offset=8)

        assert ctx.write.await_args_list == [
            # 1
            asynctest.call(b'aiofile', afp.fileno(), 0),
            asynctest.call(b'file', afp.fileno(), 3),
            # 2
            asynctest.call(b'test_partial_writes', afp.fileno(), 8),
            asynctest.call(b'_writes', afp.fileno(), 20),
            asynctest.call(b'writes', afp.fileno(), 21)
        ]
async def test_normal(self, release_mock, mono_patch):
    mono_patch.side_effect = [0, 1, 2]  # so we get exactly 2 collections
    release_mock.return_value = "100.0.0"  # so we ignore the kernel check

    collecter = smem.MemoryGraph(self.time)
    data = await collecter.collect()
    datapoints = list(data.datum_generator)

    self.async_mock.create_subprocess_shell.assert_has_calls([
        asynctest.call("smem -c \"name pss\"", stdout=self.pipe_mock,
                       stderr=self.pipe_mock),
        asynctest.call().communicate(),
        asynctest.call("smem -c \"name pss\"", stdout=self.pipe_mock,
                       stderr=self.pipe_mock),
        asynctest.call().communicate(),
    ])

    # self.log_mock.error.assert_called_once_with('test_err')

    expected = [
        data_io.PointDatum(x=0.0, y=1.0, info='C'),
        data_io.PointDatum(x=0.0, y=1.0, info='B'),
        data_io.PointDatum(x=0.0, y=1.0, info='A'),
        data_io.PointDatum(x=1.0, y=2.0, info='F'),
        data_io.PointDatum(x=1.0, y=2.0, info='E'),
        data_io.PointDatum(x=1.0, y=2.0, info='D')
    ]

    self.assertEqual(expected, datapoints)
async def test_logger_handlers_are_not_initialized_twice(self):
    handler = Mock(spec=AsyncStreamHandler, level=logging.DEBUG)
    with patch("aiologger.logger.AsyncStreamHandler",
               return_value=handler) as Handler:
        formatter = Mock()
        logger = Logger.with_default_handlers(formatter=formatter)

        await asyncio.gather(
            logger.info("sardinha"),
            logger.info("tilápia"),
            logger.info("xerelete"),
            logger.error("fraldinha"),
        )

        Handler.assert_has_calls([
            call(
                stream=self.write_pipe,
                level=logging.DEBUG,
                formatter=formatter,
                filter=StdoutFilter(),
            ),
            call(
                stream=self.write_pipe,
                level=logging.WARNING,
                formatter=formatter,
            ),
        ])
        await logger.shutdown()
async def test_poster_polls_and_posts(mocker):
    event1 = K8sEvent(type='type1', reason='reason1', message='message1', ref=REF1)
    event2 = K8sEvent(type='type2', reason='reason2', message='message2', ref=REF2)
    event_queue = asyncio.Queue()
    event_queue.put_nowait(event1)
    event_queue.put_nowait(event2)

    # A way to cancel the `while True` cycle when we need it (ASAP).
    def _cancel(*args, **kwargs):
        if post_event.call_count >= 2:
            raise asyncio.CancelledError()

    post_event = mocker.patch('kopf.clients.events.post_event', side_effect=_cancel)

    # A way to cancel the `while True` cycle by timing, even if the routines are not called.
    with pytest.raises(asyncio.CancelledError):
        await asyncio.wait_for(poster(event_queue=event_queue), timeout=0.5)

    assert post_event.call_count == 2
    assert post_event.await_count == 2
    post_event.assert_has_calls([
        call(ref=REF1, type='type1', reason='reason1', message='message1'),
        call(ref=REF2, type='type2', reason='reason2', message='message2'),
    ])
async def test(self, release_mock, stack_parse_mock):
    release_mock.return_value = "100.0.0"  # so we ignore the kernel check

    options = perf.StackTrace.Options(frequency=1, cpufilter="filter")
    collecter = perf.StackTrace(self.time, options)
    await collecter.collect()

    self.create_mock.assert_has_calls([
        asynctest.call("perf record -F " + str(options.frequency) + " " +
                       options.cpufilter + " -g -o " +
                       perf.StackTrace._PERF_FILE_NAME + " -- sleep " +
                       str(self.time),
                       stderr=self.pipe_mock),
        asynctest.call().communicate(),
        asynctest.call("perf script -i " + perf.StackTrace._PERF_FILE_NAME,
                       stdout=self.pipe_mock, stderr=self.pipe_mock),
        asynctest.call().communicate()
    ])

    # self.log_mock.error.assert_has_calls([
    #     asynctest.call('test_err1'),
    #     asynctest.call('test_err2')
    # ])

    self.os_mock.remove.assert_called_once_with(
        self.os_mock.getcwd.return_value + "/" +
        perf.StackTrace._PERF_FILE_NAME)

    self.strio_mock.assert_called_once_with('test_out2')
    stack_parse_mock.assert_called_once_with(self.strio_mock('test_out2'))
    stack_parse_mock.return_value.stack_collapse.assert_called_once_with()
async def test_watch_timeout(self):
    fake_resp = CoroutineMock()
    fake_resp.content.readline = CoroutineMock()

    mock_event = {
        "type": "ADDED",
        "object": {
            "metadata": {
                "name": "test1555",
                "resourceVersion": "1555"
            },
            "spec": {},
            "status": {}
        }
    }

    fake_resp.content.readline.side_effect = [
        json.dumps(mock_event).encode('utf8'),
        asyncio.TimeoutError(),
        b""
    ]

    fake_api = Mock()
    fake_api.get_namespaces = CoroutineMock(return_value=fake_resp)
    fake_api.get_namespaces.__doc__ = ':return: V1NamespaceList'

    watch = kubernetes_asyncio.watch.Watch()
    async for e in watch.stream(fake_api.get_namespaces):  # noqa
        pass

    fake_api.get_namespaces.assert_has_calls([
        call(_preload_content=False, watch=True),
        call(_preload_content=False, watch=True, resource_version='1555')
    ])
async def test_watch_timeout_with_resource_version(self):
    fake_resp = CoroutineMock()
    fake_resp.content.readline = CoroutineMock()
    fake_resp.release = Mock()

    fake_resp.content.readline.side_effect = [asyncio.TimeoutError(), b""]

    fake_api = Mock()
    fake_api.get_namespaces = CoroutineMock(return_value=fake_resp)
    fake_api.get_namespaces.__doc__ = ':return: V1NamespaceList'

    watch = kubernetes_asyncio.watch.Watch()
    async with watch.stream(fake_api.get_namespaces,
                            resource_version='10') as stream:
        async for e in stream:  # noqa
            pass

    # all calls use the passed resource version
    fake_api.get_namespaces.assert_has_calls([
        call(_preload_content=False, watch=True, resource_version='10'),
        call(_preload_content=False, watch=True, resource_version='10')
    ])
    fake_resp.release.assert_called_once_with()
    self.assertEqual(watch.resource_version, '10')
async def test_shutdown_is_registered_as_a_signal_handler(self):
    with patch.object(self.loop, "add_signal_handler") as add_signal_handler:
        app = self.appCls()

        add_signal_handler.assert_has_calls([
            call(Signals.SIGINT, app.shutdown),
            call(Signals.SIGTERM, app.shutdown),
        ])
async def test_should_add_behaviours(self, receive_data, train_predictor):
    self.analyser.add_behaviour = Mock()
    await self.analyser.setup()
    self.assertEqual(self.analyser.add_behaviour.call_count, 2)
    self.analyser.add_behaviour.assert_has_calls(
        [call(receive_data()), call(train_predictor(period=3))],
        any_order=True)
async def test_collector_one_requester(self):
    print(self.requester1.page)
    collector = Collector(10, [self.requester1])
    posts, max_page = await collector.run_requests()
    expected = await create_posts() + await create_posts()
    self.requester1.request.assert_has_calls([call(), call()])
    self.assertEqual(posts, expected)
    self.assertEqual(max_page, 3)
async def test_success(self, release_mock, re_mock):
    """ Test successful regex matching. """
    # Set up mocks
    release_mock.return_value = "100.0.0"  # so we ignore the kernel check
    self.strio_mock.return_value = StringIO('test_out2')
    match_mock = re_mock.match.return_value
    match_mock.group.side_effect = [
        "111.999", "test_pid", "test_name", "4", "test_event"
    ]

    collecter = perf.SchedulingEvents(self.time)
    data = await collecter.collect()
    sched_events = list(data.datum_generator)

    self.create_mock.assert_has_calls([
        asynctest.call("perf sched record -o " +
                       perf.SchedulingEvents._PERF_FILE_NAME +
                       " sleep " + str(self.time),
                       stderr=self.pipe_mock),
        asynctest.call().communicate(),
        asynctest.call("perf sched script -i " +
                       perf.SchedulingEvents._PERF_FILE_NAME +
                       " -F 'comm,pid,cpu,time,event'",
                       stdout=self.pipe_mock, stderr=self.pipe_mock),
        asynctest.call().communicate()
    ])

    # self.log_mock.error.assert_has_calls([
    #     asynctest.call("test_err1"),
    #     asynctest.call("test_err2")
    # ])

    self.os_mock.remove.assert_called_once_with(
        self.os_mock.getcwd.return_value + "/" +
        perf.SchedulingEvents._PERF_FILE_NAME)

    re_mock.match.assert_called_once_with(
        r"\s*"
        r"(?P<name>\S+(\s+\S+)*)\s+"
        r"(?P<pid>\d+)\s+"
        r"\[(?P<cpu>\d+)\]\s+"
        r"(?P<time>\d+.\d+):\s+"
        r"(?P<event>\S+)",
        "test_out2")

    expected_event = data_io.EventDatum(
        specific_datum={
            'pid': 'test_pid', 'cpu': '4', 'comm': 'test_name'
        },
        # "test_name (pid: test_pid)", "cpu 4"),
        time=111000999,
        type="test_event",
        connected=None)

    self.assertEqual([expected_event], sched_events)
async def test_all_matcher_with_multiple_submatchers(self):
    resolver = Mock()
    submatcher = Mock(match=CoroutineMock(side_effect=(True, False)))
    matcher = AllMatcher(resolver, [submatcher, submatcher])

    request = Mock(Request)
    self.assertFalse(await matcher.match(request))

    submatcher.match.assert_has_calls([call(request), call(request)])
    self.assertEqual(submatcher.match.call_count, 2)
async def test_send_emails_no_retry(self):
    params = (
        ("user", "pass"),
        {"reply-to": "*****@*****.**"},
        {"name": "Admin", "email": "*****@*****.**"},
    )
    self.settings_storage.get_gateway_credentials_headers_and_from.return_value = (
        params
    )
    send_email_batch_mock = CoroutineMock(return_value=[])
    service = SendEmailService(
        self.web_client, self.email_client, self.settings_storage, 2
    )
    with patch.object(service, "send_email_batch", send_email_batch_mock):
        await service.send_emails(
            [
                {"id": i, "email": f"guy_{i}@co.co", "name": "Guy"}
                for i in range(1, 6)
            ],
            "Hi {name}! Have a nice day",
            "Subject",
        )

    self.settings_storage.get_gateway_credentials_headers_and_from.assert_awaited_once()
    send_email_batch_mock.assert_has_calls(
        [
            call(
                [
                    {"id": 1, "email": "*****@*****.**", "name": "Guy"},
                    {"id": 2, "email": "*****@*****.**", "name": "Guy"},
                ],
                "Hi {name}! Have a nice day",
                "Subject",
                *params,
            ),
            call(
                [
                    {"id": 3, "email": "*****@*****.**", "name": "Guy"},
                    {"id": 4, "email": "*****@*****.**", "name": "Guy"},
                ],
                "Hi {name}! Have a nice day",
                "Subject",
                *params,
            ),
            call(
                [{"id": 5, "email": "*****@*****.**", "name": "Guy"}],
                "Hi {name}! Have a nice day",
                "Subject",
                *params,
            ),
        ]
    )
    self.ensure_future_mock.assert_not_called()
async def test_method_matcher_with_custom_submatcher(self):
    resolver = Mock()
    submatcher = Mock(spec=AttributeMatcher)
    submatcher.match.side_effect = (False, True)
    matcher = MethodMatcher(resolver, submatcher)

    request = Mock(Request, method="GET")
    self.assertTrue(await matcher.match(request))

    submatcher.match.assert_has_calls([call("*"), call(request.method)])
    self.assertEqual(submatcher.match.call_count, 2)
def runTest(self):
    self.mock_cached_config_service.mock_current_time = 1000
    self.mock_cached_config_service.get("my_prop")

    # cache is now stale, should reread from disk
    self.mock_cached_config_service.mock_current_time = 1400
    self.mock_cached_config_service.get("my_prop")

    # test it was called twice, once for initial fetch, second for stale cache
    self.mock_cached_config_service._read_config_file.assert_has_calls(
        [call(), call()])
async def test_it_gets_a_new_cursor_if_current_cursor_dies(self):
    dead_cursor = CoroutineMock(alive=False)
    with patch.object(self.observer, 'get_new_cursor',
                      side_effect=[dead_cursor, ShouldStopObservation]):
        with self.assertRaises(ShouldStopObservation):
            await self.observer.observe_changes()

        self.assertEqual(self.observer.get_new_cursor.call_args_list,
                         [call(), call()])
async def test_run_follows_the_clock_tick(self):
    clock = asynctest.MagicMock()
    clock.__aiter__.return_value = range(3)
    with patch.multiple(
        self.task_runner,
        clock=clock,
        can_dispatch_task=CoroutineMock(return_value=False),
    ):
        await self.task_runner._run()
        self.task_runner.can_dispatch_task.assert_has_awaits(
            [call(), call(), call()])
async def test_collect_normal(self, release_mock, gen_events_mock,
                              gen_dict_mock, os_mock, log_mock, async_mock):
    """ Test normal operation, when tcptracer outputs KeyboardInterrupt only """
    # Set up mocks
    release_mock.return_value = "100.0.0"  # so we ignore the kernel check
    create_mock = asynctest.CoroutineMock()
    wait_mock = asynctest.CoroutineMock()

    # Set up subprocess
    async_mock.create_subprocess_shell = create_mock
    pipe_mock = async_mock.subprocess.PIPE

    # Set up timeout stuff
    async_mock.wait = wait_mock
    output_mock = asyncio.Future()
    output_mock.set_result(
        (b'output', b'Traceback (most recent call last):\nstacktrace'
                    b'\nstacktrace etc.\nKeyboardInterrupt\n'))
    wait_mock.side_effect = [([None], [output_mock])]

    # Begin test
    collecter = ebpf.TCPTracer(self.time, None)
    await collecter.collect()

    # Run checks
    create_mock.assert_has_calls([
        asynctest.call(ebpf.BCC_TOOLS_PATH + 'tcptracer ' + '-tv',
                       stdout=pipe_mock, stderr=pipe_mock,
                       preexec_fn=os_mock.setsid),
        asynctest.call().communicate()
    ])

    wait_mock.assert_called_once_with([
        async_mock.ensure_future(
            create_mock.return_value.communicate.return_value)
    ], timeout=self.time)

    os_mock.killpg.assert_called_once_with(create_mock.return_value.pid,
                                           ebpf.signal.SIGINT)

    log_mock.error.assert_not_called()
    gen_dict_mock.assert_called_once()
    gen_events_mock.assert_called_once()
async def test_minio_services(hass, caplog, minio_client):
    """Test Minio services."""
    hass.config.whitelist_external_dirs = set("/test")

    await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                CONF_HOST: "localhost",
                CONF_PORT: "9000",
                CONF_ACCESS_KEY: "abcdef",
                CONF_SECRET_KEY: "0123456789",
                CONF_SECURE: "true",
            }
        },
    )

    await hass.async_start()
    await hass.async_block_till_done()

    assert "Setup of domain minio took" in caplog.text

    # Call services
    await hass.services.async_call(
        DOMAIN,
        "put",
        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
        blocking=True,
    )
    assert minio_client.fput_object.call_args == call(
        "some_bucket", "some_key", "/test/some_file"
    )
    minio_client.reset_mock()

    await hass.services.async_call(
        DOMAIN,
        "get",
        {"file_path": "/test/some_file", "key": "some_key", "bucket": "some_bucket"},
        blocking=True,
    )
    assert minio_client.fget_object.call_args == call(
        "some_bucket", "some_key", "/test/some_file"
    )
    minio_client.reset_mock()

    await hass.services.async_call(
        DOMAIN, "remove", {"key": "some_key", "bucket": "some_bucket"}, blocking=True
    )
    assert minio_client.remove_object.call_args == call("some_bucket", "some_key")
    minio_client.reset_mock()
async def test_save(self):
    self.control._save_to_s3 = asynctest.CoroutineMock(return_value=True)
    self.control._save_to_file = asynctest.CoroutineMock(return_value=True)

    res = await self.control.save("tmp/test", {"foo": "bla"})
    assert res is True
    assert self.control._save_to_file.call_count == 1
    assert self.control._save_to_file.call_args == asynctest.call('tmp/test', 'foo: bla\n')
    assert self.control._save_to_s3.call_count == 0

    res = await self.control.save("s3://tmp/test", {"foo": "bla"})
    assert res is True
    assert self.control._save_to_file.call_count == 1
    assert self.control._save_to_s3.call_count == 1
    assert self.control._save_to_s3.call_args == asynctest.call('s3://tmp/test', 'foo: bla\n')
async def runTest(self):
    mock_command = create_mock_message(
        "!announce seT-Reactions operations 987 👍 👎",
        "the chan",
        user_roles=[self.mock_role])

    await self.announcement_service.bot_command_callback(mock_command)

    self.mock_discord_service.get_matching_message.assert_called_with(
        "operations", 987)

    expected_calls = [call("👍"), call("👎")]
    self.mock_message_to_edit.add_reaction.assert_has_calls(
        calls=expected_calls, any_order=False)
    assert self.mock_message_to_edit.add_reaction.call_count == 2
async def test_sender_connection_calls(mock_connections, outside_source_stream,
                                       mock_replay_headers, event_loop):
    mock_header = mock_replay_headers()
    connection = mock_connections()
    mock_header.data = b"Header"
    sender = Sender(outside_source_stream)
    outside_source_stream.set_header(mock_header)
    outside_source_stream.feed_data(b"Data")

    f = asyncio.ensure_future(sender.handle_connection(connection))
    await exhaust_callbacks(event_loop)
    outside_source_stream.finish()
    await f

    connection.write.assert_has_awaits([asynctest.call(b"Header"),
                                        asynctest.call(b"Data")])
    await sender.wait_for_ended()
async def test_reconnect(self):
    self.source.listen = asynctest.CoroutineMock(return_value=None)
    cb = "test"
    res = self.source.reconnect(cb)
    await asyncio.sleep(2)
    assert self.source.listen.call_count == 1
    assert self.source.listen.call_args == asynctest.call(cb)
async def test_should_call_retrain_only_if_set(self, receive_data, train_predictor):
    self.predictor.retrain_period = None
    self.analyser.add_behaviour = Mock()
    await self.analyser.setup()
    self.assertEqual(self.analyser.add_behaviour.call_count, 1)
    self.analyser.add_behaviour.assert_has_calls([call(receive_data())])
async def test_001_store_replay(self):
    await self.bus.publish({'something': 'something'})
    await self.bus.publish({'another': 'event'})

    # Backend received the events
    await self.bus._persistence._empty_last_events()
    eq_(len(self.backend.events), 2)
    eq_(self.backend.events[0]['status'], EventStatus.FAILED.value)

    # Check that replay sends the same events
    event_0_uid = self.backend.events[0]['id']
    event_1_uid = self.backend.events[1]['id']
    with patch.object(self.bus, 'publish') as pub:
        await self.bus.replay()
        pub.assert_has_calls([
            call({'something': 'something'}, topic='test',
                 previous_uid=event_0_uid),
            call({'another': 'event'}, topic='test',
                 previous_uid=event_1_uid),
        ])
async def test_handler_with_matcher_and_decorators(self):
    mock = Mock()

    def decorator_before(fn):
        async def before_handler(request):
            mock(sentinel.BEFORE)
            return await fn(request)
        return before_handler

    def decorator_after(fn):
        async def after_handler(request):
            mock(sentinel.AFTER)
            return await fn(request)
        return after_handler

    path, status, text = "/route", 201, "text"

    class App(jj.App):
        resolver = self.resolver

        @decorator_after
        @PathMatcher(resolver, path)
        @decorator_before
        async def handler(request):
            mock(sentinel.HANDLE)
            return Response(status=status, text=text)

    async with run(App()) as client:
        response = await client.get(path)
        self.assertEqual(response.status, status)
        self.assertEqual(await response.text(), text)

        response2 = await client.get("/")
        self.assertEqual(response2.status, 404)

    mock.assert_has_calls([
        call(sentinel.AFTER),
        call(sentinel.BEFORE),
        call(sentinel.HANDLE)
    ])
    self.assertEqual(mock.call_count, 3)
async def test_header_matcher_with_custom_submatcher(self):
    resolver = Mock()
    submatcher = Mock(spec=AttributeMatcher)
    submatcher.match.return_value = True
    matcher = HeaderMatcher(resolver, {
        "key1": submatcher,
        "key2": submatcher,
        "key3": 4,
    })

    request = Mock(Request, headers=CIMultiDict([
        ("key1", 1),
        ("key1", 2),
        ("key2", 3),
        ("key3", 4),
        ("key4", 5),
    ]))
    self.assertTrue(await matcher.match(request))

    submatcher.match.assert_has_calls([
        call([1, 2]),
        call([3])
    ])
    self.assertEqual(submatcher.match.call_count, 2)
async def test_resolve_request_with_multiple_handlers(self):
    matcher = CoroutineMock(side_effect=(False, True))
    handler1 = CoroutineMock(return_value=sentinel.response1)
    handler2 = CoroutineMock(return_value=sentinel.response2)
    self.resolver.register_matcher(matcher, handler1)
    self.resolver.register_matcher(matcher, handler2)

    request = Mock()
    response = await self.resolver.resolve(request, self.default_app)
    self.assertEqual(response, handler2)

    handler1.assert_not_called()
    handler2.assert_not_called()

    matcher.assert_has_calls([call(request)] * 2)
    self.assertEqual(matcher.call_count, 2)
async def test_root_and_app_and_handler_middlewares_priority(self):
    mock = Mock()

    class Do:
        async def do(self, request, handler, app):
            mock(self.__class__.__name__, sentinel.BEFORE)
            response = await handler(request)
            mock(self.__class__.__name__, sentinel.AFTER)
            return response

    class Middleware(Do, RootMiddleware):
        resolver = self.resolver

    class AppMiddleware(Do, BaseMiddleware):
        resolver = self.resolver

    class HandlerMiddleware(Do, BaseMiddleware):
        resolver = self.resolver

    @AppMiddleware()
    class App(jj.App):
        resolver = self.resolver

        @MethodMatcher(resolver, "*")
        @HandlerMiddleware()
        async def handler(request):
            mock(App.__name__, sentinel.BEFORE)
            response = Response(status=200)
            mock(App.__name__, sentinel.AFTER)
            return response

    async with run(App(), middlewares=[Middleware()]) as client:
        response = await client.get("/")
        self.assertEqual(response.status, 200)

    mock.assert_has_calls([
        call(AppMiddleware.__name__, sentinel.BEFORE),
        call(HandlerMiddleware.__name__, sentinel.BEFORE),
        call(Middleware.__name__, sentinel.BEFORE),
        call(App.__name__, sentinel.BEFORE),
        call(App.__name__, sentinel.AFTER),
        call(Middleware.__name__, sentinel.AFTER),
        call(HandlerMiddleware.__name__, sentinel.AFTER),
        call(AppMiddleware.__name__, sentinel.AFTER),
    ])
    self.assertEqual(mock.call_count, 8)
async def test_app_and_handler_middlewares_priority(self):
    mock = Mock()

    class Middleware1(BaseMiddleware):
        async def do(self, request, handler, app):
            mock(self.__class__.__name__, sentinel.BEFORE)
            response = await handler(request)
            mock(self.__class__.__name__, sentinel.AFTER)
            return response

    class Middleware2(Middleware1):
        pass

    @Middleware1(self.resolver)
    class App(jj.App):
        resolver = self.resolver

        @MethodMatcher(resolver, "*")
        @Middleware2(resolver)
        async def handler(request):
            mock(App.__name__, sentinel.BEFORE)
            response = Response(status=200)
            mock(App.__name__, sentinel.AFTER)
            return response

    async with run(App()) as client:
        response = await client.get("/")
        self.assertEqual(response.status, 200)

    mock.assert_has_calls([
        call(Middleware1.__name__, sentinel.BEFORE),
        call(Middleware2.__name__, sentinel.BEFORE),
        call(App.__name__, sentinel.BEFORE),
        call(App.__name__, sentinel.AFTER),
        call(Middleware2.__name__, sentinel.AFTER),
        call(Middleware1.__name__, sentinel.AFTER),
    ])
    self.assertEqual(mock.call_count, 6)