def test_asyncjson(self):
    consumer = Spy(Request)
    ajson = ProxySpy(asyncjson.AsyncJSON({'id': 1, 'name': 'Test'}))

    r = yield ajson.begin(consumer)
    for p in r:
        pass

    assert_that(consumer.registerProducer, called().times(1))
    assert_that(consumer.write, called())
    assert_that(consumer.unregisterProducer, called().times(1))
    assert_that(ajson.begin, called())

    self.flushLoggedErrors()

def test_fabric_deploy_works(self):
    self.__prepare_file(True)
    fab = ProxySpy(FabricDeployer())
    state.output.update({
        'status': False,
        'stdout': False,
        'warnings': False,
        'debug': False,
        'running': False,
        'user': False,
        'stderr': False,
        'aborts': False
    })
    tmpdir = tempfile.gettempdir()

    assert_that(fab.deploy(tmpdir + sep + 'fabric_deployer_test.dc'), is_(None))
    assert_that(fab.deploy, called().times(1))

def api_client(self, monkeypatch):
    def create_dataset(self, owner, dataset, **kwargs):
        assert_that(owner, not_none())
        assert_that(dataset, not_none())
        assert_that(kwargs.keys(),
                    only_contains('title', 'license', 'visibility'))
        return {}

    async def append_stream_chunked(self, owner, dataset, stream,
                                    queue, chunk_size, loop):
        while True:
            item = await queue.get()
            time.sleep(2)  # Required delay
            queue.task_done()
            if item is None:
                break

    monkeypatch.setattr(ApiClient, 'append_stream_chunked', append_stream_chunked)
    monkeypatch.setattr(ApiClient, 'connection_check', lambda self: True)
    monkeypatch.setattr(ApiClient, 'get_current_version', lambda self, o, d, s: 123456)
    monkeypatch.setattr(ApiClient, 'create_dataset', create_dataset)
    monkeypatch.setattr(ApiClient, 'get_dataset', lambda self, o, d: {'status': 'LOADED'})
    monkeypatch.setattr(ApiClient, 'set_stream_schema', lambda self, o, d, s, **k: {})
    monkeypatch.setattr(ApiClient, 'sync', lambda self, o, d: {})
    monkeypatch.setattr(ApiClient, 'truncate_stream_records', lambda self, o, d, s: {})

    return ProxySpy(ApiClient('no_token_needed'))

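# Hypothetical usage sketch (not part of the original suite): a test that takes
# the api_client fixture above can call the patched methods and, because the
# fixture returns a ProxySpy, assert on the recorded invocations afterwards.
# The test name and argument values below are assumptions for illustration.
def test_create_dataset_is_called_once(self, api_client):
    api_client.create_dataset('some_owner', 'some_dataset',
                              title='t', license='MIT', visibility='OPEN')

    assert_that(api_client.create_dataset, called().times(1))
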
def test_wrong_try_to_test_an_async_invocation(self):
    # given
    spy = ProxySpy(Collaborator())
    sut = SUT(spy)

    # when
    sut.some_method()

    # then
    assert_that(spy.write, called().async_mode(1))  # 'async' became a keyword in Python 3.7; doublex renamed the matcher to async_mode

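# Minimal sketch (assumed, not from the original suite) of the Collaborator/SUT
# pair used above: SUT invokes Collaborator.write() from a worker thread, so a
# plain called() assertion on the spy may run before the call has actually
# happened, which is why the timeout-based async matcher is involved at all.
import threading

class Collaborator(object):
    def write(self, data):
        pass

class SUT(object):
    def __init__(self, collaborator):
        self._collaborator = collaborator

    def some_method(self):
        thread = threading.Thread(target=self._collaborator.write, args=(1,))
        thread.start()
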
def test_account_already_exists(self):
    with Stub(PasswordService) as password_service:
        password_service.generate().returns('some')

    with ProxySpy(AccountStore()) as store:
        store.has_user('John').returns(True)

    service = AccountService(store, password_service)

    with self.assertRaises(AlreadyExists):
        service.create_user('John')

def test_controller_render_delegates_on_routing(self):
    c = DummyController()
    router = ProxySpy(Router())
    c._router = router
    # request = self.get_request()
    request = ControllerRequest(['/test'], {})

    r = yield self._render(c, request)

    assert_that(router.dispatch, called().times(1))
    self.assertEqual(r.written[0], 'ERROR 404: /dummy/test not found')

def test_consume_on_queue_but_no_consumer_interested_in_the_messages(self):
    serializer = SerializerStub()
    serialized_event = serializer.serialize(BananaHappened("apple"))
    messages = [
        Message.create(serialized_event, None, serializer.identifier())
        for _ in range(2)
    ]
    backend = a_backend_with_messages(messages)
    consumer = ProxySpy(NoBananaConsumer())
    registry = SerializerRegistry(serializer_settings)
    sut = SimpleMessageDispatcher(consumer,
                                  serializer_registry=registry,
                                  backend=backend)

    sut.consume_event("queue")

    assert_that(consumer.process, never(called()))
    assert_that(backend.acknowledge, called().times(2))