def _generate_job(url, payload=None): if not payload: payload = _generate_msg_dict() return { 's': url, 'payload': payload }
def test_elasticmq_clear(docker_setup):
    """Clearing the queue removes every previously posted message."""
    repo = REPO_CLASS(docker_setup['elasticmq'])
    msg = Message.from_dict(test_generic_message._generate_msg_dict())
    # post twice so the queue is definitely non-empty before clearing
    for _ in range(2):
        assert repo.post(msg)
    repo._unsafe_method__clear()
    assert not repo.get()
def test_elasticmq_post_after_clear(docker_setup):
    """A message posted right after a clear is still retrievable."""
    repo = REPO_CLASS(docker_setup['elasticmq'])
    # repeat to make sure the clear/post/get cycle keeps working
    # NOTE(review): loop nesting reconstructed from collapsed source — confirm
    for i in range(5):
        message = gd.Message.from_dict(test_messages._generate_msg_dict())
        repo._unsafe_clear_for_test()
        assert repo.post(message)
        assert repo.get()
def test_repository_post_returns_truithy(
        docker_setup, elasticmq_client):
    """Posting a freshly generated message yields a truthy result."""
    # NOTE(review): "truithy" is a typo for "truthy"; the collected test
    # name is left unchanged to avoid altering the suite's identifiers.
    repo = REPO_CLASS(docker_setup['elasticmq'])
    message = gd.Message.from_dict(test_messages._generate_msg_dict())
    assert repo.post(message)
def test_repository_patch_status(docker_setup, pg_session):
    """
    patch() updates a posted message's status exactly once per value.

    Also checks that a repeated patch with the same status, an unknown
    status value, a non-status field, and an unknown id are rejected.
    """
    repo = postgresrepo.PostgresRepo(env_postgres_config('TEST'))
    # what is the statechart for api_outbox messages?
    for status in ('rejected', 'accepted'):
        msg_dict = test_messages._generate_msg_dict()
        msg = gd.Message.from_dict(msg_dict)
        msg_id = repo.post(msg)
        updates = {"status": status}
        assert repo.patch(msg_id, updates)
        # patching again with an identical status is reported falsy
        assert not repo.patch(msg_id, updates)
        fetched_msg = repo.get(msg_id)
        assert fetched_msg.status == status
        # only the status field may be patched; other fields raise
        with pytest.raises(Exception):
            repo.patch(msg_id, {'receiver': 'ES'})
        # unknown status values and unknown ids are rejected
        assert not repo.patch(msg_id, {'status': 'ahaha'})
        assert not repo.patch(-10, {'status': 'rejected'})
def test_message_factory():
    """The memory channel's message factory builds proto.Message objects."""
    channel = channels.DiscreteGenericMemoryChannel(config=None)
    factory = channel.message_factory()
    message = factory.from_dict(proto_tests._generate_msg_dict())
    assert isinstance(message, proto.Message)
def test_repository_delete_by_id_returns_truthy(docker_setup, pg_session):
    """delete() is truthy for an existing id and falsy when repeated."""
    repo = postgresrepo.PostgresRepo(env_postgres_config('TEST'))
    posted_id = repo.post(
        gd.Message.from_dict(test_messages._generate_msg_dict()))
    assert repo.delete(posted_id)
    # the row is already gone, so a second delete reports failure
    assert not repo.delete(posted_id)
def test_post():
    """A message posted to the memory channel appears in get_messages()."""
    channel = channels.DiscreteGenericMemoryChannel(config=None)
    factory = channel.message_factory()
    message = factory.from_dict(proto_tests._generate_msg_dict())
    channel.post_message(message)
    assert message in channel.get_messages()
def test(docker_setup):
    """End-to-end check of RejectedStatusUpdater across both repos."""
    message_lake_repo = MessageLakeRepo(MESSAGE_LAKE_REPO_CONF)
    rejected_message_repo = RejectedMessagesRepo(REJECTED_MESSAGES_REPO_CONF)
    # ensuring that repos are empty
    message_lake_repo._unsafe_clear_for_test()
    rejected_message_repo._unsafe_clear_for_test()
    updater = RejectedStatusUpdater(
        rejected_message_repo_conf=REJECTED_MESSAGES_REPO_CONF,
        message_lake_repo_conf=MESSAGE_LAKE_REPO_CONF)
    # checking that iter returns updater
    assert updater is iter(updater)
    # test no rejected messages in the queue
    assert not next(updater)
    # testing single message in the queue
    sender, sender_ref = MESSAGES_DATA[0]
    message = Message.from_dict(
        _generate_msg_dict(**{
            SENDER_KEY: sender,
            SENDER_REF_KEY: sender_ref
        }))
    rejected_message_repo.post(message)
    message_lake_repo.post(message)
    assert next(updater)
    assert not next(updater)
    # testing several messages in queue
    # NOTE(review): loop nesting reconstructed from collapsed source so the
    # number of posts matches the number of truthy next() calls — confirm
    for i in range(2):
        for sender, sender_ref in MESSAGES_DATA:
            message = Message.from_dict(
                _generate_msg_dict(**{
                    SENDER_KEY: sender,
                    SENDER_REF_KEY: sender_ref
                }))
            rejected_message_repo.post(message)
            message_lake_repo.post(message)
        for i in range(len(MESSAGES_DATA)):
            assert next(updater)
        assert not next(updater)
def test_elasticmq_post_creates_a_message(
        docker_setup, elasticmq_client):
    """post() puts a retrievable message onto an empty queue."""
    repo = REPO_CLASS(docker_setup['elasticmq'])
    # start from a known-empty queue
    elasticmq_client.purge_queue(QueueUrl=repo.queue_url)
    assert not repo.get()
    message = gd.Message.from_dict(test_messages._generate_msg_dict())
    assert repo.post(message)
    assert repo.get()
def test_repository_post_message_returns_an_integer(docker_setup, pg_session):
    """
    Posting a message returns an integer

    The integer returned is a unique local ID of the message in the repo.
    """
    repo = postgresrepo.PostgresRepo(CONF)
    message = gd.Message.from_dict(test_messages._generate_msg_dict())
    posted_id = repo.post(message)
    assert isinstance(posted_id, int)
def test_elasticmq_get_returns_a_message_and_id(
        docker_setup, elasticmq_client):
    """get() yields an (id, Message) pair after a successful post."""
    repo = REPO_CLASS(docker_setup['elasticmq'])
    # work against a known-empty queue
    elasticmq_client.purge_queue(QueueUrl=repo.queue_url)
    posted = gd.Message.from_dict(test_messages._generate_msg_dict())
    assert repo.post(posted)
    _msg_id, received = repo.get()
    assert isinstance(received, gd.Message)
def test_elasticmq_delete_actually_does(
        docker_setup, elasticmq_client):
    """delete() removes the fetched message from the queue."""
    repo = REPO_CLASS(docker_setup['elasticmq'])
    elasticmq_client.purge_queue(QueueUrl=repo.queue_url)
    posted = gd.Message.from_dict(test_messages._generate_msg_dict())
    assert repo.post(posted)
    msg_id, _received = repo.get()
    assert repo.delete(msg_id)
    # the queue must be empty again after the delete
    assert not repo.get()
def test_patch_only_status_and_update_skip_for_equal_status():
    """PatchMessageMetadataUseCase forwards only the status field, and
    skips the update entirely when the status value is unchanged."""
    # testing only status update / ignoring other payload fields
    payload = _generate_msg_dict()
    payload[STATUS_KEY] = STATUS_PENDING
    message = Message.from_dict(payload)
    notifications_repo = mock.MagicMock()
    message_lake_repo = mock.MagicMock()
    message_lake_repo.get.return_value = message
    new_payload = _generate_msg_dict()
    new_payload[STATUS_KEY] = STATUS_ACCEPTED
    uc = PatchMessageMetadataUseCase(message_lake_repo, notifications_repo)
    uc.execute(
        MESSAGE_REF,
        new_payload
    )
    message_lake_repo.get.assert_called()
    # only the status key reaches update_metadata; other payload fields
    # are dropped by the use case
    message_lake_repo.update_metadata.assert_called_once_with(
        SENDER, SENDER_REF,
        {STATUS_KEY: new_payload[STATUS_KEY]}
    )
    notifications_repo.post_job.assert_called()
    # testing ignoring update for the equal status values
    notifications_repo.reset_mock()
    message_lake_repo.reset_mock()
    uc.execute(
        MESSAGE_REF,
        payload
    )
    message_lake_repo.get.assert_called()
    message_lake_repo.update_metadata.assert_not_called()
    notifications_repo.post_job.assert_not_called()
def test_repository_get_by_id(docker_setup, pg_session):
    """get(id) returns a message whose fields match what was posted."""
    repo = postgresrepo.PostgresRepo(env_postgres_config('TEST'))
    original = gd.Message.from_dict(test_messages._generate_msg_dict())
    fetched = repo.get(repo.post(original))
    for attr in ('sender', 'receiver', 'subject', 'obj', 'predicate'):
        assert str(getattr(original, attr)) == str(getattr(fetched, attr))
    # never mind status, it's unlikely but possible that
    # a race condition could make them different
    assert not repo.get(-10)
def test_repository_post_message_rejects_duplicates(docker_setup, pg_session):
    '''
    Posting duplicate messages should fail

    (to, subject, predicate) is unique,
    among pending and accepted messages.
    '''
    repo = postgresrepo.PostgresRepo(CONF)
    msg_dict = test_messages._generate_msg_dict()
    msg = gd.Message.from_dict(msg_dict)
    m1 = repo.post(msg)
    assert m1 is not None  # first one should work
    assert repo.post(
        msg
    ) is True  # the second post returns True due to no error but does nothing
    # rejecting the first copy frees the uniqueness slot again
    updates = {'status': 'rejected'}
    repo.patch(m1, updates)
    m2 = repo.post(msg)  # unless the first one was rejected
    assert m2
def __init__(self):
    """Pre-build nine valid protocol messages for the tests to use."""
    # comprehension replaces the manual append loop (same 9 messages)
    self.valid_messages = [
        protocol.Message.from_dict(test_protocol._generate_msg_dict())
        for _ in range(9)
    ]
def valid_message_dicts():
    """Return nine freshly generated valid message dicts."""
    # comprehension replaces the manual append loop (PERF401)
    return [tgm._generate_msg_dict() for _ in range(9)]
def minio_data():
    """Return nine generated message dicts used as minio test payloads."""
    return [test_messages._generate_msg_dict() for _ in range(9)]
def valid_message_dicts():
    """Return a list containing one freshly generated valid message dict."""
    # comprehension replaces the single-iteration append loop (PERF401)
    return [test_protocol._generate_msg_dict() for _ in range(1)]
def _generate_msg(**kwargs):
    """Build a Message from a freshly generated dict, forwarding *kwargs*."""
    msg_dict = _generate_msg_dict(**kwargs)
    return Message.from_dict(msg_dict)