def test_post(boto3):
    """A posted message results in two S3 writes (content + metadata)."""
    repo = MessageLakeMinioRepo(CONNECTION_DATA)
    # the repo must create exactly one boto3 client on construction
    boto3.client.assert_called_once()
    message = _generate_msg_object(sender_ref='xxxx-xxxx-xxxx')
    assert repo.post(message)
    # one put for the message body, one for its metadata
    client = boto3.client.return_value
    assert client.put_object.call_count == 2
def test_get(boto3):
    """get() merges content and metadata, tolerating a missing metadata key."""
    repo = MessageLakeMinioRepo(CONNECTION_DATA)
    boto3.client.assert_called_once()
    msg = _generate_msg_object(sender_ref='xxxx-xxxx-xxxx', status='pending')
    msg_dict = msg.to_dict()
    metadata = {'status': 'received'}

    def get_object(**kwargs):
        # serve metadata.json / content.json fixtures depending on the key
        key = kwargs['Key']
        body = mock.MagicMock()
        data = None
        if key.endswith('metadata.json'):
            data = metadata
        elif key.endswith('content.json'):
            data = msg_dict
        if data:
            body.read.return_value = json.dumps(data).encode('utf-8')
        return {'Body': body}

    s3_client = boto3.client.return_value
    s3_client.get_object.side_effect = get_object
    # happy path: both objects present
    assert repo.get(str(msg.sender), str(msg.sender_ref))

    # simulate S3 "NoSuchKey" errors on selected keys
    exception = Exception()
    exception.response = {'Error': {'Code': 'NoSuchKey'}}

    def raise_error(on_key):
        def get_object_content(key):
            if key.endswith(on_key):
                raise exception
            else:
                return json.dumps(msg_dict)
        return get_object_content

    s3_client = boto3.client.return_value
    repo.get_object_content = mock.Mock()
    # missing metadata is tolerated...
    repo.get_object_content.side_effect = raise_error('metadata.json')
    assert repo.get(str(msg.sender), str(msg.sender_ref))
    # ...but missing content means no message
    repo.get_object_content.side_effect = raise_error('content.json')
    assert not repo.get(str(msg.sender), str(msg.sender_ref))

    # any other S3 error code must propagate
    exception.response['Error']['Code'] = 'Random'
    for key in ['content.json', 'metadata.json']:
        repo.get_object_content.side_effect = raise_error(key)
        with pytest.raises(Exception):
            repo.get(str(msg.sender), str(msg.sender_ref))
def test_update_metadata(boto3):
    """update_metadata() succeeds when the metadata object can be fetched."""
    repo = MessageLakeMinioRepo(CONNECTION_DATA)
    boto3.client.assert_called_once()
    msg = _generate_msg_object(sender_ref='xxxx-xxxx-xxxx', status='pending')
    metadata = {'status': 'received'}

    def get_object(**kwargs):
        # only the metadata key yields a readable body; anything else is None
        key = kwargs['Key']
        body = mock.MagicMock()
        if key.endswith('metadata.json'):
            body.read.return_value = json.dumps(metadata).encode('utf-8')
            return {'Body': body}
        return None

    client = boto3.client.return_value
    client.get_object.side_effect = get_object
    assert repo.update_metadata(
        str(msg.sender), str(msg.sender_ref), {'status': 'received'})
def test_post(sessionmaker, create_engine):
    """Posting a new message returns its assigned id; a duplicate returns True.

    FIX: the original asserted ``repo.post(message) is 1`` — identity
    comparison with an int literal only passes because of CPython's
    small-integer interning and emits a SyntaxWarning on Python 3.8+.
    Value equality (``== 1``) is what the test actually means.
    """
    def session_add(m):
        # emulate the database assigning a primary key on insert
        m.id = 1

    query = mock.MagicMock()
    query.filter.return_value = query
    query.count.return_value = 0
    session = sessionmaker.return_value.return_value
    session.query.return_value = query
    session.add.side_effect = session_add
    repo = PostgresRepo(CONNECTION_DATA)

    # testing post: new message gets the id assigned by the session
    message = _generate_msg_object()
    assert repo.post(message) == 1
    session.commit.assert_called_once()

    # testing duplicate: post() reports success without a second insert
    query.count.return_value = 1
    assert repo.post(message) is True
def test():
    """Round-trip a message through the lake: post, get, update, re-post raw."""
    repo = MessageLakeRepo(CONF)
    repo._unsafe_clear_for_test()
    assert repo._unsafe_is_empty_for_test()

    message = _generate_msg_object(
        sender_ref='xxx-xxx-xxx',
        status=None,
        channel_id=None,
        channel_txn_id=None,
    )
    assert repo.post(message)
    sender = str(message.sender)
    sender_ref = str(message.sender_ref)

    # what we read back must be a Message equal to what we stored
    fetched = repo.get(sender, sender_ref)
    assert fetched
    assert isinstance(fetched, Message)
    assert fetched.to_dict() == message.to_dict()

    # a metadata update must be visible on the next read
    message.status = 'rejected'
    assert repo.update_metadata(sender, sender_ref, {'status': message.status})
    fetched = repo.get(sender, sender_ref)
    assert fetched
    assert fetched == message

    # unknown keys yield nothing
    assert not repo.get('AU', 'aaaaa-bbbbb-ccccc')

    # writing the content object directly is enough for get() to succeed
    repo._unsafe_clear_for_test()
    assert repo._unsafe_is_empty_for_test()
    repo.put_message_related_object(
        sender=sender,
        sender_ref=sender_ref,
        rel_path='/content.json',
        content_body=json.dumps(message.to_dict()))
    fetched = repo.get(sender, sender_ref)
    assert message == fetched
def test():
    """ACL repo: grant access, then search by the supported filter only."""
    repo = ObjectACLRepo(CONF)
    repo._unsafe_clear_for_test()
    assert repo._unsafe_is_empty_for_test()

    message = _generate_msg_object()
    obj = str(message.obj)
    receiver = str(message.receiver)

    # testing post actions
    assert repo.post(message)
    assert repo.allow_access_to(obj, receiver)

    # no filters
    with pytest.raises(Exception):
        repo.search()
    # invalid filters
    with pytest.raises(Exception):
        repo.search({'abrakadabra': 'value'})

    # a valid object filter returns the receivers granted access
    search_result = repo.search({'object__eq': obj})
    assert search_result
    assert message.receiver in search_result
    assert not repo.search({'object__eq': 'something_strange'})
def test():
    """Callbacks spreader fans a notification out to matching subscribers.

    Covers both exact-predicate subscriptions and groups of subscriptions
    sharing a common predicate prefix.
    """
    delivery_outbox_repo = DeliveryOutboxRepo(DELIVERY_OUTBOX_REPO_CONF)
    notifications_repo = NotificationsRepo(NOTIFICATIONS_REPO_CONF)
    subscriptions_repo = SubscriptionsRepo(SUBSCRIPTIONS_REPO_CONF)
    delivery_outbox_repo._unsafe_clear_for_test()
    notifications_repo._unsafe_clear_for_test()
    subscriptions_repo._unsafe_clear_for_test()
    assert notifications_repo._unsafe_is_empty_for_test()
    assert delivery_outbox_repo._unsafe_is_empty_for_test()
    assert subscriptions_repo._unsafe_is_empty_for_test()

    processor = CallbacksSpreaderProcessor(
        notifications_repo_conf=NOTIFICATIONS_REPO_CONF,
        delivery_outbox_repo_conf=DELIVERY_OUTBOX_REPO_CONF,
        subscriptions_repo_conf=SUBSCRIPTIONS_REPO_CONF)
    # the processor is its own iterator, and idles while nothing is queued
    assert iter(processor) is processor
    assert next(processor) is None

    _fill_subscriptions_repo(subscriptions_repo, SUBSCRIPTIONS)
    for prefix, subscriptions in SUBSCRIPTIONS_WITH_COMMON_PREFIXES.items():
        _fill_subscriptions_repo(subscriptions_repo, subscriptions)
    # filling subscriptions alone must not give the processor work
    assert next(processor) is None

    # exact predicates: each notification produces one delivery job per
    # subscriber of that (unique) predicate
    for predicate, number_of_subscribers in SUBSCRIPTIONS.items():
        message = _generate_msg_object(predicate=predicate)
        notifications_repo.post_job(message)
        assert next(processor) is True
        for i in range(number_of_subscribers):
            job = delivery_outbox_repo.get_job()
            assert job, f"Call:{i+1}. Predicate:{predicate}"
            message_queue_id, payload = job
            # only direct subscribers receive this message
            assert payload.get('payload', {}).get('predicate') == predicate
            url = payload.get('s', '')
            assert _is_predicate_in_url(url, predicate), {
                'url': url,
                'predicate': predicate
            }
            assert delivery_outbox_repo.delete(message_queue_id)
        # queue drained, processor done
        assert not delivery_outbox_repo.get_job()
        assert next(processor) is None

    # common prefixes: posting the most specific predicate of a group must
    # reach every subscriber in that group
    for prefix, subscriptions in SUBSCRIPTIONS_WITH_COMMON_PREFIXES.items():
        expect_jobs = sum(subscriptions.values())
        longest_predicate = ""
        for candidate in subscriptions:
            if len(candidate) > len(longest_predicate):
                longest_predicate = candidate
        message = _generate_msg_object(predicate=longest_predicate)
        assert notifications_repo.post_job(message)
        assert next(processor) is True
        for i in range(expect_jobs):
            job = delivery_outbox_repo.get_job()
            assert job
            message_queue_id, payload = job
            # subscribers see the full (longest) predicate in the payload
            assert payload.get('payload', {}).get('predicate') == longest_predicate
            url = payload.get('s', '')
            assert _is_predicate_in_url(url, prefix), {
                'url': url,
                'prefix': prefix
            }
            assert delivery_outbox_repo.delete(message_queue_id)
        assert next(processor) is None
def test():
    """Inbound processor routes messages by status and validates sender_ref."""
    # creating testing versions of all required repos
    message_lake_repo = MessageLakeRepo(MESSAGE_LAKE_REPO_CONF)
    object_acl_repo = ObjectACLRepo(OBJECT_ACL_REPO_CONF)
    bc_inbox_repo = BCInboxRepo(BC_INBOX_REPO_CONF)
    object_retrieval_repo = ObjectRetrievalRepo(OBJECT_RETRIEVAL_REPO_CONF)
    notifications_repo = NotificationsRepo(NOTIFICATIONS_REPO_CONF)
    blockchain_outbox_repo = ApiOutboxRepo(BLOCKCHAIN_OUTBOX_REPO_CONF)

    def clear():
        # wipe every repo and verify each one really is empty
        message_lake_repo._unsafe_method__clear()
        object_acl_repo._unsafe_method__clear()
        bc_inbox_repo._unsafe_method__clear()
        object_retrieval_repo._unsafe_method__clear()
        notifications_repo._unsafe_method__clear()
        blockchain_outbox_repo._unsafe_method__clear()
        assert message_lake_repo.is_empty()
        assert object_acl_repo.is_empty()
        assert bc_inbox_repo.is_empty()
        assert object_retrieval_repo.is_empty()
        assert notifications_repo.is_empty()
        assert blockchain_outbox_repo.is_empty()

    clear()
    processor = InboundMessageProcessor(
        bc_inbox_repo_conf=BC_INBOX_REPO_CONF,
        message_lake_repo_conf=MESSAGE_LAKE_REPO_CONF,
        object_acl_repo_conf=OBJECT_ACL_REPO_CONF,
        object_retrieval_repo_conf=OBJECT_RETRIEVAL_REPO_CONF,
        notifications_repo_conf=NOTIFICATIONS_REPO_CONF,
        blockchain_outbox_repo_conf=BLOCKCHAIN_OUTBOX_REPO_CONF)
    # processor is its own iterator and starts with nothing to do
    assert iter(processor) is processor
    assert next(processor) is None

    # case 1: a 'received' message from a foreign sender
    sender_ref = "AU:xxxx-xxxx-xxxx"
    status = 'received'
    message = _generate_msg_object(sender_ref=sender_ref, status=status)
    message.sender = "CN"
    assert bc_inbox_repo.post(message)
    assert next(processor) is True
    assert next(processor) is None
    # consumed from the inbox and fanned out to the storage repos
    assert bc_inbox_repo.is_empty()
    assert not message_lake_repo.is_empty()
    assert not object_acl_repo.is_empty()
    # we can't say it's empty because worker gets values from there
    # assert not object_retrieval_repo.is_empty()
    # received status should not be posted to blockchain
    assert blockchain_outbox_repo.is_empty()
    clear()

    # case 2: a 'pending' message from us — this one should go to the
    # blockchain outbox
    sender_ref = "AU:xxxx-xxxx-xxxx"
    status = 'pending'
    message = _generate_msg_object(sender_ref=sender_ref, status=status)
    message.sender = OUR_JRD
    message.receiver = 'CN'
    assert bc_inbox_repo.post(message)
    assert next(processor) is True
    assert next(processor) is None
    assert bc_inbox_repo.is_empty()
    assert not message_lake_repo.is_empty()
    assert not object_acl_repo.is_empty()
    clear()

    # case 3: a message without sender_ref must be rejected
    message = _generate_msg_object()
    assert bc_inbox_repo.post(message)
    assert next(processor) is False
    assert next(processor) is None
def _generate_message_mock(**kwargs):
    """Build a MagicMock whose attributes mirror a generated message's fields."""
    mocked = mock.MagicMock()
    fields = _generate_msg_object(**kwargs).to_dict()
    for name, val in fields.items():
        setattr(mocked, name, val)
    return mocked
def test_operations(boto3):
    """Exercise every SQS-backed repo operation against a mocked client:
    post/get of messages, post_job/get_job, delete, and their error paths.
    """
    sqs_client = boto3.client.return_value
    sqs_client.get_queue_url.return_value = {'QueueUrl': QUEUE_URL}
    repo = ElasticMQRepo(CONNECTION_DATA)

    # testing post message
    message = _generate_msg_object()
    assert repo.post(message)
    sqs_client.send_message.assert_called_once_with(
        QueueUrl=QUEUE_URL,
        MessageBody=json.dumps(message.to_dict()),
        DelaySeconds=0)
    boto3.reset_mock()

    # testing post message exception: failures are swallowed into False
    sqs_client.send_message.side_effect = Exception()
    assert not repo.post(message)

    # testing get message: the body round-trips through JSON intact
    sqs_client.receive_message.return_value = {
        "Messages": [{
            "ReceiptHandle": 'a',
            "Body": json.dumps(message.to_dict())
        }]
    }
    queue_message_id, queue_message = repo.get()
    expected = message.to_dict()
    received = queue_message.to_dict()
    assert queue_message_id == 'a'
    for field, value in expected.items():
        assert received[field] == value
    sqs_client.receive_message.assert_called_once()
    _, kwargs = sqs_client.receive_message.call_args_list[0]
    assert kwargs['MaxNumberOfMessages'] == 1
    assert kwargs['VisibilityTimeout'] == 30
    boto3.reset_mock()

    # testing get empty responses
    sqs_client.receive_message.return_value = {"Messages": []}
    assert not repo.get()
    sqs_client.receive_message.return_value = {}
    assert not repo.get()

    # testing get invalid message payload
    sqs_client.receive_message.return_value = {
        "Messages": [{
            "ReceiptHandle": 'a',
            "Body": json.dumps({'Hello': 'world'})
        }]
    }
    with pytest.raises(KeyError):
        repo.get()
    boto3.reset_mock()

    # testing post job
    sqs_client.send_message.side_effect = None
    job = {'Hello': 'job'}
    assert repo.post_job(job, delay_seconds=30)
    sqs_client.send_message.assert_called_once_with(
        QueueUrl=QUEUE_URL,
        DelaySeconds=30,
        MessageBody=json.dumps(job))
    boto3.reset_mock()

    # testing post job exceptions: empty job / non-int delay
    with pytest.raises(ValueError):
        repo.post_job({})
    with pytest.raises(TypeError):
        repo.post_job(job, delay_seconds="30")
    boto3.reset_mock()

    # testing get_job
    sqs_client.receive_message.return_value = {
        "Messages": [{
            "ReceiptHandle": 'a',
            "Body": json.dumps(job)
        }]
    }
    queue_job_id, queue_job = repo.get_job()
    _, kwargs = sqs_client.receive_message.call_args_list[0]
    assert kwargs['MaxNumberOfMessages'] == 1
    assert kwargs['VisibilityTimeout'] == 30
    assert queue_job_id == 'a'
    assert queue_job == job

    # testing get_job empty responses
    sqs_client.receive_message.return_value = {"Messages": []}
    assert not repo.get_job()
    sqs_client.receive_message.return_value = {}
    assert not repo.get_job()
    boto3.reset_mock()

    # testing delete operation and its failure path
    assert repo.delete(1)
    sqs_client.delete_message.assert_called_once_with(
        QueueUrl=QUEUE_URL, ReceiptHandle=1)
    boto3.reset_mock()
    sqs_client.delete_message.side_effect = Exception()
    assert not repo.delete(2)
    sqs_client.delete_message.assert_called_once()