def __init__(self):
    """Wire up the channel-notification processing use case from env config.

    Reads queue configs for the channel-notification repo and the processing
    blockchain inbox, builds both repos, and hands them (plus the module-level
    ROUTING_TABLE) to ProcessChannelNotificationUseCase.
    """
    notification_conf = env_queue_config('CHANNEL_NOTIFICATION_REPO')
    notification_repo = ChannelNotificationRepo(notification_conf)

    inbox_conf = env_queue_config('PROC_BC_INBOX')
    inbox_repo = BCInboxRepo(inbox_conf)

    self.use_case = ProcessChannelNotificationUseCase(
        channel_notification_repo=notification_repo,
        bc_inbox_repo=inbox_repo,
        routing_table=ROUTING_TABLE,
    )
class Config:
    """Environment-driven configuration for the api-channel service.

    Scalar values come from `config` (with defaults suitable for local
    development); repo connection settings come from the `env_*_config`
    helpers keyed by conf-name prefix.
    """

    DEBUG = config('DEBUG', default=True, cast=bool)
    TESTING = config('TESTING', default=False, cast=bool)

    SERVICE_NAME = config('SERVICE_NAME', default='api-channel')
    ENDPOINT = config("ENDPOINT", default='AU')
    SERVICE_URL = config("SERVICE_URL", default='http://api-channel')
    FOREIGN_ENDPOINT_URL = config(
        "FOREIGN_ENDPOINT_URL",
        default='http://foreign-api-channel/incoming/messages',
    )

    # repo backends (S3-like / queue-like), resolved from IGL_* env variables
    SUBSCRIPTIONS_REPO_CONF = env_s3_config('SUBSCRIPTIONS_REPO')
    NOTIFICATIONS_REPO_CONF = env_queue_config('NOTIFICATIONS_REPO')
    DELIVERY_OUTBOX_REPO_CONF = env_queue_config('DELIVERY_OUTBOX_REPO')

    LOG_FORMATTER_JSON = False
def test_complex():
    """Exercise the env-config helpers end to end.

    Covers: IGL_* variable-name construction, defaults for an unknown conf
    name, bool coercion, nullable values once the env var is removed, and
    fully custom per-name configs.
    """
    assert igl_value('default_sqs', 'qname') == 'IGL_DEFAULT_SQS_QNAME'
    assert igl_value('default_s3', 'bucket') == 'IGL_DEFAULT_S3_BUCKET'
    assert igl_value('default_postgres', 'dbname') == 'IGL_DEFAULT_POSTGRES_DBNAME'

    for env_name, expected in TEST_ENV.items():
        assert os.environ.get(env_name) == expected

    # unknown conf name falls back to the documented defaults
    s3_conf = conf.env_s3_config(TEST_NON_EXISTING_CONF_NAME)
    sqs_conf = conf.env_queue_config(TEST_NON_EXISTING_CONF_NAME)
    pg_conf = conf.env_postgres_config(TEST_NON_EXISTING_CONF_NAME)
    assert to_str_dict(s3_conf) == TEST_S3_DEFAULT_CONF
    assert to_str_dict(sqs_conf) == TEST_SQS_DEFAULT_CONF
    assert to_str_dict(pg_conf) == TEST_POSTGRES_DEFAULT_CONF

    # bool values arrive as real booleans, not strings
    assert isinstance(s3_conf['use_ssl'], bool)
    assert isinstance(sqs_conf['use_ssl'], bool)

    # nullable values: with the env var gone the field becomes None
    for dropped in ('IGL_DEFAULT_SQS_QNAME',
                    'IGL_DEFAULT_S3_BUCKET',
                    'IGL_DEFAULT_POSTGRES_DBNAME'):
        del os.environ[dropped]
    s3_conf = conf.env_s3_config(TEST_NON_EXISTING_CONF_NAME)
    sqs_conf = conf.env_queue_config(TEST_NON_EXISTING_CONF_NAME)
    pg_conf = conf.env_postgres_config(TEST_NON_EXISTING_CONF_NAME)
    assert s3_conf['bucket'] is None
    assert sqs_conf['queue_name'] is None
    assert pg_conf['dbname'] is None

    # fully custom configs are honoured verbatim
    assert conf.env_s3_config(CUSTOM_S3_CONF_NAME) == CUSTOM_S3_CONF, os.environ
    assert conf.env_queue_config(CUSTOM_SQS_CONF_NAME) == CUSTOM_SQS_CONF, os.environ
    assert conf.env_postgres_config(
        CUSTOM_POSTGRES_CONF_NAME) == CUSTOM_POSTGRES_CONF, os.environ
def __init__(self):
    """Backfill repo conf attributes that a subclass has not already set.

    Each attribute is only assigned when missing, so subclasses can pin any
    of them as class attributes and skip the env lookup here.
    """
    if not hasattr(self, 'SUBSCRIPTIONS_REPO_CONF'):
        subscriptions_bucket = environ.get("IGL_SUBSCRIPTIONS_REPO_BUCKET")
        if not subscriptions_bucket and environ.get('IGL_CHANNEL_REPO_BUCKET'):
            # No dedicated subscriptions repo, but a channel repo bucket is
            # configured and can be shared: the data sets never overlap.
            self.SUBSCRIPTIONS_REPO_CONF = env_s3_config('CHANNEL_REPO')
        else:
            self.SUBSCRIPTIONS_REPO_CONF = env_s3_config('SUBSCRIPTIONS_REPO')

    if not hasattr(self, 'NOTIFICATIONS_REPO_CONF'):
        self.NOTIFICATIONS_REPO_CONF = env_queue_config('NOTIFICATIONS_REPO')

    if not hasattr(self, 'DELIVERY_OUTBOX_REPO_CONF'):
        self.DELIVERY_OUTBOX_REPO_CONF = env_queue_config('DELIVERY_OUTBOX_REPO')

    if not hasattr(self, 'CHANNEL_REPO_CONF'):
        self.CHANNEL_REPO_CONF = env_s3_config('CHANNEL_REPO')
class Config(object):
    """Base configuration, resolved from IGL_* environment variables."""

    # debug/testing both default to True — development-friendly defaults
    DEBUG = env_bool('IGL_DEBUG', default=True)
    TESTING = env_bool('IGL_TESTING', default=True)

    CHANNEL_NOTIFICATION_REPO_CONF = env_queue_config('CHANNEL_NOTIFICATION_REPO')

    # JSON-encoded routing table for the multi-channel router
    ROUTING_TABLE = env_json("IGL_MCHR_ROUTING_TABLE")

    # error reporting is disabled unless a DSN is provided
    SENTRY_DSN = env("SENTRY_DSN", default=None)
def get_processor():
    """Build a Processor that delivers queued callbacks from the outbox.

    Returns:
        Processor wrapping a DeliverCallbackUseCase backed by the
        PROC_DELIVERY_OUTBOX_REPO queue.
    """
    outbox = DeliveryOutboxRepo(env_queue_config('PROC_DELIVERY_OUTBOX_REPO'))
    return Processor(
        use_case=DeliverCallbackUseCase(delivery_outbox_repo=outbox),
    )
from libtrustbridge.utils.conf import env_queue_config, env_s3_config from libtrustbridge.websub import repos from libtrustbridge.websub.domain import Pattern from libtrustbridge.websub.processors import Processor from intergov.use_cases import DispatchMessageToSubscribersUseCase from tests.unit.domain.wire_protocols.test_generic_message import ( _generate_msg_object ) DELIVERY_OUTBOX_REPO_CONF = env_queue_config('TEST_1') NOTIFICATIONS_REPO_CONF = env_queue_config('TEST_2') SUBSCRIPTIONS_REPO_CONF = env_s3_config('TEST') CALLBACK_URL = "http://test-server-dummy-test-helper:5000/response/200/{}" DEFAULT_EXPIRATION = 3600 * 2 # predicates dont have common prefixes SUBSCRIPTIONS = { 'aaa.bbb.ccc.ddd': 2, 'eee.fff.jjj': 3, 'hhh.iii': 4, 'ggg': 1 } SUBSCRIPTIONS_WITH_COMMON_PREFIXES = { 'ooo': { 'ooo.aaa.bbb': 2, 'ooo.aaa': 3,
from libtrustbridge.utils.conf import env_queue_config
from libtrustbridge.websub.processors import Processor
from libtrustbridge.websub.repos import DeliveryOutboxRepo

from intergov.use_cases import DeliverCallbackUseCase
from tests.unit.domain.wire_protocols.test_generic_message import (
    _generate_msg_dict
)

DELIVERY_OUTBOX_REPO_CONF = env_queue_config('TEST')

# dummy test-helper endpoints returning fixed HTTP statuses
POST_SUCCESS_URL = "http://test-server-dummy-test-helper:5000/response/200/success"
POST_ERROR_URL = "http://test-server-dummy-test-helper:5000/response/500/internal-error"


def _generate_job(url, payload=None):
    """Return a delivery-outbox job dict targeting *url*.

    A message payload is generated when none (or a falsy one) is supplied.
    """
    return {
        's': url,
        'payload': payload if payload else _generate_msg_dict(),
    }


def _test_retries(processor, max_attempts):
    """Drive *processor* until MAX_RETRIES failures or *max_attempts* steps.

    Each `next(processor)` returning exactly False counts as one failed
    delivery attempt.
    """
    failures = 0
    for _ in range(max_attempts):
        if failures >= DeliverCallbackUseCase.MAX_RETRIES:
            break
        if next(processor) is False:
            failures += 1
def Config():
    """Return this worker's environment-driven configuration as a Box."""
    settings = {
        'NOTIFICATIONS_REPO': env_queue_config('NOTIFICATIONS_REPO'),
        'CHANNEL_REPO': env_queue_config('CHANNEL_REPO'),
        'RECEIVER': env('RECEIVER'),  # no default: must be set in the env
    }
    return Box(settings)
import pytest from http import HTTPStatus import urllib import requests from src import repos from libtrustbridge.utils.conf import env_s3_config, env_queue_config, env NOTIFICATIONS_REPO = env_queue_config('NOTIFICATIONS_REPO') DELIVERY_OUTBOX_REPO = env_queue_config('DELIVERY_OUTBOX_REPO') SUBSCRIPTIONS_REPO = env_s3_config('SUBSCRIPTIONS_REPO') CHANNEL_REPO = env_queue_config('CHANNEL_REPO') ENDPOINT = env('ENDPOINT', default='AU') @pytest.fixture(scope='function') def notifications_repo(): repo = repos.Notifications(NOTIFICATIONS_REPO) repo.WAIT_FOR_MESSAGE_SECONDS = 1 repo._unsafe_method__clear() yield repo @pytest.fixture(scope='function') def delivery_outbox_repo(): repo = repos.DeliveryOutbox(DELIVERY_OUTBOX_REPO) repo.WAIT_FOR_MESSAGE_SECONDS = 1 repo._unsafe_method__clear() yield repo @pytest.fixture(scope='function')
def Config():
    """Return this worker's environment-driven configuration as a Box."""
    settings = {
        'NOTIFICATIONS_REPO': env_queue_config('NOTIFICATIONS_REPO'),
        'DELIVERY_OUTBOX_REPO': env_queue_config('DELIVERY_OUTBOX_REPO'),
        'SUBSCRIPTIONS_REPO': env_s3_config('SUBSCRIPTIONS_REPO'),
    }
    return Box(settings)
def Config():
    """Return this worker's environment-driven configuration as a Box."""
    settings = {
        'DELIVERY_OUTBOX_REPO': env_queue_config('DELIVERY_OUTBOX_REPO'),
        'TOPIC_BASE_SELF_URL': env('TOPIC_BASE_SELF_URL', '/topic'),
        'CHANNEL_URL': env('CHANNEL_URL'),  # no default: must be set in the env
    }
    return Box(settings)