def _get_docapi_auth_headers(self, source_jurisdiction, doc_api_url):
    """
    We support 2 auth methods:

    * dumb - no real auth; a static dict of demo data is passed around
    * AWS OIDC/Cognito - when we have client credentials in the env variables
      and are able to retrieve a short-lived JWT using them, and then call
      the API using that JWT.

    TODO: it's probably worth configuring this the same way as we do with channels
    """
    source_jurisdiction = str(source_jurisdiction)
    if doc_api_url.startswith("http://"):
        # local/demo setup
        logger.info("For document API request to %s the dumb auth is used", doc_api_url)
        return {
            'Authorization': 'JWTBODY {}'.format(
                json.dumps({
                    "sub": "documents-api",
                    "party": "spider",
                    "jurisdiction": self.jurisdiction.name,
                }))
        }
    try:
        # first, try to determine the OAuth credentials for that jurisdiction
        COGNITO_OAUTH_CREDENTIALS = {
            "client_id": env_json(
                "IGL_JURISDICTION_OAUTH_CLIENT_ID")[source_jurisdiction],
            "client_secret": env_json(
                "IGL_JURISDICTION_OAUTH_CLIENT_SECRET")[source_jurisdiction],
            "scopes": env_json(
                "IGL_JURISDICTION_OAUTH_SCOPES")[source_jurisdiction],
            "wellknown_url": env_json(
                "IGL_JURISDICTION_OAUTH_WELLKNOWN_URL")[source_jurisdiction],
        }
    except (KeyError, TypeError) as e:
        # it seems we don't have it configured, so fall back to the demo auth
        logger.info(
            "We don't have the only supported real auth method configured (%s)",
            str(e))
        COGNITO_OAUTH_CREDENTIALS = {}
    if COGNITO_OAUTH_CREDENTIALS:
        logger.info("Will try to retrieve JWT for %s", doc_api_url)
        # good, now try to request a JWT using these credentials
        return self._get_auth_headers(
            auth_method="Cognito/JWT",
            # well-known auth URLs and other configuration
            auth_parameters=COGNITO_OAUTH_CREDENTIALS)
    return {}
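# A minimal configuration sketch for the Cognito/JWT branch above: each of the
# IGL_JURISDICTION_OAUTH_* variables is read via env_json() and holds a JSON
# object keyed by the source jurisdiction name. All values below are
# illustrative placeholders, not real credentials or endpoints.
import json
import os

os.environ["IGL_JURISDICTION_OAUTH_CLIENT_ID"] = json.dumps({"AU": "demo-client-id"})
os.environ["IGL_JURISDICTION_OAUTH_CLIENT_SECRET"] = json.dumps({"AU": "demo-client-secret"})
os.environ["IGL_JURISDICTION_OAUTH_SCOPES"] = json.dumps({"AU": "documents-api/full"})
os.environ["IGL_JURISDICTION_OAUTH_WELLKNOWN_URL"] = json.dumps({
    "AU": "https://example.auth.ap-southeast-2.amazoncognito.com/.well-known/openid-configuration",
})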
class Config(object):
    # Configuration for the domain module; it may mix somewhat different
    # things like countries and wire protocols, so once that starts to be
    # a problem we may split it further.
    DOCUMENT_REPOS = env_json(
        "IGL_COUNTRY_DOCUMENT_REPORTS",
        # the misspelled variable name is still read as a fallback
        default=env_json(
            "IGL_CONTRY_DOCUMENT_REPORTS",
            default={
                'AU': 'http://127.0.0.1:7770/documents/',
                'CN': 'http://127.0.0.1:7771/documents/',
                'NZ': 'http://127.0.0.1:7782/documents/',
                'SG': 'http://127.0.0.1:7784/documents/',
            }))
class Config(object):
    # Configuration for the domain module; it may mix somewhat different
    # things like countries and wire protocols, so once that starts to be
    # a problem we may split it further.
    DOCUMENT_REPOS = env_json(
        "IGL_CONTRY_DOCUMENT_REPORTS",
        default={
            'AU': 'http://127.0.0.1:7770/documents/',
            'CN': 'http://127.0.0.1:7771/documents/',
            'JP': 'http://127.0.0.1:7773/documents/',
            'KR': 'http://127.0.0.1:7774/documents/',
            'TH': 'http://127.0.0.1:7775/documents/',
            'BN': 'http://127.0.0.1:7776/documents/',
            'MM': 'http://127.0.0.1:7777/documents/',
            'KH': 'http://127.0.0.1:7778/documents/',
            'ID': 'http://127.0.0.1:7779/documents/',
            'LA': 'http://127.0.0.1:7780/documents/',
            'MY': 'http://127.0.0.1:7781/documents/',
            'NZ': 'http://127.0.0.1:7782/documents/',
            'PH': 'http://127.0.0.1:7783/documents/',
            'SG': 'http://127.0.0.1:7784/documents/',
            'VN': 'http://127.0.0.1:7785/documents/',
            'US': 'http://127.0.0.1:7786/docs/',
            'ES': 'http://127.0.0.1:7787/documentos/',
            'SK': 'http://127.0.0.1:7788/dokumenty/',
            'GB': 'http://127.0.0.1:7789/documents/',
        })
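# A hedged example of overriding the localhost DOCUMENT_REPOS defaults above:
# the variable is parsed by env_json() as a JSON object mapping a country code
# to its document API base URL. Setting the legacy-spelled name works for both
# Config variants shown above (the first one prefers IGL_COUNTRY_DOCUMENT_REPORTS
# and falls back to it). The URLs below are placeholders.
import json
import os

os.environ["IGL_CONTRY_DOCUMENT_REPORTS"] = json.dumps({
    "AU": "https://documents.au.example.com/documents/",
    "SG": "https://documents.sg.example.com/documents/",
})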
class MultichannelWorker(object):
    """
    Iterate over the RouteToChannelUseCase.
    """

    ROUTING_TABLE = env_json("IGL_MCHR_ROUTING_TABLE", default=[])

    def _prepare_outbox_repo(self, conf):
        outbox_repo_conf = env_postgres_config('PROC_BCH_OUTBOX')
        if conf:
            outbox_repo_conf.update(conf)
        self.outbox_repo = ApiOutboxRepo(outbox_repo_conf)

    def _prepare_message_updates_repo(self, conf):
        # This repo is used to talk to the message updater microservice,
        # which just changes statuses in the message lake
        repo_conf = env_queue_config('MCHR_MESSAGE_UPDATES_REPO', use_default=False)
        if not repo_conf:
            repo_conf = env_queue_config('BCH_MESSAGE_UPDATES')
        if conf:
            repo_conf.update(conf)
        self.message_updates_repo = MessageUpdatesRepo(repo_conf)

    def _prepare_use_cases(self):
        self.uc = RouteToChannelUseCase(self.ROUTING_TABLE)

    def _prepare_channels(self):
        """
        For each channel in the routing table we create a channel object and
        put it back into the routing table, so the underlying use-cases don't
        have to think about it at all and just use the object.
        """
        for routing_rule in self.ROUTING_TABLE:
            routing_rule["ChannelInstance"] = HttpApiChannel(routing_rule.copy())
        return

    def _update_message_status(self, msg, new_status, channel_id=None, channel_msg_id=None):
        # In the message lake
        # if channel_id == DiscreteGenericMemoryChannel.ID:
        #     channel_response = json.loads(channel_response)
        #     channel_txn_id = channel_response['link'].split('=')[1]
        # else:
        #     return False
        patch_data = {
            gd.STATUS_KEY: new_status,
        }
        if channel_id and channel_msg_id:
            patch_data.update({
                gd.CHANNEL_ID_KEY: channel_id,
                gd.CHANNEL_TXN_ID_KEY: channel_msg_id,
            })
        return self.message_updates_repo.post_job(
            {
                'message': msg.to_dict(),
                'patch': patch_data
            },
            delay_seconds=random.randint(2, 7))

    def __init__(self,
                 outbox_repo_conf=None,
                 channel_pending_message_repo_conf=None,
                 message_updates_repo_conf=None,
                 config=None):
        # self._prepare_config(config)
        self._prepare_outbox_repo(outbox_repo_conf)
        # self._prepare_channel_pending_message_repo(channel_pending_message_repo_conf)
        self._prepare_message_updates_repo(message_updates_repo_conf)
        self._prepare_use_cases()
        self._prepare_channels()

    def __iter__(self):
        logger.info(
            "Starting the multichannel worker with channels %s",
            [ch["Name"] for ch in self.ROUTING_TABLE])
        return self

    def __next__(self):
        try:
            pg_msg = self.outbox_repo.get_next_pending_message()
            if not pg_msg:
                return None
            logger.info("Processing message %s (%s)", pg_msg, pg_msg.id)
            self.outbox_repo.patch(pg_msg.id, {'status': 'sending'})

            # If the message wasn't posted to any channel, the use case treats
            # it as an acceptable outcome, so we just silently return None.
            # BUT we probably want to change the status of the message in
            # outbox_repo as well.

            # First we convert the message from
            # intergov.repos.api_outbox.postgres_objects.Message
            # to
            # intergov.domain.wire_protocols.generic_discrete.Message
            # (relevant while we use postgres as the storage for the outbox repo)
            assert isinstance(pg_msg, PostgresMessageRepr)
            gd_msg = gd.Message.from_dict(pg_msg.to_dict())
            try:
                result = self.uc.execute(gd_msg)
            except Exception as e:
                # sleep some seconds after failures
                logger.error(
                    "[%s] Rejecting due to use-case exception %s",
                    gd_msg.sender_ref, str(e))
                self.outbox_repo.patch(pg_msg.id, {'status': 'rejected'})
                for i in range(random.randint(30, 100)):
                    time.sleep(0.1)
                return False
            if result:
                # the message has been sent somewhere
                recipient_channel_id, recipient_channel_message_id = result
                logger.info(
                    "[%s] The message has been sent to channel %s",
                    gd_msg.sender_ref, recipient_channel_id)
                self._update_message_status(
                    gd_msg,
                    new_status="accepted",
                    channel_id=recipient_channel_id,
                    channel_msg_id=recipient_channel_message_id)
                if not self.outbox_repo.patch(pg_msg.id, {'status': 'accepted'}):
                    logger.warning("[%s] Failed to update msg in outbox", gd_msg.sender_ref)
                    result = False
                else:
                    result = True
            else:
                # no channel accepted the message, or there was some other error
                logger.warning("[%s] Message has NOT been sent", gd_msg.sender_ref)
                self._update_message_status(gd_msg, "rejected")
                self.outbox_repo.patch(pg_msg.id, {'status': 'rejected'})
                result = False
            return result
        except Exception as e:
            logger.exception(e)
            return None
        return True
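# A rough usage sketch of how the iterator protocol above could be driven by a
# long-running processor; the entry-point shape and the back-off delay are
# assumptions, not taken from the project. __next__ returns None when the outbox
# is empty or an unexpected error occurred, True when a message was routed and
# False when it was rejected, and it never raises StopIteration, so the loop
# runs forever.
import time

if __name__ == "__main__":
    worker = MultichannelWorker()  # the class defined above
    for result in worker:
        if result is None:
            time.sleep(1)  # nothing pending; back off before the next poll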
def test_simple():
    # test env_none
    assert not os.environ.get(TEST_ENV_NONE_KEY)
    assert not conf.env_none(TEST_ENV_NONE_KEY)
    assert TEST_DEFAULT == conf.env_none(TEST_ENV_NONE_KEY, default=TEST_DEFAULT)
    os.environ[TEST_ENV_NONE_KEY] = TEST_NON_DEFAULT_VALUE
    assert TEST_NON_DEFAULT_VALUE == conf.env_none(TEST_ENV_NONE_KEY, default=TEST_DEFAULT)
    os.environ[TEST_ENV_NONE_KEY] = ""
    assert conf.env_none(TEST_ENV_NONE_KEY) is None

    # test env_bool
    assert not os.environ.get(TEST_ENV_BOOL_VALUE_KEY)
    for env_value, value in TEST_ENV_BOOL_VALUES:
        os.environ[TEST_ENV_BOOL_VALUE_KEY] = env_value
        assert conf.env_bool(TEST_ENV_BOOL_VALUE_KEY) == value
    del os.environ[TEST_ENV_BOOL_VALUE_KEY]
    assert conf.env_bool(TEST_ENV_BOOL_VALUE_KEY, default=True)
    assert conf.env_bool(TEST_ENV_BOOL_VALUE_KEY, default=None, nullable=True) is None
    os.environ[TEST_ENV_BOOL_VALUE_KEY] = 'False'
    assert conf.env_bool(TEST_ENV_BOOL_VALUE_KEY, default=True) is False

    # not-nullable exception
    del os.environ[TEST_ENV_BOOL_VALUE_KEY]
    with pytest.raises(Exception) as e:
        conf.env_bool(TEST_ENV_BOOL_VALUE_KEY, nullable=False, default=None)
    assert str(e.value) == "Variable {} can't be None".format(TEST_ENV_BOOL_VALUE_KEY)

    # unknown string value
    os.environ[TEST_ENV_BOOL_VALUE_KEY] = "FalseFalseTrue"
    with pytest.raises(Exception) as e:
        conf.env_bool(TEST_ENV_BOOL_VALUE_KEY)
    assert str(e.value) == "Unknown value for variable {}: '{}'".format(
        TEST_ENV_BOOL_VALUE_KEY, os.environ[TEST_ENV_BOOL_VALUE_KEY])

    # invalid default value
    del os.environ[TEST_ENV_BOOL_VALUE_KEY]
    with pytest.raises(Exception) as e:
        conf.env_bool(TEST_ENV_BOOL_VALUE_KEY, default=dict(msg="Hello"))
    assert str(e.value) == "Unknown value type for variable {}: '{}'".format(
        TEST_ENV_BOOL_VALUE_KEY, dict)

    # test env_json
    assert not os.environ.get(TEST_ENV_JSON_KEY)
    os.environ[TEST_ENV_JSON_KEY] = json.dumps(TEST_ENV_JSON_DICT)
    assert conf.env_json(TEST_ENV_JSON_KEY) == TEST_ENV_JSON_DICT
    del os.environ[TEST_ENV_JSON_KEY]
    assert conf.env_json(
        TEST_ENV_JSON_KEY, default=TEST_ENV_JSON_DEFAULT) == TEST_ENV_JSON_DEFAULT
    os.environ[TEST_ENV_JSON_KEY] = TEST_ENV_INVALID_JSON_STR
    with pytest.raises(ValueError):
        conf.env_json(TEST_ENV_JSON_KEY)
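# The test above relies on module-level imports and constants; the names come
# from the test body, while the concrete values here are assumptions for
# illustration only (the real fixtures may differ, e.g. env_bool may accept
# more spellings than shown in TEST_ENV_BOOL_VALUES).
import json
import os

import pytest

from intergov import conf  # import path is an assumption

TEST_ENV_NONE_KEY = "TEST_ENV_NONE"
TEST_DEFAULT = "default-value"
TEST_NON_DEFAULT_VALUE = "non-default-value"
TEST_ENV_BOOL_VALUE_KEY = "TEST_ENV_BOOL"
TEST_ENV_BOOL_VALUES = [("True", True), ("False", False)]
TEST_ENV_JSON_KEY = "TEST_ENV_JSON"
TEST_ENV_JSON_DICT = {"a": 1, "b": ["c", "d"]}
TEST_ENV_JSON_DEFAULT = {"default": True}
TEST_ENV_INVALID_JSON_STR = "{not valid json"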