Beispiel #1
0
import random
import time

from intergov.conf import env, env_s3_config, env_queue_config
from intergov.domain.country import Country
from intergov.repos.object_lake import ObjectLakeRepo
from intergov.repos.object_retrieval import ObjectRetrievalRepo
from intergov.repos.object_acl import ObjectACLRepo
from intergov.use_cases import RetrieveAndStoreForeignDocumentsUseCase

from intergov.loggers import logging

logger = logging.getLogger('obj_spider')


class ObjectSpider(object):
    """
    Iterate over the RetrieveAndStoreForeignDocumentUseCase.

    Worker that repeatedly runs the use case to retrieve foreign documents
    and store them locally.
    NOTE(review): ``_prepare_repos`` and ``_prepare_use_case`` are defined
    outside this excerpt — confirm they consume ``self.repo_conf`` built below.
    """
    def __init__(self):
        # Configuration must be built first; the repos and use case depend on it.
        self._prepare_repos_confs()
        self._prepare_repos()
        self._prepare_use_case()

    def _prepare_repos_confs(self):
        # Read each repository's configuration from the environment,
        # keyed by repo name.
        self.repo_conf = {
            'object_lake': env_s3_config('PROC_OBJ_SPIDER_OBJ_LAKE'),
            'object_retrieval':
            env_queue_config('PROC_OBJ_SPIDER_OBJ_RETRIEVAL'),
            'object_acl': env_s3_config('PROC_OBJ_SPIDER_OBJ_ACL'),
        }
from intergov.loggers import logging
from intergov.monitoring import statsd_timer
from intergov.use_cases.common import BaseUseCase

logger = logging.getLogger(__name__)


class EnqueueMessageFailure(Exception):
    """Raised when a message cannot be enqueued."""


class EnqueueMessageUseCase(BaseUseCase):
    """
    Used by the message_api(tx) and message_rx_api.

    The message is received from:
        foreign source (blockchain or other)
        local source (exporter_app, chambers_app, etc)

    It already contains a sender_ref value in both cases (set by a remote
    party, or by the message_api for local messages).

    Quickly fires the message to the message queue (SQS or elasticMQ)
    and exits, leaving real processing to the next layers so that API
    users are not required to wait extra time.
    """

    def __init__(self, bc_inbox_repo):
        # Repository wrapping the inbound ("bc inbox") message queue.
        self.bc_inbox = bc_inbox_repo

    @statsd_timer("usecase.EnqueueMessageUseCase.execute")
Beispiel #3
0
import time
import requests
from http import HTTPStatus
from intergov.apis.common.interapi_auth import AuthMixin
from intergov.conf import env_queue_config
from intergov.domain.wire_protocols import generic_discrete as gd
from intergov.repos.message_updates import MessageUpdatesRepo
from intergov.loggers import logging
from intergov.processors.common.env import (
    MESSAGE_PATCH_API_ENDPOINT,
    MESSAGE_PATCH_API_ENDPOINT_AUTH,
)
from intergov.processors.common.utils import get_message_patch_api_endpoint_auth_params

logger = logging.getLogger('message_updater')


class MessageUpdater(AuthMixin, object):
    """
    Iterate over message update jobs:

    * get a job from the queue
    * after some job validation, update the message using the API
    * if successful, delete the job from the queue
    * if unsuccessful, increment the retry counter and reschedule the attempt

    NOTE(review): AuthMixin presumably supplies the inter-API auth used for
    the message PATCH calls — confirm against interapi_auth.
    """

    # TODO: FIXME: push business logic into a testable use_case object
    # maybe also put the "update job" into a request model
    # TODO: tar-pit algorithm on retries?
    # (prevent thundering herd after outage)
Beispiel #4
0
import time

from intergov.conf import env_queue_config
from intergov.repos.delivery_outbox import DeliveryOutboxRepo
from intergov.use_cases import DeliverCallbackUseCase

from intergov.loggers import logging

logger = logging.getLogger('callback_deliver')


class CallbacksDeliveryProcessor(object):
    """
    Worker wrapper that builds and iterates the DeliverCallbackUseCase.
    """

    def __init__(self, delivery_outbox_repo_conf=None):
        # Build the repo first; the use case is constructed from it.
        self._prepare_delivery_outbox_repo(delivery_outbox_repo_conf)
        self._prepare_use_cases()

    def _prepare_delivery_outbox_repo(self, conf):
        # Start from the environment config; the caller's conf (if any)
        # overrides individual keys.
        repo_conf = env_queue_config('PROC_DELIVERY_OUTBOX_REPO')
        if conf:
            repo_conf.update(conf)
        self.delivery_outbox_repo = DeliveryOutboxRepo(repo_conf)

    def _prepare_use_cases(self):
        self.uc = DeliverCallbackUseCase(
            delivery_outbox_repo=self.delivery_outbox_repo,
        )
Beispiel #5
0
import random
import time

from intergov.conf import env_s3_config, env_queue_config
from intergov.repos.message_lake import MessageLakeRepo
from intergov.repos.rejected_message import RejectedMessagesRepo
from intergov.use_cases import RejectPendingMessageUseCase

from intergov.loggers import logging

logger = logging.getLogger('rejected_status_updater')


class RejectedStatusUpdater(object):
    """
    Iterate over RejectPendingMessageUseCase.

    Optional conf dicts let the caller (e.g. tests) override the
    environment-derived repository settings.
    """
    def __init__(self,
                 rejected_message_repo_conf=None,
                 message_lake_repo_conf=None):
        self._prepare_repos_confs(rejected_message_repo_conf,
                                  message_lake_repo_conf)
        self._prepare_repos()
        self._prepare_use_case()

    def _prepare_repos_confs(self,
                             rejected_message_repo_conf=None,
                             message_lake_repo_conf=None):
        # Base configs come from the environment.
        # NOTE(review): the override parameters are not applied within this
        # excerpt — presumably .update() calls follow below; confirm.
        self.rejected_messages_repo_conf = env_queue_config(
            'PROC_REJECTED_MESSAGES_REPO')
        self.message_lake_repo_conf = env_s3_config('PROC_MESSAGE_LAKE_REPO')
Beispiel #6
0
import time

from libtrustbridge.websub.repos import NotificationsRepo, DeliveryOutboxRepo, SubscriptionsRepo

from intergov.conf import env_s3_config, env_queue_config

from intergov.use_cases import (
    DispatchMessageToSubscribersUseCase, )

from intergov.loggers import logging

logger = logging.getLogger('callbacks_spreader')


class CallbacksSpreaderProcessor(object):
    """
    Convert each incoming message to a set of messages containing
    (websub_url, message) so they may be sent and fail separately.
    """
    def _prepare_notifications_repo(self, conf):
        # Incoming notifications queue; caller-supplied conf overrides
        # the environment config.
        notifications_repo_conf = env_queue_config('PROC_OBJ_OUTBOX_REPO')
        if conf:
            notifications_repo_conf.update(conf)
        self.notifications_repo = NotificationsRepo(notifications_repo_conf)

    def _prepare_outbox_repo(self, conf):
        # Outgoing per-subscriber delivery queue; same override pattern.
        outbox_repo_conf = env_queue_config('PROC_DELIVERY_OUTBOX_REPO')
        if conf:
            outbox_repo_conf.update(conf)
        self.delivery_outbox_repo = DeliveryOutboxRepo(outbox_repo_conf)
Beispiel #7
0
from urllib.parse import urljoin

import requests

from intergov.loggers import logging
from intergov.use_cases.get_cognito_auth import GetCognitoAuthUseCase

logger = logging.getLogger()


class InvalidSubscriptionParameters(Exception):
    """Raised when subscription parameters are invalid."""


class ChannelApiFailure(Exception):
    """Base error for failed interactions with a channel API."""


class SubscriptionFailure(ChannelApiFailure):
    """Raised when a channel API subscription attempt fails."""


class RequestChannelAPIUseCase:
    """
    Makes HTTP requests against a channel API.

    NOTE(review): only part of this class is visible in this excerpt;
    the auth set-up continues below it.
    """

    # Relative endpoint paths on the channel API.
    CHANNEL_API_GET_MESSAGE_ENDPOINT = '/messages'
    CHANNEL_API_SUBSCRIBE_BY_JURISDICTION_ENDPOINT = '/messages/subscriptions/by_jurisdiction'

    def __init__(self, channel_config):
        # Per-channel configuration; may contain a "ChannelAuth" key
        # (checked just below this excerpt).
        self.config = channel_config
        # Set later when the channel requires Cognito/JWT auth.
        self.auth_use_case = None

        if self.config.get("ChannelAuth") == "Cognito/JWT":
Beispiel #8
0
import time

from intergov.repos.bc_inbox.elasticmq.elasticmqrepo import BCInboxRepo
from intergov.conf import env, env_s3_config, env_queue_config, env_postgres_config
from intergov.repos.api_outbox import ApiOutboxRepo
from intergov.repos.message_lake import MessageLakeRepo
from intergov.repos.object_acl import ObjectACLRepo
from intergov.repos.object_retrieval import ObjectRetrievalRepo
from intergov.repos.notifications import NotificationsRepo
from intergov.use_cases import ProcessMessageUseCase

from intergov.loggers import logging

logger = logging.getLogger('message_processor')


class InboundMessageProcessor(object):
    """
    Efficiently iterate over the ProcessMessageUseCase.
    """

    def _prepare_bc_inbox_repo(self, conf):
        # Inbound message queue repo; caller-supplied conf overrides
        # the environment config.
        bc_inbox_repo_conf = env_queue_config('PROC_BC_INBOX')
        if conf:
            bc_inbox_repo_conf.update(conf)
        self.bc_inbox_repo = BCInboxRepo(bc_inbox_repo_conf)

    def _prepare_message_lake_repo(self, conf):
        message_lake_repo_conf = env_s3_config('PROC_MESSAGE_LAKE')
        if conf:
            message_lake_repo_conf.update(conf)
Beispiel #9
0
from flask import Flask, Response

from intergov.apis.common.errors import handlers
from intergov.loggers import logging  # NOQA

logger = logging.getLogger('dummy-test-helper')

app = Flask(__name__)
handlers.register(app)


@app.route('/response/<int:status_code>/<message>', methods=['GET', 'POST'])
def request_response(status_code, message):
    """Return *message* as the body of a response with *status_code*.

    Dummy test helper: lets a client request any HTTP status/body combo.
    """
    # Lazy %-style logging args: the string is only formatted when the
    # INFO level is actually enabled.
    logger.info(
        'Requested response with status: %s and message:%s',
        status_code, message)
    return Response(message, status=status_code)
Beispiel #10
0
local storage - postgres for example (and for the demo)
"""
import random
import time

from intergov.conf import env_json, env_postgres_config, env_queue_config
from intergov.channels.http_api_channel import HttpApiChannel
from intergov.repos.api_outbox import ApiOutboxRepo
from intergov.repos.api_outbox.postgres_objects import Message as PostgresMessageRepr
from intergov.repos.message_updates import MessageUpdatesRepo
from intergov.loggers import logging
from intergov.domain.wire_protocols import generic_discrete as gd
from intergov.use_cases.route_to_channel import RouteToChannelUseCase

logger = logging.getLogger('multichannel_router')


class MultichannelWorker(object):
    """
    Iterate over the RouteToChannelUseCase.
    """

    # Routing table parsed from a JSON environment variable; empty by default.
    ROUTING_TABLE = env_json("IGL_MCHR_ROUTING_TABLE", default=[])

    def _prepare_outbox_repo(self, conf):
        # Postgres-backed outbox repo; caller-supplied conf overrides
        # the environment config.
        outbox_repo_conf = env_postgres_config('PROC_BCH_OUTBOX')
        if conf:
            outbox_repo_conf.update(conf)
        self.outbox_repo = ApiOutboxRepo(outbox_repo_conf)
Beispiel #11
0
import time
import requests
from http import HTTPStatus

from intergov.conf import env_queue_config
from intergov.repos.channel_pending_message import ChannelPendingMessageRepo
from intergov.repos.message_updates import MessageUpdatesRepo
from intergov.channels.discrete_generic_memory import DiscreteGenericMemoryChannel
from intergov.domain.wire_protocols import generic_discrete as gd
from intergov.loggers import logging


logger = logging.getLogger('channel_poller_worker')


class ChannelPollerWorker:
    # Polls a channel for pending messages and pushes status updates.
    # NOTE(review): the processing loop is defined below this excerpt.

    def _prepare_channel_pending_message_repo(self, conf):
        # Queue of messages awaiting confirmation from the channel;
        # caller-supplied conf overrides the environment config.
        channel_pending_message_repo_conf = env_queue_config('PROC_BCH_CHANNEL_PENDING_MESSAGE')
        if conf:
            channel_pending_message_repo_conf.update(conf)
        self.channel_pending_message_repo = ChannelPendingMessageRepo(channel_pending_message_repo_conf)

    def _prepare_message_updates_repo(self, conf):
        # Queue of message-update jobs to be applied via the message API.
        message_updates_repo_conf = env_queue_config('BCH_MESSAGE_UPDATES')
        if conf:
            message_updates_repo_conf.update(conf)
        self.message_updates_repo = MessageUpdatesRepo(message_updates_repo_conf)

    def _poll_message(self, queue_job):
        result = None
Beispiel #12
0
import requests
from http import HTTPStatus

from events_pb2 import EventSubscription, EventFilter, EventList
from client_event_pb2 import ClientEventsSubscribeRequest, ClientEventsSubscribeResponse
from network_pb2 import PingRequest
from validator_pb2 import Message
import transaction_receipt_pb2

from intergov.domain.wire_protocols import generic_discrete as gd
from intergov.conf import env
from intergov.loggers import logging
from intergov.processors.common.env import (
    MESSAGE_RX_API_ENDPOINT, )

logger = logging.getLogger('bch_observer')

# Country code for this jurisdiction; falls back to 'AU' with a warning.
DEFAULT_IGL_COUNTRY = 'AU'
IGL_COUNTRY = env('IGL_COUNTRY', default=None)
if IGL_COUNTRY is None:
    logger.warning(f'IGL_COUNTRY is undefined using {DEFAULT_IGL_COUNTRY}')
    IGL_COUNTRY = DEFAULT_IGL_COUNTRY

# Hostname of the validator to connect to.
CONNECT_FQDN = env('CONNECT_FQDN', default="memory-validator-default")

# TODO: this is the creation of the subscription request, we probably want to create
# a list of subscriptions based on some configuration (per channel). things that vary
# would be the event_type and the match string (the match string is transaction processor specific)

# Transaction-processor namespace prefix (first 6 hex chars of the family
# name hash).  NOTE(review): `hashlib` is not imported in this excerpt —
# confirm the import exists above.
TP_NAMESPACE = hashlib.sha512(
    'generic-discrete-message'.encode("utf-8")).hexdigest()[0:6]
Beispiel #13
0
import time
import requests
from http import HTTPStatus
from intergov.conf import env_queue_config
from intergov.repos.message_updates import MessageUpdatesRepo
from intergov.domain.wire_protocols import generic_discrete as gd
from intergov.loggers import logging
from intergov.processors.common.env import (MESSAGE_PATCH_API_ENDPOINT)

logger = logging.getLogger('channel_message_updater')


class MessageUpdater(object):
    """
    Iterate over message update jobs:

    * get a job from the queue
    * after some job validation, update the message using the API
    * if successful, delete the job from the queue
    * if unsuccessful, increment the retry counter and reschedule the attempt
    """

    # TODO: FIXME: push business logic into a testable use_case object
    # maybe also put the "update job" into a request model
    # TODO: tar-pit algorithm on retries?
    # (prevent thundering herd after outage)
    def _prepare_message_updates_repo(self, conf):
        message_updates_repo_conf = env_queue_config('BCH_MESSAGE_UPDATES')
        if conf:
            message_updates_repo_conf.update(conf)
        self.message_updates_repo = MessageUpdatesRepo(
Beispiel #14
0
Changes their status in the message_rx_api to "accepted"
Marks them as 'accepted' in the local postgres storage

Real worker would call real blockchain procedures instead.
"""
import random
import time

import requests

from intergov.conf import env, env_queue_config, env_postgres_config
from intergov.repos.api_outbox import ApiOutboxRepo
from intergov.repos.rejected_message import RejectedMessagesRepo
from intergov.loggers import logging

logger = logging.getLogger('loopback_bch_worker')


class LoopbackBlockchainWorker(object):
    # Demo worker that short-circuits the blockchain round-trip.
    # NOTE(review): the worker methods are defined below this excerpt.

    # When non-zero, presumably every Nth message is rejected (for
    # testing the rejection path) — TODO confirm against the usage below.
    REJECT_EACH = int(
        env('IGL_PROC_LOOPBACK_BCH_WORKER_REJECT_EACH', default=0))

    # PATCH endpoint template for updating a message's status.
    MESSAGE_PATCH_API_ENDPOINT = env(
        'IGL_PROC_BCH_MESSAGE_API_ENDPOINT',
        default='http://message_api:5101/message/{sender}:{sender_ref}')

    # Endpoint for posting received ("foreign") messages.
    MESSAGE_RX_API_ENDPOINT = env(
        'IGL_PROC_BCH_MESSAGE_RX_API_URL',
        default='http://message_rx_api:5100/messages')
Beispiel #15
0
"""
import copy
import json
import random
import time

from intergov.conf import env, env_postgres_config, env_queue_config
from intergov.repos.api_outbox import ApiOutboxRepo
from intergov.repos.channel_pending_message import ChannelPendingMessageRepo
from intergov.repos.message_updates import MessageUpdatesRepo
from intergov.loggers import logging
from intergov.domain.wire_protocols import generic_discrete as gd
from intergov.use_cases.route_to_channel import RouteToChannelUseCase
from intergov.channels.discrete_generic_memory import DiscreteGenericMemoryChannel

logger = logging.getLogger('multichannel_bch_worker')

# NOTE: this is a kludge — a properly configured channel registry is
# needed instead of this hard-coded fallback channel list.
DEFAULT_CONFIG = [{
    'id': 'DiscreteGenericMemoryChannel',
    'type': DiscreteGenericMemoryChannel,
}]


class MultiChannelBlockchainWorker(object):
    """
    Iterate over the RouteToChannelUseCase.

    NOTE(review): only the class header is visible in this excerpt;
    the configuration handling continues below.
    """
    def _prepare_config(self, config):
        if config: