def simple_app(environ, start_response):
    """Simple WSGI application."""
    # Note: 'self' and 'section_size' refer to names defined in the
    # enclosing scope in which this WSGI callable is defined.
    setup_testing_defaults(environ)

    # Identify log and section from request.
    path_info = environ["PATH_INFO"]
    try:
        section_id = path_info.strip("/").split("/")[-1]
    except ValueError:
        # Start response.
        status = "404 Not Found"
        headers = [("Content-type", "text/plain; charset=utf-8")]
        start_response(status, headers)
        return []

    # Select the notification log.
    notification_log = self.create_notification_log(section_size)

    # Get serialized section.
    json_encoder = ObjectJSONEncoder(separators=JSON_SEPARATORS)
    view = NotificationLogView(notification_log, json_encoder)

    resource = view.present_resource(section_id)
    # Todo: Maybe redirect if the section ID is a mismatch, so
    # the URL is good for caching.

    # Start response.
    status = "200 OK"
    headers = [("Content-type", "text/plain; charset=utf-8")]
    start_response(status, headers)

    # Return a list of lines.
    return [(line + "\n").encode("utf8") for line in resource.split("\n")]
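For context, a WSGI callable like this is normally mounted on a server; a minimal sketch using the standard library's wsgiref server, with a placeholder app standing in for simple_app (which closes over self and section_size and so cannot be served standalone):

# Minimal serving sketch; placeholder_app stands in for simple_app above.
from wsgiref.simple_server import make_server

def placeholder_app(environ, start_response):
    start_response("200 OK", [("Content-type", "text/plain; charset=utf-8")])
    return [b"{}\n"]

if __name__ == "__main__":
    with make_server("127.0.0.1", 8080, placeholder_app) as httpd:
        httpd.serve_forever()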
Example 2
def hash_object(json_encoder: ObjectJSONEncoder, obj: dict) -> str:
    """
    Calculates SHA-256 hash of JSON encoded 'obj'.

    :param json_encoder: JSON encoder object.
    :param obj: Object to be hashed.
    :return: SHA-256 as hexadecimal string.
    :rtype: str
    """
    s = json_encoder.encode((obj, SALT_FOR_DATA_INTEGRITY))
    return hashlib.sha256(s).hexdigest()
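The salted-hash pattern itself can be shown without the library's encoder; a standalone sketch using json.dumps, with a placeholder salt standing in for SALT_FOR_DATA_INTEGRITY:

# Standalone sketch of the salted SHA-256 pattern; not the library's code.
import hashlib
import json

PLACEHOLDER_SALT = ""  # stands in for SALT_FOR_DATA_INTEGRITY

def sketch_hash_object(obj: dict) -> str:
    # Encode the object together with the salt, then hash the UTF-8 bytes.
    s = json.dumps((obj, PLACEHOLDER_SALT), sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(s.encode("utf8")).hexdigest()

assert sketch_hash_object({"a": 1}) == sketch_hash_object({"a": 1})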
Example 3
    def setUp(self):
        self.encoder = ObjectJSONEncoder(sort_keys=True)
        self.decoder = ObjectJSONDecoder()
Example 4
class DomainEvent(Upcastable, ActualOccasion, Generic[TEntity]):
    """
    Base class for domain model events.

    Implements methods to make instances read-only,
    comparable for equality in Python, and have
    recognisable representations.

    To make domain events hashable, this class also
    implements a method to create a cryptographic hash
    of the state of the event.
    """

    __json_encoder_v2__ = ObjectJSONEncoder(sort_keys=True)
    __json_encoder_v1__ = transcoding_v1.ObjectJSONEncoder(sort_keys=True)
    __notifiable__ = True

    def __init__(self, **kwargs: Any):
        """
        Initialises event attribute values directly from constructor kwargs.
        """
        super().__init__()
        self.__dict__.update(kwargs)

    def __repr__(self) -> str:
        """
        Creates a string representing the type and attribute values of the event.

        :rtype: str
        """
        sorted_items = tuple(sorted(self.__dict__.items()))
        args_strings = ("{0}={1!r}".format(*item) for item in sorted_items)
        args_string = ", ".join(args_strings)
        return "{}({})".format(self.__class__.__qualname__, args_string)

    def __mutate__(self, obj: Optional[TEntity]) -> Optional[TEntity]:
        """
        Updates 'obj' with values from 'self'.

        Calls the 'mutate()' method.

        Can be extended, but subclasses must call super
        and return an object to their caller.

        :param obj: object (normally a domain entity) to be mutated
        :return: mutated object
        """
        if obj is not None:
            self.mutate(obj)
        return obj

    def mutate(self, obj: TEntity) -> None:
        """
        Updates ("mutates") given 'obj'.

        Intended to be overridden by subclasses, as the most
        concise way of coding a default projection of the event
        (for example into the state of a domain entity).

        The advantage of implementing a default projection
        using this method rather than __mutate__ is that you
        don't need to call super or return a value.

        :param obj: domain entity to be mutated
        """

    def __setattr__(self, key: Any, value: Any) -> None:
        """
        Inhibits event attributes from being updated by assignment.
        """
        raise AttributeError("DomainEvent attributes are read-only")

    def __eq__(self, other: object) -> bool:
        """
        Tests for equality of two event objects.

        :rtype: bool
        """
        return isinstance(other,
                          DomainEvent) and self.__hash__() == other.__hash__()

    # Todo: Do we need this in Python 3?
    def __ne__(self, other: object) -> bool:
        """
        Negates the equality test.

        :rtype: bool
        """
        return not (self == other)

    def __hash__(self) -> int:
        """
        Computes a Python integer hash for an event.

        Supports Python equality and inequality comparisons.

        :return: Python integer hash
        :rtype: int
        """
        attrs = self.__dict__.copy()

        # Involve the topic in the hash, so that different types
        # with same attribute values have different hash values.
        attrs["__event_topic__"] = get_topic(type(self))

        # Calculate the cryptographic hash of the event.
        sha256_hash = self.__hash_object_v2__(attrs)

        # Return the Python hash of the cryptographic hash.
        return hash(sha256_hash)

    @classmethod
    def __hash_object_v2__(cls, obj: dict) -> str:
        """
        Calculates SHA-256 hash of JSON encoded 'obj'.

        :param obj: Object to be hashed.
        :return: SHA-256 as hexadecimal string.
        :rtype: str
        """
        return hash_object(cls.__json_encoder_v2__, obj)

    @classmethod
    def __hash_object_v1__(cls, obj: dict) -> str:
        """
        Calculates SHA-256 hash of JSON encoded 'obj'.

        :param obj: Object to be hashed.
        :return: SHA-256 as hexadecimal string.
        :rtype: str
        """
        return hash_object(cls.__json_encoder_v1__, obj)
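A brief usage sketch, assuming the eventsourcing (v8.x) imports above resolve; the subclass and attribute names are made up for illustration:

# Hypothetical subclass illustrating read-only attributes and hash-based equality.
class SomethingHappened(DomainEvent):
    pass

event_a = SomethingHappened(what="example")
event_b = SomethingHappened(what="example")

assert event_a == event_b                 # same topic and attributes -> equal
assert hash(event_a) == hash(event_b)

try:
    event_a.what = "changed"              # __setattr__ raises
except AttributeError:
    pass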
Example 5
    def test_encode(self):
        encoder = ObjectJSONEncoder()

        value = 1
        expect = '1'
        self.assertEqual(encoder.encode(value), expect)

        value = datetime.datetime(2011, 1, 1, 1, 1, 1)
        expect = '{"ISO8601_datetime": "2011-01-01T01:01:01.000000"}'
        self.assertEqual(encoder.encode(value), expect)

        value = datetime.datetime(2011, 1, 1, 1, 1, 1, tzinfo=utc_timezone)
        expect = '{"ISO8601_datetime": "2011-01-01T01:01:01.000000+0000"}'
        self.assertEqual(encoder.encode(value), expect)

        value = datetime.date(2011, 1, 1)
        expect = '{"ISO8601_date": "2011-01-01"}'
        self.assertEqual(expect, encoder.encode(value))

        value = datetime.time(23, 59, 59, 123456)
        expect = '{"ISO8601_time": "23:59:59.123456"}'
        self.assertEqual(encoder.encode(value), expect)

        value = Decimal('59.123456')
        expect = '{"__decimal__": "59.123456"}'
        self.assertEqual(encoder.encode(value), expect)

        value = NAMESPACE_URL
        expect = '{"UUID": "6ba7b8119dad11d180b400c04fd430c8"}'
        self.assertEqual(encoder.encode(value), expect)

        value = Object(NAMESPACE_URL)
        self.assertEqual(
            value.__class__.__module__, 'eventsourcing.tests.test_transcoding',
            "Module does not have full path, Python 2.7 issue with unittest"
            " (need to run test from root dir")
        expect = (
            '{"__class__": {"state": {"a": {"UUID": "6ba7b8119dad11d180b400c04fd430c8"}}, '
            '"topic": "eventsourcing.tests.test_transcoding#Object"}}')
        self.check_encoded_value(encoder, value, expect)

        value = deque()
        expect = '{"__deque__": []}'
        self.assertEqual(encoder.encode(value), expect)

        # Check it defers to the base class to raise TypeError.
        # - a type object isn't supported at the moment, hence this test works
        with self.assertRaises(TypeError):
            encoder.encode(object)
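The single-key tagging convention these expected strings exercise can be sketched independently with a plain json.JSONEncoder subclass (an illustration of the pattern, not the library's implementation):

# Independent sketch of the tagging pattern; not the library's ObjectJSONEncoder.
import datetime
import json

class TaggingEncoder(json.JSONEncoder):
    def default(self, obj):
        # Wrap unsupported types in a single-key dict naming the type.
        if isinstance(obj, datetime.datetime):
            return {"ISO8601_datetime": obj.strftime("%Y-%m-%dT%H:%M:%S.%f%z")}
        if isinstance(obj, datetime.date):
            return {"ISO8601_date": obj.isoformat()}
        return super().default(obj)

print(TaggingEncoder().encode(datetime.datetime(2011, 1, 1, 1, 1, 1)))
# {"ISO8601_datetime": "2011-01-01T01:01:01.000000"}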
Example 6
    def __init__(self):
        self.channel = None
        self.json_encoder = ObjectJSONEncoder()
        self.json_decoder = ObjectJSONDecoder()
Example 7
class ProcessorClient(object):
    def __init__(self):
        self.channel = None
        self.json_encoder = ObjectJSONEncoder()
        self.json_decoder = ObjectJSONDecoder()

    def connect(self, address, timeout=5):
        """
        Connects the client to the server at the given address.

        Calls ping() until it gets a response, or timeout is reached.
        """
        self.close()
        self.channel = grpc.insecure_channel(address)
        self.stub = ProcessorStub(self.channel)

        timer_started = datetime.now()
        while True:
            # Ping until get a response.
            try:
                self.ping()
            except _InactiveRpcError:
                if timeout is not None:
                    timer_duration = (datetime.now() -
                                      timer_started).total_seconds()
                    if timer_duration > timeout:
                        raise Exception("Timed out trying to connect to %s" %
                                        address)
                else:
                    continue
            else:
                break

    # def __enter__(self):
    #     return self
    #
    # def __exit__(self, exc_type, exc_val, exc_tb):
    #     self.close()
    #
    def close(self):
        """
        Closes the client's gRPC channel.
        """
        if self.channel is not None:
            self.channel.close()

    def ping(self):
        """
        Sends a Ping request to the server.
        """
        request = Empty()
        response = self.stub.Ping(request, timeout=5)

    # def follow(self, upstream_name, upstream_address):
    #     request = FollowRequest(
    #         upstream_name=upstream_name, upstream_address=upstream_address
    #     )
    #     response = self.stub.Follow(request, timeout=5,)

    def prompt(self, upstream_name):
        """
        Prompts the downstream server with the upstream name, so that the
        downstream process will promptly pull new notifications from the
        upstream process.
        """
        request = PromptRequest(upstream_name=upstream_name)
        response = self.stub.Prompt(request, timeout=5)

    def get_notifications(self, section_id):
        """
        Gets a section of event notifications from server.
        """
        request = NotificationsRequest(section_id=section_id)
        notifications_reply = self.stub.GetNotifications(request, timeout=5)
        assert isinstance(notifications_reply, NotificationsReply)
        return notifications_reply.section

    def lead(self, application_name, address):
        """
        Requests the server's process to connect a client to the given
        address, and then send prompts to it.
        """
        request = LeadRequest(downstream_name=application_name,
                              downstream_address=address)
        response = self.stub.Lead(request, timeout=5)

    def call_application(self, method_name, *args, **kwargs):
        """
        Calls named method on server's application with given args.
        """
        request = CallRequest(
            method_name=method_name,
            args=self.json_encoder.encode(args),
            kwargs=self.json_encoder.encode(kwargs),
        )
        response = self.stub.CallApplicationMethod(request, timeout=5)
        return self.json_decoder.decode(response.data)
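A brief usage sketch, assuming a processor server is already listening on the address below; the address and application method name are made up:

# Illustrative only: address and method name are hypothetical.
client = ProcessorClient()
client.connect("localhost:50051", timeout=5)
client.ping()
result = client.call_application("get_balance", account_id="abc123")
client.close()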
Example 9
class ProcessorServer(ProcessorServicer):
    def __init__(
        self,
        application_topic,
        pipeline_id,
        infrastructure_topic,
        setup_table,
        address,
        upstreams,
        downstreams,
        push_prompt_interval,
    ):
        super(ProcessorServer, self).__init__()

        # Make getting notifications more efficient.
        notificationlog.USE_REGULAR_SECTIONS = False
        notificationlog.DEFAULT_SECTION_SIZE = 100

        self.has_been_stopped = Event()
        signal(SIGINT, self.stop)
        self.application_class: Type[ProcessApplication] = resolve_topic(
            application_topic)
        self.pipeline_id = pipeline_id
        self.application_name = self.application_class.create_name()
        infrastructure_class: Type[
            ApplicationWithConcreteInfrastructure] = resolve_topic(
                infrastructure_topic)
        self.application = self.application_class.mixin(
            infrastructure_class=infrastructure_class)(
                pipeline_id=self.pipeline_id, setup_table=setup_table)
        self.address = address
        self.json_encoder = ObjectJSONEncoder()
        self.json_decoder = ObjectJSONDecoder()
        self.upstreams = upstreams
        self.downstreams = downstreams
        self.prompt_events = {}
        self.push_prompt_interval = push_prompt_interval

        self.notification_log_view = NotificationLogView(
            self.application.notification_log,
            json_encoder=ObjectJSONEncoder(),
        )
        for upstream_name in self.upstreams:
            self.prompt_events[upstream_name] = Event()
            # self.prompt_events[upstream_name].set()

        self.downstream_prompt_event = Event()
        subscribe(self._set_downstream_prompt_event, is_prompt_to_pull)

        self.serve()

        self.clients: Dict[str, ProcessorClient] = {}
        self.clients_lock = Lock()
        start_client_threads = []
        remotes = {}
        remotes.update(self.upstreams)
        remotes.update(self.downstreams)
        for name, address in remotes.items():
            thread = StartClient(self.clients, name, address)
            thread.setDaemon(True)
            thread.start()
            start_client_threads.append(thread)
        for thread in start_client_threads:
            thread.join()
            # logging.info("%s connected to %s" % (self.application_name, thread.name))

        self.push_prompts_thread = Thread(target=self._push_prompts)
        self.push_prompts_thread.setDaemon(True)
        self.push_prompts_thread.start()

        # self.count_of_events = 0

        self.pull_notifications_threads = {}
        self.unprocessed_domain_event_queue = Queue()
        for upstream_name, upstream_address in self.upstreams.items():
            thread = PullNotifications(
                prompt_event=self.prompt_events[upstream_name],
                reader=NotificationLogReader(
                    RemoteNotificationLog(
                        client=self.clients[upstream_name],
                        json_decoder=ObjectJSONDecoder(),
                        section_size=self.application.notification_log.
                        section_size,
                    )),
                process_application=self.application,
                event_queue=self.unprocessed_domain_event_queue,
                upstream_name=upstream_name,
                has_been_stopped=self.has_been_stopped,
            )
            thread.setDaemon(True)
            self.pull_notifications_threads[upstream_name] = thread

        self.process_events_thread = Thread(target=self._process_events)
        self.process_events_thread.setDaemon(True)
        self.process_events_thread.start()

        # Start the threads.
        for thread in self.pull_notifications_threads.values():
            thread.start()

        # Wait for termination.
        self.wait_for_termination()

    def _set_downstream_prompt_event(self, event):
        # logging.info(
        #     "Setting downstream prompt event on %s for %s"
        #     % (self.application_name, event)
        # )
        self.downstream_prompt_event.set()

    def _push_prompts(self) -> None:
        # logging.info("Started push prompts thread")
        while not self.has_been_stopped.is_set():
            try:
                self.__push_prompts()
                sleep(self.push_prompt_interval)
            except Exception as e:
                if not self.has_been_stopped.is_set():
                    logging.error(traceback.format_exc())
                    logging.error(
                        "Continuing after error in 'push prompts' thread: %s",
                        e)
                    sleep(1)

    def __push_prompts(self):
        self.downstream_prompt_event.wait()
        self.downstream_prompt_event.clear()
        # logging.info("Pushing prompts from %s" % self.application_name)
        for downstream_name in self.downstreams:
            client = self.clients[downstream_name]
            if not self.has_been_stopped.is_set():
                client.prompt(self.application_name)

    def _process_events(self) -> None:
        while not self.has_been_stopped.is_set():
            try:
                self.__process_events()
            except Exception as e:
                logging.error(traceback.format_exc())
                logging.error(
                    "Continuing after error in 'process events' thread: %s", e)
                sleep(1)

    def __process_events(self):
        unprocessed_item = self.unprocessed_domain_event_queue.get()
        self.unprocessed_domain_event_queue.task_done()
        if unprocessed_item is None:
            return
        else:
            # Process domain event.
            domain_event, notification_id, upstream_name = unprocessed_item
            # logging.info("Unprocessed event: %s" % domain_event)
            new_events, new_records = self.application.process_upstream_event(
                domain_event, notification_id, upstream_name)

            # Publish a prompt if there are new notifications.
            if any([event.__notifiable__ for event in new_events]):
                self.application.publish_prompt()

    def serve(self):
        """
        Starts gRPC server.
        """
        self.executor = futures.ThreadPoolExecutor(max_workers=10)
        self.server = grpc.server(self.executor)
        # logging.info(self.application_class)
        add_ProcessorServicer_to_server(self, self.server)
        self.server.add_insecure_port(self.address)
        self.server.start()

    def wait_for_termination(self):
        """
        Runs until termination of process.
        """
        self.server.wait_for_termination()

    def Ping(self, request, context):
        return Empty()

    # def Follow(self, request, context):
    #     upstream_name = request.upstream_name
    #     upstream_address = request.upstream_address
    #     self.follow(upstream_name, upstream_address)
    #     return Empty()
    #
    # def follow(self, upstream_name, upstream_address):
    #     """"""
    #     # logging.debug("%s is following %s" % (self.application_name, upstream_name))
    #     self.clients[upstream_name].lead(self.application_name, self.address)

    def Lead(self, request, context):
        downstream_name = request.downstream_name
        downstream_address = request.downstream_address
        self.lead(downstream_name, downstream_address)
        return Empty()

    def lead(self, downstream_name, downstream_address):
        """
        Starts client and registers downstream to receive prompts.
        """
        # logging.debug("%s is leading %s" % (self.application_name, downstream_name))
        thread = StartClient(self.clients, downstream_name, downstream_address)
        thread.setDaemon(True)
        thread.start()
        thread.join()
        if thread.error:
            raise Exception(
                "Couldn't lead '%s' on address '%s': %s" %
                (downstream_name, downstream_address, thread.error))
        else:
            self.downstreams[downstream_name] = downstream_address

    def start_client(self, name, address):
        """
        Starts client connected to given address.
        """
        if name not in self.clients:
            self.clients[name] = ProcessorClient()
            self.clients[name].connect(address)

    def Prompt(self, request, context):
        upstream_name = request.upstream_name
        self.prompt(upstream_name)
        return Empty()

    def prompt(self, upstream_name):
        """
        Sets the prompt event for the given upstream name.
        """
        self.prompt_events[upstream_name].set()

    def GetNotifications(self, request, context):
        section_id = request.section_id
        section = self.get_notification_log_section(section_id)
        return NotificationsReply(section=section)

    def get_notification_log_section(self, section_id):
        """
        Returns section for given section ID.
        """
        return self.notification_log_view.present_resource(
            section_id=section_id)

    def CallApplicationMethod(self, request, context):
        method_name = request.method_name
        # logging.info("Call application method: %s" % method_name)
        args = self.json_decoder.decode(request.args)
        kwargs = self.json_decoder.decode(request.kwargs)
        method = getattr(self.application, method_name)
        return_value = method(*args, **kwargs)
        return CallReply(data=self.json_encoder.encode(return_value))

    def stop(self, *args):
        """
        Stops the gRPC server.
        """
        # logging.debug("Stopping....")
        self.has_been_stopped.set()
        self.server.stop(grace=1)
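
The push-prompts loop above relies on a simple wait/clear coordination on threading.Event; a standalone sketch of that pattern, without gRPC:

# Standalone sketch of the wait/clear prompt pattern used by _push_prompts().
from threading import Event, Thread
from time import sleep

prompt_event = Event()
has_been_stopped = Event()

def push_prompts():
    while not has_been_stopped.is_set():
        prompt_event.wait(timeout=1)   # block until prompted (or time out)
        if prompt_event.is_set():
            prompt_event.clear()
            print("pushing prompts downstream")

worker = Thread(target=push_prompts, daemon=True)
worker.start()

prompt_event.set()    # something happened upstream; wake the pusher
sleep(0.1)
has_been_stopped.set()
worker.join(timeout=2)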