Example #1
    def test_causal_dependencies(self):
        # Try to process an event that has unresolved causal dependencies.
        pipeline_id1 = 0
        pipeline_id2 = 1

        # Create two events, one has causal dependency on the other.
        process_class = ProcessApplication.mixin(self.infrastructure_class)
        core1 = process_class(
            name="core",
            # persist_event_type=ExampleAggregate.Created,
            persist_event_type=BaseAggregateRoot.Event,
            setup_table=True,
            pipeline_id=pipeline_id1,
        )
        core1.use_causal_dependencies = True

        kwargs = {}
        if self.infrastructure_class.is_constructed_with_session:
            # Needed for SQLAlchemy only.
            kwargs["session"] = core1.session

        core2 = process_class(name="core",
                              pipeline_id=pipeline_id2,
                              policy=example_policy,
                              **kwargs)
        core2.use_causal_dependencies = True

        # First event in pipeline 1.
        aggregate = ExampleAggregate.__create__()
        aggregate.__save__()

        # Second event in pipeline 2.
        # - it's important this is done in a policy, so that the causal
        #   dependencies are identified.
        core2.follow("core", core1.notification_log)
        core2.run()

        # Check the aggregate exists.
        self.assertIn(aggregate.id, core1.repository)

        # Check the aggregate has been "moved on".
        aggregate = core1.repository[aggregate.id]
        self.assertTrue(aggregate.is_moved_on)
        self.assertTrue(aggregate.second_id)
        self.assertIn(aggregate.second_id, core1.repository)

        # Check the events have different pipeline IDs.
        aggregate_records = core1.event_store.record_manager.get_records(
            aggregate.id)
        second_entity_records = core1.event_store.record_manager.get_records(
            aggregate.second_id)

        self.assertEqual(2, len(aggregate_records))
        self.assertEqual(1, len(second_entity_records))

        self.assertEqual(pipeline_id1, aggregate_records[0].pipeline_id)
        self.assertEqual(pipeline_id2, aggregate_records[1].pipeline_id)
        self.assertEqual(pipeline_id2, second_entity_records[0].pipeline_id)

        # Check the causal dependencies have been constructed.
        # - the first 'Created' event doesn't have any causal dependencies
        self.assertFalse(aggregate_records[0].causal_dependencies)

        # - the second 'Created' event depends on the Created event in another pipeline.
        expect = [{"notification_id": 1, "pipeline_id": pipeline_id1}]
        actual = ObjectJSONDecoder().decode(
            second_entity_records[0].causal_dependencies)

        self.assertEqual(expect, actual)

        # - the 'AttributeChanged' event depends on the second Created,
        # which is in the same pipeline, so expect no causal dependencies.
        self.assertFalse(aggregate_records[1].causal_dependencies)

        # Setup downstream process.
        downstream1 = process_class(name="downstream",
                                    pipeline_id=pipeline_id1,
                                    policy=event_logging_policy,
                                    **kwargs)
        downstream1.follow("core", core1.notification_log)
        downstream2 = process_class(name="downstream",
                                    pipeline_id=pipeline_id2,
                                    policy=event_logging_policy,
                                    **kwargs)
        downstream2.follow("core", core2.notification_log)

        # Try to process pipeline 2, should fail due to causal dependency.
        with self.assertRaises(CausalDependencyFailed):
            downstream2.run()

        self.assertEqual(
            0, len(list(downstream1.event_store.record_manager.get_notifications())))
        self.assertEqual(
            0, len(list(downstream2.event_store.record_manager.get_notifications())))

        # Try to process pipeline 1, should work.
        downstream1.run()

        self.assertEqual(
            1, len(list(downstream1.event_store.record_manager.get_notifications())))
        self.assertEqual(
            0, len(list(downstream2.event_store.record_manager.get_notifications())))

        # Try again to process pipeline 2, should work this time.
        downstream2.run()

        self.assertEqual(
            1, len(list(downstream1.event_store.record_manager.get_notifications())))
        self.assertEqual(
            2, len(list(downstream2.event_store.record_manager.get_notifications())))

        core1.close()
        core2.close()
        downstream1.close()
        downstream2.close()
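
The causal_dependencies column checked above holds a JSON-encoded list of
{"notification_id": ..., "pipeline_id": ...} dicts. A minimal sketch of the
round trip the assertions rely on, assuming ObjectJSONEncoder and
ObjectJSONDecoder are importable from eventsourcing.utils.transcoding as in
this version of the library:

from eventsourcing.utils.transcoding import ObjectJSONDecoder, ObjectJSONEncoder

# Encode a dependency list the way the 'core' process would store it,
# then decode it the way the test reads it back from the record.
deps = [{"notification_id": 1, "pipeline_id": 0}]
text = ObjectJSONEncoder(sort_keys=True).encode(deps)
assert ObjectJSONDecoder().decode(text) == deps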
Example #2
    def setUp(self):
        self.encoder = ObjectJSONEncoder(sort_keys=True)
        self.decoder = ObjectJSONDecoder()
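
A hypothetical companion test using these fixtures; the method name and value
are illustrative, not from the original suite (assumes the test module imports
datetime, as the decode test below does):

    def test_roundtrip_datetime(self):
        # Encode with the fixture encoder, decode with the fixture decoder,
        # and expect the original value back.
        value = datetime.datetime(2011, 1, 1, 1, 1, 1)
        encoded = self.encoder.encode(value)
        self.assertEqual(self.decoder.decode(encoded), value)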
Example #3
    def test_decode(self):
        decoder = ObjectJSONDecoder()
        self.assertEqual(decoder.decode('1'), 1)

        value = '{"ISO8601_datetime": "2011-01-01T01:01:01.000000"}'
        expect = datetime.datetime(2011, 1, 1, 1, 1, 1)
        self.assertEqual(decoder.decode(value), expect)

        value = '{"ISO8601_datetime": "2011-01-01T01:01:01.000000+0000"}'
        expect = datetime.datetime(2011, 1, 1, 1, 1, 1, tzinfo=utc_timezone)
        self.assertEqual(decoder.decode(value), expect)

        value = '{"ISO8601_date": "2011-01-01"}'
        expect = datetime.date(2011, 1, 1)
        self.assertEqual(decoder.decode(value), expect)

        value = '{"UUID": "6ba7b8119dad11d180b400c04fd430c8"}'
        expect = NAMESPACE_URL
        self.assertEqual(decoder.decode(value), expect)

        value = '{"ISO8601_time": "23:59:59.123456"}'
        expect = datetime.time(23, 59, 59, 123456)
        self.assertEqual(decoder.decode(value), expect)

        value = '{"__decimal__": "59.123456"}'
        expect = Decimal('59.123456')
        self.assertEqual(decoder.decode(value), expect)

        value = '{"__deque__": []}'
        expect = deque()
        self.assertEqual(decoder.decode(value), expect)

        value = (
            '{"__class__": {"state": {"a": {"UUID": "6ba7b8119dad11d180b400c04fd430c8"}}, '
            '"topic": "eventsourcing.tests.test_transcoding#Object"}}')
        expect = Object(NAMESPACE_URL)
        self.assertEqual(decoder.decode(value), expect)

        # Check raises ValueError when JSON string is invalid.
        with self.assertRaises(ValueError):
            decoder.decode('{')
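
The tagged forms accepted above imply a complementary encoder. A brief
round-trip sketch, assuming ObjectJSONEncoder emits the same tagged JSON that
ObjectJSONDecoder accepts (imports as in the surrounding test module):

encoder = ObjectJSONEncoder()
decoder = ObjectJSONDecoder()

# Each value the decoder can produce should survive an encode/decode cycle.
for value in (datetime.date(2011, 1, 1),
              datetime.time(23, 59, 59, 123456),
              Decimal('59.123456'),
              NAMESPACE_URL,
              deque()):
    assert decoder.decode(encoder.encode(value)) == value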
Example #4
class ProcessorClient(object):
    def __init__(self):
        self.channel = None
        self.json_encoder = ObjectJSONEncoder()
        self.json_decoder = ObjectJSONDecoder()

    def connect(self, address, timeout=5):
        """
        Connects the client to the server at the given address.

        Calls ping() until it gets a response, or timeout is reached.
        """
        self.close()
        self.channel = grpc.insecure_channel(address)
        self.stub = ProcessorStub(self.channel)

        timer_started = datetime.now()
        while True:
            # Ping until get a response.
            try:
                self.ping()
            except _InactiveRpcError:
                if timeout is not None:
                    timer_duration = (datetime.now() -
                                      timer_started).total_seconds()
                    if timer_duration > timeout:
                        raise Exception("Timed out trying to connect to %s" %
                                        address)
                else:
                    continue
            else:
                break

    # def __enter__(self):
    #     return self
    #
    # def __exit__(self, exc_type, exc_val, exc_tb):
    #     self.close()
    #
    def close(self):
        """
        Closes the client's gRPC channel.
        """
        if self.channel is not None:
            self.channel.close()

    def ping(self):
        """
        Sends a Ping request to the server.
        """
        request = Empty()
        response = self.stub.Ping(request, timeout=5)

    # def follow(self, upstream_name, upstream_address):
    #     request = FollowRequest(
    #         upstream_name=upstream_name, upstream_address=upstream_address
    #     )
    #     response = self.stub.Follow(request, timeout=5,)

    def prompt(self, upstream_name):
        """
        Prompts the downstream server with the upstream name, so that the
        downstream process can promptly pull new notifications from the
        upstream process.
        """
        request = PromptRequest(upstream_name=upstream_name)
        response = self.stub.Prompt(request, timeout=5)

    def get_notifications(self, section_id):
        """
        Gets a section of event notifications from server.
        """
        request = NotificationsRequest(section_id=section_id)
        notifications_reply = self.stub.GetNotifications(request, timeout=5)
        assert isinstance(notifications_reply, NotificationsReply)
        return notifications_reply.section

    def lead(self, application_name, address):
        """
        Requests the server process to connect to the given address and then
        send prompts to it.
        """
        request = LeadRequest(downstream_name=application_name,
                              downstream_address=address)
        response = self.stub.Lead(request, timeout=5)

    def call_application(self, method_name, *args, **kwargs):
        """
        Calls named method on server's application with given args.
        """
        request = CallRequest(
            method_name=method_name,
            args=self.json_encoder.encode(args),
            kwargs=self.json_encoder.encode(kwargs),
        )
        response = self.stub.CallApplicationMethod(request, timeout=5)
        return self.json_decoder.decode(response.data)
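
A hypothetical session with ProcessorClient; the address and the application
method name below are assumptions for illustration, not part of the original
source:

client = ProcessorClient()
client.connect("localhost:50051", timeout=5)  # retries ping() until the server answers
client.ping()

# Call a method on the server's process application by name; args/kwargs are
# JSON-encoded on the way out, and the reply data is JSON-decoded on return.
result = client.call_application("create_example")  # hypothetical method name

client.close()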
Example #5
class ProcessorServer(ProcessorServicer):
    def __init__(
        self,
        application_topic,
        pipeline_id,
        infrastructure_topic,
        setup_table,
        address,
        upstreams,
        downstreams,
        push_prompt_interval,
    ):
        super(ProcessorServer, self).__init__()

        # Make getting notifications more efficient.
        notificationlog.USE_REGULAR_SECTIONS = False
        notificationlog.DEFAULT_SECTION_SIZE = 100

        self.has_been_stopped = Event()
        signal(SIGINT, self.stop)
        self.application_class: Type[ProcessApplication] = resolve_topic(
            application_topic)
        self.pipeline_id = pipeline_id
        self.application_name = self.application_class.create_name()
        infrastructure_class: Type[
            ApplicationWithConcreteInfrastructure] = resolve_topic(
                infrastructure_topic)
        self.application = self.application_class.mixin(
            infrastructure_class=infrastructure_class)(
                pipeline_id=self.pipeline_id, setup_table=setup_table)
        self.address = address
        self.json_encoder = ObjectJSONEncoder()
        self.json_decoder = ObjectJSONDecoder()
        self.upstreams = upstreams
        self.downstreams = downstreams
        self.prompt_events = {}
        self.push_prompt_interval = push_prompt_interval

        self.notification_log_view = NotificationLogView(
            self.application.notification_log,
            json_encoder=ObjectJSONEncoder(),
        )
        for upstream_name in self.upstreams:
            self.prompt_events[upstream_name] = Event()
            # self.prompt_events[upstream_name].set()

        self.downstream_prompt_event = Event()
        subscribe(self._set_downstream_prompt_event, is_prompt_to_pull)

        self.serve()

        self.clients: Dict[str, ProcessorClient] = {}
        self.clients_lock = Lock()
        start_client_threads = []
        remotes = {}
        remotes.update(self.upstreams)
        remotes.update(self.downstreams)
        for name, address in remotes.items():
            thread = StartClient(self.clients, name, address)
            thread.daemon = True
            thread.start()
            start_client_threads.append(thread)
        for thread in start_client_threads:
            thread.join()
            # logging.info("%s connected to %s" % (self.application_name, thread.name))

        self.push_prompts_thread = Thread(target=self._push_prompts)
        self.push_prompts_thread.daemon = True
        self.push_prompts_thread.start()

        # self.count_of_events = 0

        self.pull_notifications_threads = {}
        self.unprocessed_domain_event_queue = Queue()
        for upstream_name, upstream_address in self.upstreams.items():
            thread = PullNotifications(
                prompt_event=self.prompt_events[upstream_name],
                reader=NotificationLogReader(
                    RemoteNotificationLog(
                        client=self.clients[upstream_name],
                        json_decoder=ObjectJSONDecoder(),
                        section_size=self.application.notification_log.section_size,
                    )),
                process_application=self.application,
                event_queue=self.unprocessed_domain_event_queue,
                upstream_name=upstream_name,
                has_been_stopped=self.has_been_stopped,
            )
            thread.daemon = True
            self.pull_notifications_threads[upstream_name] = thread

        self.process_events_thread = Thread(target=self._process_events)
        self.process_events_thread.daemon = True
        self.process_events_thread.start()

        # Start the threads.
        for thread in self.pull_notifications_threads.values():
            thread.start()

        # Wait for termination.
        self.wait_for_termination()

    def _set_downstream_prompt_event(self, event):
        # logging.info(
        #     "Setting downstream prompt event on %s for %s"
        #     % (self.application_name, event)
        # )
        self.downstream_prompt_event.set()

    def _push_prompts(self) -> None:
        # logging.info("Started push prompts thread")
        while not self.has_been_stopped.is_set():
            try:
                self.__push_prompts()
                sleep(self.push_prompt_interval)
            except Exception as e:
                if not self.has_been_stopped.is_set():
                    logging.error(traceback.format_exc())
                    logging.error(
                        "Continuing after error in 'push prompts' thread: %s",
                        e)
                    sleep(1)

    def __push_prompts(self):
        self.downstream_prompt_event.wait()
        self.downstream_prompt_event.clear()
        # logging.info("Pushing prompts from %s" % self.application_name)
        for downstream_name in self.downstreams:
            client = self.clients[downstream_name]
            if not self.has_been_stopped.is_set():
                client.prompt(self.application_name)

    def _process_events(self) -> None:
        while not self.has_been_stopped.is_set():
            try:
                self.__process_events()
            except Exception as e:
                logging.error(traceback.format_exc())
                logging.error(
                    "Continuing after error in 'process events' thread: %s", e)
                sleep(1)

    def __process_events(self):
        unprocessed_item = self.unprocessed_domain_event_queue.get()
        self.unprocessed_domain_event_queue.task_done()
        if unprocessed_item is None:
            return
        else:
            # Process domain event.
            domain_event, notification_id, upstream_name = unprocessed_item
            # logging.info("Unprocessed event: %s" % domain_event)
            new_events, new_records = self.application.process_upstream_event(
                domain_event, notification_id, upstream_name)

            # Publish a prompt if there are new notifications.
            if any(event.__notifiable__ for event in new_events):
                self.application.publish_prompt()

    def serve(self):
        """
        Starts gRPC server.
        """
        self.executor = futures.ThreadPoolExecutor(max_workers=10)
        self.server = grpc.server(self.executor)
        # logging.info(self.application_class)
        add_ProcessorServicer_to_server(self, self.server)
        self.server.add_insecure_port(self.address)
        self.server.start()

    def wait_for_termination(self):
        """
        Runs until termination of process.
        """
        self.server.wait_for_termination()

    def Ping(self, request, context):
        return Empty()

    # def Follow(self, request, context):
    #     upstream_name = request.upstream_name
    #     upstream_address = request.upstream_address
    #     self.follow(upstream_name, upstream_address)
    #     return Empty()
    #
    # def follow(self, upstream_name, upstream_address):
    #     """"""
    #     # logging.debug("%s is following %s" % (self.application_name, upstream_name))
    #     self.clients[upstream_name].lead(self.application_name, self.address)

    def Lead(self, request, context):
        downstream_name = request.downstream_name
        downstream_address = request.downstream_address
        self.lead(downstream_name, downstream_address)
        return Empty()

    def lead(self, downstream_name, downstream_address):
        """
        Starts client and registers downstream to receive prompts.
        """
        # logging.debug("%s is leading %s" % (self.application_name, downstream_name))
        thread = StartClient(self.clients, downstream_name, downstream_address)
        thread.daemon = True
        thread.start()
        thread.join()
        if thread.error:
            raise Exception(
                "Couldn't lead '%s' on address '%s': %s" %
                (downstream_name, downstream_address, thread.error))
        else:
            self.downstreams[downstream_name] = downstream_address

    def start_client(self, name, address):
        """
        Starts client connected to given address.
        """
        if name not in self.clients:
            self.clients[name] = ProcessorClient()
            self.clients[name].connect(address)

    def Prompt(self, request, context):
        upstream_name = request.upstream_name
        self.prompt(upstream_name)
        return Empty()

    def prompt(self, upstream_name):
        """
        Sets the prompt event for the given upstream name.
        """
        self.prompt_events[upstream_name].set()

    def GetNotifications(self, request, context):
        section_id = request.section_id
        section = self.get_notification_log_section(section_id)
        return NotificationsReply(section=section)

    def get_notification_log_section(self, section_id):
        """
        Returns section for given section ID.
        """
        return self.notification_log_view.present_resource(
            section_id=section_id)

    def CallApplicationMethod(self, request, context):
        method_name = request.method_name
        # logging.info("Call application method: %s" % method_name)
        args = self.json_decoder.decode(request.args)
        kwargs = self.json_decoder.decode(request.kwargs)
        method = getattr(self.application, method_name)
        return_value = method(*args, **kwargs)
        return CallReply(data=self.json_encoder.encode(return_value))

    def stop(self, *args):
        """
        Stops the gRPC server.
        """
        # logging.debug("Stopping....")
        self.has_been_stopped.set()
        self.server.stop(grace=1)
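
Since __init__ blocks in wait_for_termination(), a ProcessorServer is normally
launched in its own OS process. A minimal launch sketch; the topics, address,
and interval below are assumptions for illustration:

from multiprocessing import Process

def run_server():
    ProcessorServer(
        application_topic="mypackage.application#MyProcess",  # hypothetical topic
        pipeline_id=0,
        infrastructure_topic="eventsourcing.application.sqlalchemy#SQLAlchemyApplication",
        setup_table=True,
        address="localhost:50051",
        upstreams={},    # name -> address of upstream processors
        downstreams={},  # name -> address of downstream processors
        push_prompt_interval=0.25,
    )

if __name__ == "__main__":
    Process(target=run_server).start()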