def test_event_subscriptions(test_domain):
    test_domain.register(UserEventHandler, aggregate_cls=User)

    engine = Engine(test_domain, test_mode=True)

    assert len(engine._subscriptions) == 1
    assert fqn(UserEventHandler) in engine._subscriptions
    assert engine._subscriptions[fqn(UserEventHandler)].stream_name == "user"
def test_origin_stream_name_in_subscription(test_domain):
    test_domain.register(EmailEventHandler, aggregate_cls=User, source_stream="email")

    engine = Engine(test_domain, test_mode=True)

    assert len(engine._subscriptions) == 1
    assert engine._subscriptions[fqn(EmailEventHandler)].stream_name == "user"
    assert engine._subscriptions[fqn(EmailEventHandler)].origin_stream_name == "email"
def command_handler_for(self, command: BaseCommand) -> Optional[BaseCommandHandler]:
    if self._command_streams is None:
        self._initialize_command_streams()

    stream_name = command.meta_.stream_name or (
        command.meta_.aggregate_cls.meta_.stream_name
        if command.meta_.aggregate_cls
        else None
    )

    if not stream_name:
        return None

    handler_classes = self._command_streams.get(stream_name, set())

    # No command handlers have been configured to run this command
    if len(handler_classes) == 0:
        return None

    # Ensure that a command has a unique handler across all handlers
    # FIXME Perform this check on domain spin-up?
    handler_methods = set()
    for handler_cls in handler_classes:
        try:
            handler_method = next(iter(handler_cls._handlers[fqn(command.__class__)]))
            handler_methods.add((handler_cls, handler_method))
        except StopIteration:
            pass

    if len(handler_methods) > 1:
        raise NotSupportedError(
            f"Command {command.__class__.__name__} cannot be handled by multiple handlers"
        )

    return next(iter(handler_methods))[0] if handler_methods else None
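# Usage sketch (assumption, not part of the source): resolving the handler for a command.
# `Register` and `UserCommandHandler` are hypothetical elements, assumed to be defined
# and registered against the `User` aggregate elsewhere.
def example_resolve_command_handler(test_domain):
    test_domain.register(UserCommandHandler, aggregate_cls=User)

    command = Register(id=str(uuid4()), email="john.doe@example.com")

    handler_cls = test_domain.command_handler_for(command)
    assert handler_cls is UserCommandHandler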
def process(self, command: BaseCommand, asynchronous: bool = True) -> Optional[Any]:
    """Process command and return results based on specified preference.

    By default, Protean does not return values after processing commands. This
    behavior can be overridden either by setting COMMAND_PROCESSING in config to
    "SYNC" or by specifying ``asynchronous=False`` when calling the domain's
    ``handle`` method.

    Args:
        command (BaseCommand): Command to process
        asynchronous (Boolean, optional): Specifies if the command should be processed
            asynchronously. Defaults to True.

    Returns:
        Optional[Any]: Returns either the command handler's return value or nothing,
            based on preference.
    """
    position = self.event_store.store.append_command(command)

    if (
        not asynchronous
        or self.config["COMMAND_PROCESSING"] == CommandProcessing.SYNC.value
    ):
        handler_class = self.command_handler_for(command)
        if handler_class:
            handler_method = next(
                iter(handler_class._handlers[fqn(command.__class__)])
            )
            handler_method(handler_class(), command)

    return position
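# Usage sketch (assumption, not part of the source): forcing inline processing of a
# command with ``asynchronous=False``. `Register` and `UserCommandHandler` are
# hypothetical elements, assumed to be registered elsewhere.
def example_process_command_synchronously(test_domain):
    command = Register(id=str(uuid4()), email="john.doe@example.com")

    # Handler runs inline regardless of the COMMAND_PROCESSING config setting
    position = test_domain.process(command, asynchronous=False)

    assert position is not None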
def test_that_stream_name_overrides_the_derived_stream_name_from_aggregate_cls(
    test_domain,
):
    test_domain.register(
        EmailEventHandler,
        aggregate_cls=User,
        stream_name="identity",
        source_stream="email",
    )

    engine = Engine(test_domain, test_mode=True)

    assert len(engine._subscriptions) == 1
    assert engine._subscriptions[fqn(EmailEventHandler)].stream_name == "identity"
    assert engine._subscriptions[fqn(EmailEventHandler)].origin_stream_name == "email"
def last_event_of_type(
    self, event_cls: Type[BaseEvent], stream_name: str = None
) -> BaseEvent:
    """Return the last event of `event_cls` type in the given stream, or `None`
    if no such event exists. If no stream is specified, all streams are searched."""
    stream_name = stream_name or "$all"
    events = [
        event
        for event in self.domain.event_store.store._read(stream_name)
        if event["type"] == fqn(event_cls)
    ]

    return Message.from_dict(events[-1]).to_object() if len(events) > 0 else None
async def test_message_filtering_for_event_handlers_with_defined_origin_stream(
    test_domain,
):
    test_domain.register(UserEventHandler, aggregate_cls=User)
    test_domain.register(EmailEventHandler, stream_name="email", source_stream="user")

    engine = Engine(test_domain, test_mode=True)
    email_event_handler_subscription = engine._subscriptions[fqn(EmailEventHandler)]

    identifier = str(uuid4())
    user = User(id=identifier, email="*****@*****.**", name="John Doe")
    email = Email(id=identifier, email="*****@*****.**")

    # Construct 3 dummy messages and modify the `Sent` message to have originated
    # from the user stream
    messages = [
        Message.to_aggregate_event_message(
            user, Registered(id=identifier, email="*****@*****.**", name="John Doe")
        ),
        Message.to_aggregate_event_message(
            user, Activated(id=identifier, activated_at=datetime.utcnow())
        ),
        Message.to_aggregate_event_message(
            email, Sent(email="*****@*****.**", sent_at=datetime.utcnow())
        ),
    ]
    messages[2].metadata.origin_stream_name = f"user-{identifier}"

    # Mock the `read` method and have it return the 3 messages
    mock_store_read = mock.Mock()
    mock_store_read.return_value = messages
    email_event_handler_subscription.store.read = mock_store_read

    filtered_messages = (
        await email_event_handler_subscription.get_next_batch_of_messages()
    )

    assert len(filtered_messages) == 1
    assert filtered_messages[0].type == fqn(Sent)
def test_reading_last_message(test_domain):
    identifier = str(uuid4())

    event1 = Registered(id=identifier, email="*****@*****.**")
    user = User(**event1.to_dict())
    test_domain.event_store.store.append_aggregate_event(user, event1)

    event2 = Activated(id=identifier)
    test_domain.event_store.store.append_aggregate_event(user, event2)

    for i in range(10):
        event = Renamed(id=identifier, name=f"John Doe {i}")
        test_domain.event_store.store.append_aggregate_event(user, event)

    # Reading by stream
    message = test_domain.event_store.store.read_last_message(f"user-{identifier}")

    assert message.type == fqn(Renamed)
    assert message.data["name"] == "John Doe 9"
def publish(self, event: BaseEvent) -> None:
    """Publish an Event to all configured brokers.

    Args:
        event (BaseEvent): The Event object containing data to be pushed
    """
    # Persist event in Message Store
    self.event_store.store.append_event(event)

    self.brokers.publish(event)

    if current_domain.config["EVENT_PROCESSING"] == EventProcessing.SYNC.value:
        # Consume events right away
        handler_classes = current_domain.handlers_for(event)
        for handler_cls in handler_classes:
            handler_methods = (
                handler_cls._handlers[fqn(event.__class__)]
                or handler_cls._handlers["$any"]
            )

            for handler_method in handler_methods:
                handler_method(handler_cls(), event)
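# Usage sketch (assumption, not part of the source): publishing an event. When
# EVENT_PROCESSING is configured as "SYNC", registered handlers such as the
# hypothetical `UserEventHandler` run inline; otherwise the event is only appended to
# the message store and pushed to the brokers.
def example_publish_event(test_domain):
    test_domain.register(UserEventHandler, aggregate_cls=User)

    identifier = str(uuid4())
    test_domain.publish(Registered(id=identifier, email="john.doe@example.com"))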
def events_of_type(
    self, event_cls: Type[BaseEvent], stream_name: str = None
) -> List[BaseEvent]:
    """Read events of a specific type in a given stream.

    This is a utility method, especially useful for testing purposes, that retrieves
    events of a specific type from the event store. If no stream is specified, events
    of the requested type will be retrieved from all streams.

    :param event_cls: Class of the event type to be retrieved
    :param stream_name: Stream from which events are to be retrieved
    :type event_cls: BaseEvent Class
    :type stream_name: String, optional, default is `None`
    :return: A list of events of `event_cls` type
    :rtype: list
    """
    stream_name = stream_name or "$all"
    return [
        Message.from_dict(event).to_object()
        for event in self.domain.event_store.store._read(stream_name)
        if event["type"] == fqn(event_cls)
    ]
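# Usage sketch (assumption, not part of the source): asserting on recorded events with
# the `events_of_type` and `last_event_of_type` utilities. The helpers are assumed to be
# exposed on an object holding a `domain` reference, such as the domain's event store
# wrapper.
def example_assert_on_recorded_events(test_domain):
    identifier = str(uuid4())
    event = Registered(id=identifier, email="john.doe@example.com")
    user = User(**event.to_dict())
    test_domain.event_store.store.append_aggregate_event(user, event)

    events = test_domain.event_store.events_of_type(Registered, f"user-{identifier}")
    assert len(events) == 1
    assert test_domain.event_store.last_event_of_type(Registered).id == identifier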
def commit(self):  # noqa: C901
    logger.debug(f"Committing {self}...")

    # Raise error if the Unit of Work is not active
    if not self._in_progress:
        raise InvalidOperationError("UnitOfWork is not in progress")

    # Exit from Unit of Work
    _uow_context_stack.pop()

    # Commit and destroy session
    try:
        for _, session in self._sessions.items():
            session.commit()

        # Push messages to all brokers
        # FIXME Send message to its designated broker?
        # FIXME Send messages through domain.brokers.publish?
        for message in self._messages_to_dispatch:
            for _, broker in self.domain.brokers.items():
                broker.publish(message)
        self._messages_to_dispatch = []  # Empty after dispatch

        events = []
        for item in self._seen:
            if item._events:
                if item.element_type == DomainObjects.EVENT_SOURCED_AGGREGATE:
                    for event in item._events:
                        current_domain.event_store.store.append_aggregate_event(
                            item, event
                        )
                        events.append((item, event))
                else:
                    for event in item._events:
                        current_domain.event_store.store.append_event(event)
                        events.append((item, event))
            item._events = []

        # Iteratively consume all events produced in this session
        if current_domain.config["EVENT_PROCESSING"] == EventProcessing.SYNC.value:
            # Hand over events to be processed instantly
            for _, event in events:
                handler_classes = current_domain.handlers_for(event)
                for handler_cls in handler_classes:
                    handler_methods = (
                        handler_cls._handlers[fqn(event.__class__)]
                        or handler_cls._handlers["$any"]
                    )

                    for handler_method in handler_methods:
                        handler_method(handler_cls(), event)

        logger.debug("Commit Successful")
    except ValueError as exc:
        logger.error(str(exc))
        self.rollback()

        # Extract message based on the message store platform in use
        if str(exc).startswith("P0001-ERROR"):
            msg = str(exc).split("P0001-ERROR: ")[1]
        else:
            msg = str(exc)
        raise ExpectedVersionError(msg) from None
    except Exception as exc:
        logger.error(f"Error during Commit: {str(exc)}. Rolling back Transaction...")
        self.rollback()
        raise ValidationError(
            {"_entity": [f"Error during Data Commit: - {repr(exc)}"]}
        )

    self._reset()
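# Usage sketch (assumption, not part of the source): committing through a Unit of Work.
# On a clean exit, `commit()` flushes sessions, dispatches buffered messages to brokers,
# appends collected events to the event store, and, with EVENT_PROCESSING set to "SYNC",
# invokes event handlers inline.
with UnitOfWork():
    user = User(id=str(uuid4()), email="john.doe@example.com", name="John Doe")
    current_domain.repository_for(User).add(user)
    # Exiting the context without an exception is assumed to trigger commit()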
def _get_element_by_class(self, element_types, element_cls):
    """Fetch Domain record with Element class details"""
    element_qualname = fqn(element_cls)
    return self._get_element_by_fully_qualified_name(element_types, element_qualname)
def _register_element(self, element_type, element_cls, **kwargs):  # noqa: C901
    """Register class into the domain"""
    # Check if `element_cls` is already a subclass of the Element Type,
    # which would be the case in an explicit declaration like `class Account(BaseEntity):`
    #
    # We will need to construct a class derived from the right base class
    # if the Element was specified through annotation, like so:
    #
    # ```
    #     @Entity
    #     class Account:
    # ```
    factory = self.factory_for(element_type)
    new_cls = factory(element_cls, **kwargs)

    if element_type == DomainObjects.MODEL:
        # Remember model association with aggregate/entity class, for easy fetching
        self._models[fqn(new_cls.meta_.entity_cls)] = new_cls

    # Register element with domain
    self._domain_registry.register_element(new_cls)

    # Resolve or record elements to be resolved
    if has_fields(new_cls):
        for _, field_obj in declared_fields(new_cls).items():
            if isinstance(field_obj, (HasOne, HasMany, Reference)) and isinstance(
                field_obj.to_cls, str
            ):
                try:
                    # Attempt to resolve the destination class by querying the active
                    # domain, if a domain is active. Otherwise, track it as part of
                    # `_pending_class_resolutions` for later resolution.
                    if has_domain_context() and current_domain == self:
                        to_cls = fetch_element_cls_from_registry(
                            field_obj.to_cls,
                            (DomainObjects.AGGREGATE, DomainObjects.ENTITY),
                        )
                        field_obj._resolve_to_cls(to_cls, new_cls)
                    else:
                        self._pending_class_resolutions[field_obj.to_cls].append(
                            (field_obj, new_cls)
                        )
                except ConfigurationError:
                    # Class was not found yet, so we track it for future resolution
                    self._pending_class_resolutions[field_obj.to_cls].append(
                        (field_obj, new_cls)
                    )

    # Resolve known pending references by full name or class name immediately.
    # Otherwise, references will be resolved automatically on domain activation.
    #
    # This comes in handy when we are manually registering classes one after the other.
    # Since the domain is already active, the classes become usable as soon as all
    # referenced classes are registered.
    if has_domain_context() and current_domain == self:
        # Check by both the class name and the class' fully qualified name
        for name in [fqn(new_cls), new_cls.__name__]:
            if name in self._pending_class_resolutions:
                for field_obj, owner_cls in self._pending_class_resolutions[name]:
                    field_obj._resolve_to_cls(new_cls, owner_cls)

                # Remove from pending list now that the class has been resolved
                del self._pending_class_resolutions[name]

    return new_cls
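# Illustrative sketch (assumption, not part of the source) of the two registration paths
# described in the comments above, using a hypothetical `Account` element:
#
#     # Explicit declaration - `Account` already derives from the element's base class,
#     # so the factory can register it as-is
#     class Account(BaseEntity):
#         name = String(max_length=50)
#
#     domain.register(Account, aggregate_cls=User)
#
#     # Declaration through annotation - the factory constructs a class derived from
#     # the right base class before registering it
#     @Entity
#     class Account:
#         name = String(max_length=50)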