def construct_item_args(self, domain_event):
    """
    Constructs attributes of a sequenced item from the given domain event.
    """
    # Work on a copy so the event object itself is never mutated.
    state = domain_event.__dict__.copy()

    # Pull the sequencing attributes out of the copied state.
    sequence_id = state.get(self.sequence_id_attr_name)
    position = state.get(self.position_attr_name)

    # The topic is always derived from the event's class.
    topic = get_topic(type(domain_event))

    # Serialise the event state, encrypting if a cipher is configured.
    data = json_dumps(state, cls=self.json_encoder_class)
    if self.cipher:
        data = self.cipher.encrypt(data)

    # The 'other' args are meant to be derivative of the other attributes,
    # to populate database fields, and shouldn't affect the hash.
    other_args = tuple(
        getattr(domain_event, name) for name in self.other_attr_names
    )

    return (sequence_id, position, topic, data) + other_args
def construct_event_records(self, pending_events, causal_dependencies=None):
    """
    Converts pending domain events into event records, optionally
    assigning notification IDs and attaching causal dependencies.
    """
    record_manager = self.event_store.record_manager

    # Map the events to sequenced items, then to database records.
    items = self.event_store.item_from_event(pending_events)
    event_records = record_manager.to_records(items)

    if event_records:
        # Todo: Maybe keep track of what this probably is, to avoid query. Like log reader, invalidate on error.
        if self.set_notification_ids:
            next_id = record_manager.get_max_record_id() or 0
            for event, record in zip(pending_events, event_records):
                if type(event).__notifiable__:
                    next_id += 1
                    record.id = next_id
                else:
                    record.id = 'event-not-notifiable'

        if self.use_causal_dependencies:
            assert hasattr(record_manager.record_class, 'causal_dependencies')
            # Only need first event to carry the dependencies.
            event_records[0].causal_dependencies = json_dumps(causal_dependencies)

    return event_records
def construct_event_records(self, aggregates, causal_dependencies):
    """
    Collects the pending events of the given aggregates and converts
    them into event records with contiguous notification IDs.
    """
    assert isinstance(aggregates, (list, tuple))
    record_manager = self.event_store.record_manager
    assert isinstance(record_manager, RelationalRecordManager)

    # Gather every pending event across the aggregates, ordered by time.
    pending_events = []
    for aggregate in aggregates:
        pending_events.extend(aggregate.__batch_pending_events__())
    pending_events.sort(key=lambda event: event.timestamp)

    # Convert the events to database records.
    records = record_manager.to_records(
        self.event_store.to_sequenced_item(pending_events)
    )

    if records:
        # Allocate IDs following on from the current maximum record ID.
        next_id = record_manager.get_max_record_id() or 0
        for record in records:
            next_id += 1
            record.id = next_id

        # Only the first record needs to carry the causal dependencies.
        if hasattr(record_manager.record_class, 'causal_dependencies'):
            records[0].causal_dependencies = json_dumps(causal_dependencies)

    return records
def get_item_topic_and_state(self, domain_event_class, event_attrs):
    """
    Returns the topic of the given event class together with its
    serialised (and, if a cipher is configured, encrypted) state.
    """
    # Topic comes from the class; state from the serialised attributes.
    topic = get_topic(domain_event_class)
    state = json_dumps(event_attrs, cls=self.json_encoder_class)

    # Encrypt the serialised state when a cipher is configured.
    if self.cipher:
        state = self.cipher.encrypt(state)

    return topic, state
def hash_object(json_encoder_class, obj):
    """
    Calculates SHA-256 hash of JSON encoded 'obj'.

    :param obj: Object to be hashed.
    :return: SHA-256 as hexadecimal string.
    :rtype str
    """
    # Salt the object before encoding so hashes are not portable
    # across differently-salted installations.
    salted = (obj, SALT_FOR_DATA_INTEGRITY)
    encoded = json_dumps(salted, cls=json_encoder_class).encode()
    return hashlib.sha256(encoded).hexdigest()
def present_section(self, section_id):
    """
    Returns a section of notification log in JSON format.

    :param section_id: ID of the section of the notification log.
    :return: Tuple of (section in JSON format, whether it is archived).
    :rtype: tuple
    """
    section = self.notification_log[section_id]
    # A section with a 'next' link is complete, hence archived.
    is_archived = bool(section.next_id)
    # Pass the encoder class by keyword, consistent with the other
    # json_dumps() calls in this module (which all use cls=...);
    # passing it positionally silently misbinds if json_dumps's second
    # positional parameter is not the encoder class.
    section_json = json_dumps(section.__dict__, cls=self.json_encoder_class)
    return section_json, is_archived
def construct_event_records(self, pending_events, causal_dependencies=None):
    """
    Converts pending domain events into event records, setting
    notification log IDs and causal dependencies.
    """
    record_manager = self.event_store.record_manager

    # Map the events to sequenced items, then to database records.
    items = self.event_store.to_sequenced_item(pending_events)
    event_records = record_manager.to_records(items)

    if event_records:
        # Assign IDs following on from the current maximum record ID;
        # non-notifiable events get an empty ID instead.
        next_id = record_manager.get_max_record_id() or 0
        for event, record in zip(pending_events, event_records):
            if type(event).__notifiable__:
                next_id += 1
                record.id = next_id
            else:
                record.id = ''

        # Only the first record needs to carry the dependencies.
        if hasattr(record_manager.record_class, 'causal_dependencies'):
            serialised_deps = json_dumps(causal_dependencies)
            event_records[0].causal_dependencies = serialised_deps

    return event_records
def present_section(self, section_id):
    """
    Returns a section of notification log in JSON format.

    :param section_id: ID of the section of the notification log.
    :return: Tuple of (section in JSON format, whether it is archived).
    :rtype: tuple
    """
    section = self.notification_log[section_id]
    # A section with a 'next' link is complete, hence archived.
    is_archived = bool(section.next_id)
    # Pass the encoder class by keyword, consistent with the other
    # json_dumps() calls in this module (which all use cls=...);
    # passing it positionally silently misbinds if json_dumps's second
    # positional parameter is not the encoder class.
    section_json = json_dumps(section.__dict__, cls=self.json_encoder_class)
    return section_json, is_archived
def hash_object(json_encoder_class, obj):
    """
    Returns the SHA-256 hex digest of the JSON-encoded 'obj',
    salted with SALT_FOR_DATA_INTEGRITY.
    """
    payload = json_dumps(
        (obj, SALT_FOR_DATA_INTEGRITY),
        cls=json_encoder_class,
    )
    digest = hashlib.sha256(payload.encode())
    return digest.hexdigest()