def get_snapshot(stored_entity_id, event_store, until=None):
    """
    Get the last snapshot for entity.

    :rtype: Snapshot
    """
    assert isinstance(event_store, AbstractEventStore)
    snapshot_entity_id = make_stored_entity_id(id_prefix_from_event_class(Snapshot), stored_entity_id)
    return event_store.get_most_recent_event(snapshot_entity_id, until=until)
def get_events(self, after=None, until=None, limit=None, is_ascending=False, page_size=None):
    assert limit is None or limit > 0

    # Resolve the 'after' and 'until' event IDs (UUIDs) into timestamps.
    if after is None:
        after_timestamp = None
    else:
        after_timestamp = timestamp_from_uuid(after)
    if until is None:
        until_timestamp = None
    else:
        until_timestamp = timestamp_from_uuid(until)

    now_timestamp = timestamp_from_uuid(uuid1())
    started_on = self.log.started_on
    if is_ascending:
        # Start with the log start time, and continue until now.
        timestamp = started_on if after is None else max(after_timestamp, started_on)
    else:
        # Start with now, and continue back to the log start time.
        timestamp = now_timestamp if until is None else min(until_timestamp, now_timestamp)

    # Start counting events.
    count_events = 0

    while True:
        # Identify the time bucket for the current timestamp.
        entity_id = make_bucket_id(self.log.name, timestamp, self.log.bucket_size)
        stored_entity_id = make_stored_entity_id('MessageLogged', entity_id)

        # Yield the events stored in this bucket.
        for message_logged_event in self.event_store.get_entity_events(
            stored_entity_id=stored_entity_id,
            after=after,
            until=until,
            limit=limit,
            is_ascending=is_ascending,
            page_size=page_size,
        ):
            yield message_logged_event

            # Stop if the limit has been reached ('raise StopIteration' inside a
            # generator is an error since PEP 479, so return instead).
            if limit is not None:
                count_events += 1
                if count_events >= limit:
                    return

        # See if there's another bucket.
        if is_ascending:
            next_timestamp = next_bucket_starts(timestamp, self.log.bucket_size)
            if next_timestamp > (until_timestamp or now_timestamp):
                return
            else:
                timestamp = next_timestamp
        else:
            if timestamp < (after_timestamp or started_on):
                return
            else:
                timestamp = previous_bucket_starts(timestamp, self.log.bucket_size)
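# A self-contained sketch (hypothetical names, not the library's API) of the bucket-walking
# pattern used by get_events() above: start at one end of the time range and step one time
# bucket at a time until the other end is passed, reading each bucket's events along the way.
from datetime import datetime, timedelta

def walk_buckets(started_on, now, bucket_size=timedelta(hours=1)):
    timestamp = started_on
    while timestamp <= now:
        yield timestamp           # one bucket per step; the real code derives a stored entity ID from it
        timestamp += bucket_size  # analogous to next_bucket_starts()

buckets = list(walk_buckets(datetime(2017, 1, 1, 0), datetime(2017, 1, 1, 3)))
assert len(buckets) == 4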
def last_n(n):
    # Time how long it takes to read the last n stored events, averaged over 'repetitions' runs.
    n = min(n, num_beats + 1)
    stored_entity_id = make_stored_entity_id('Example', example.id)
    repo = self.app.example_repo.event_player.event_store.stored_event_repo

    start_last_n = utc_now()
    last_n_stored_events = []
    for _ in six.moves.range(repetitions):
        last_n_stored_events = repo.get_most_recent_events(stored_entity_id, limit=n)
    time_last_n = (utc_now() - start_last_n) / repetitions

    # Report the average retrieval time and throughput.
    num_retrieved_events = len(list(last_n_stored_events))
    events_per_second = num_retrieved_events / time_last_n
    print(("Time to get last {:>" + str(i + 1) + "} events after {} events: {:.6f}s ({:.0f} events/s)"
           "").format(n, num_beats + 1, time_last_n, events_per_second))
def take_snapshot(entity, at_event_id):
    # Make the 'stored entity ID' for the entity, which is used as the Snapshot 'entity ID'.
    id_prefix = id_prefix_from_entity(entity)
    stored_entity_id = make_stored_entity_id(id_prefix, entity.id)

    # Create the snapshot event, capturing the entity's current state.
    snapshot = Snapshot(
        entity_id=stored_entity_id,
        domain_event_id=at_event_id,
        topic=topic_from_domain_class(entity.__class__),
        attrs=entity.__dict__.copy(),
    )
    publish(snapshot)

    # Return the event.
    return snapshot
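# A minimal, self-contained sketch (toy classes, not the eventsourcing library) of the
# snapshotting idea in take_snapshot() above: record the entity's __dict__ against the ID
# of its last event, so the entity can later be restored without replaying every event.
class ToySnapshot(object):
    def __init__(self, entity_id, domain_event_id, attrs):
        self.entity_id = entity_id
        self.domain_event_id = domain_event_id
        self.attrs = attrs

class ToyEntity(object):
    def __init__(self, id, balance):
        self.id = id
        self.balance = balance

entity = ToyEntity(id='abc123', balance=42)
snapshot = ToySnapshot('Example::' + entity.id, domain_event_id='event-7', attrs=entity.__dict__.copy())

# Restore a copy of the entity from the snapshot's attributes.
restored = object.__new__(ToyEntity)
restored.__dict__.update(snapshot.attrs)
assert restored.balance == 42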
def serialize(self, domain_event):
    """
    Serializes a domain event into a stored event.
    """
    assert isinstance(domain_event, DomainEvent)

    # Copy the event's attributes, excluding those stored in dedicated fields.
    event_data = {
        key: value
        for key, value in domain_event.__dict__.items()
        if key not in {
            'domain_event_id',
            'entity_id',
            'entity_version',
            'metadata',
        }
    }
    domain_event_class = type(domain_event)
    event_version = get_event_version(domain_event_class)

    return UnifiedStoredEvent(
        event_id=domain_event.domain_event_id,
        event_type=get_event_type(domain_event),
        event_version=event_version,
        event_data=self._json_encode(event_data),
        aggregate_id=domain_event.entity_id,
        aggregate_type=get_aggregate_type(domain_event),
        aggregate_version=domain_event.entity_version,
        create_date=datetime.fromtimestamp(
            timestamp_from_uuid(domain_event.domain_event_id)),
        metadata=self._json_encode(getattr(domain_event, 'metadata', None)),
        module_name=domain_event_class.__module__,
        class_name=domain_event_class.__qualname__,
        # stored_entity_id is required by the underlying event store library.
        stored_entity_id=make_stored_entity_id(
            id_prefix_from_event(domain_event), domain_event.entity_id),
    )
def make_stored_entity_id(self, entity_id):
    """Prefixes the given entity ID with the ID prefix for the entity's type.
    """
    return make_stored_entity_id(self.id_prefix, entity_id)
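# A minimal sketch of what make_stored_entity_id() is assumed to return in the snippets
# above: an ID prefix joined to an entity ID (the '::' separator is an assumption, not
# necessarily the library's exact convention).
def make_stored_entity_id_sketch(id_prefix, entity_id):
    return '{}::{}'.format(id_prefix, entity_id)

assert make_stored_entity_id_sketch('Example', 'abc123') == 'Example::abc123'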