def get_entity_events(self, stored_entity_id, after=None, until=None, limit=None, query_ascending=True, results_ascending=True):
    """
    Return stored events for an entity, optionally bounded by time-UUIDs.

    :param stored_entity_id: key into the in-memory event map
    :param after: time-UUID lower bound (exclusive when scanning ascending,
        inclusive when descending)
    :param until: time-UUID upper bound (inclusive when scanning ascending,
        exclusive when descending)
    :param limit: maximum number of events to return (None for no limit)
    :param query_ascending: direction in which candidates are scanned
    :param results_ascending: order of the returned list
    """
    if stored_entity_id not in self._by_stored_entity_id:
        return []
    # Copy the stored events for this entity; they are appended in
    # ascending order, so reverse the copy for a descending scan.
    stored_events = self._by_stored_entity_id[stored_entity_id][:]
    if not query_ascending:
        stored_events.reverse()

    # Convert the bounding time-UUIDs to float timestamps for comparison.
    after_timestamp = None if after is None else timestamp_from_uuid(after)
    until_timestamp = None if until is None else timestamp_from_uuid(until)

    # Count matches (needed to stop when the limit is reached).
    count = 0
    query_results = []
    for event in stored_events:
        if limit is not None and count >= limit:
            break
        event_timestamp = timestamp_from_uuid(event.event_id)
        # Exclude events earlier than the 'after' bound. Compare against
        # None explicitly (the original truthiness test would ignore a
        # bound whose timestamp is exactly 0.0).
        if after_timestamp is not None:
            if query_ascending:
                if event_timestamp <= after_timestamp:
                    continue
            elif event_timestamp < after_timestamp:
                continue
        # Exclude events later than the 'until' bound.
        if until_timestamp is not None:
            if query_ascending:
                if event_timestamp > until_timestamp:
                    continue
            elif event_timestamp >= until_timestamp:
                continue
        query_results.append(event)
        count += 1
    # Flip the results when the caller wants the opposite order to the scan.
    if results_ascending != query_ascending:
        query_results.reverse()
    return query_results
def test_timestamp_from_uuid(self):
    # Sample the wall clock either side of creating a time-UUID.
    lower_bound = time.time()
    value = uuid1()
    upper_bound = time.time()
    # The timestamp recovered from the UUID must fall between the samples.
    recovered = timestamp_from_uuid(value)
    self.assertLess(lower_bound, recovered)
    self.assertGreater(upper_bound, recovered)
    # Check timestamp_from_uuid() works with hex strings, as well as UUID objects.
    self.assertEqual(timestamp_from_uuid(value.hex), timestamp_from_uuid(value))
def test_timestamp_from_uuid(self):
    # Take clock readings around the creation of a time-UUID.
    reading_before = utc_now()
    value = uuid1()
    reading_after = utc_now()
    # The recovered timestamp must sit between the two readings.
    recovered = timestamp_from_uuid(value)
    self.assertLess(reading_before, recovered)
    self.assertGreater(reading_after, recovered)
    # Check it works with hex strings, as well as UUID objects.
    self.assertEqual(timestamp_from_uuid(value.hex), timestamp_from_uuid(value))
def get_events(self, after=None, until=None, limit=None, is_ascending=False, page_size=None):
    """
    Generate message-logged events from the log's time buckets.

    Walks the buckets from the start of the log (ascending) or from now
    (descending), yielding events from the event store bucket by bucket.

    :param after: time-UUID lower bound
    :param until: time-UUID upper bound
    :param limit: stop after this many events (must be positive if given)
    :param is_ascending: direction of iteration over buckets and events
    :param page_size: passed through to the event store query
    """
    assert limit is None or limit > 0
    # Convert the bounding time-UUIDs to float timestamps.
    after_timestamp = None if after is None else timestamp_from_uuid(after)
    until_timestamp = None if until is None else timestamp_from_uuid(until)
    now_timestamp = timestamp_from_uuid(uuid1())
    started_on = self.log.started_on
    if is_ascending:
        # Start with the log start time, and continue until now.
        timestamp = started_on if after is None else max(after_timestamp, started_on)
    else:
        timestamp = now_timestamp if until is None else min(until_timestamp, now_timestamp)
    # Start counting events (to stop at the limit).
    count_events = 0
    while True:
        entity_id = make_bucket_id(self.log.name, timestamp, self.log.bucket_size)
        stored_entity_id = make_stored_entity_id('MessageLogged', entity_id)
        for message_logged_event in self.event_store.get_entity_events(
            stored_entity_id=stored_entity_id,
            after=after,
            until=until,
            limit=limit,
            is_ascending=is_ascending,
            page_size=page_size,
        ):
            yield message_logged_event
            if limit is not None:
                count_events += 1
                if count_events >= limit:
                    # PEP 479: raising StopIteration inside a generator is a
                    # RuntimeError on Python 3.7+; 'return' ends it cleanly.
                    return
        # See if there's another bucket.
        if is_ascending:
            next_timestamp = next_bucket_starts(timestamp, self.log.bucket_size)
            # NOTE(review): truthiness here would misbehave if a bound's
            # timestamp were exactly 0.0 — preserved as-is, confirm intent.
            if next_timestamp > (until_timestamp or now_timestamp):
                return
            timestamp = next_timestamp
        else:
            if timestamp < (after_timestamp or started_on):
                return
            timestamp = previous_bucket_starts(timestamp, self.log.bucket_size)
def test_timestamp_from_uuid(self):
    # The timestamp recovered from a time-UUID must lie between
    # clock readings taken just before and just after its creation.
    earlier = utc_now()
    value = uuid1()
    later = utc_now()
    recovered = timestamp_from_uuid(value)
    self.assertLess(earlier, recovered)
    self.assertGreater(later, recovered)
def append_message(self, message, level='INFO'):
    """Publish and return a MessageLogged event placed in the current time bucket."""
    assert isinstance(message, six.string_types)
    # The bucket is derived from the timestamp embedded in a fresh domain event id.
    event_id = create_domain_event_id()
    bucket_timestamp = timestamp_from_uuid(event_id)
    bucket_id = make_bucket_id(self.name, bucket_timestamp, self.bucket_size)
    logged = MessageLogged(
        entity_id=bucket_id,
        message=message,
        level=level,
    )
    publish(logged)
    return logged
def test(self):
    # The base class should be subclassable.
    class Event(EventWithTimeuuid):
        pass

    # An explicit event_id is stored as given.
    given_id = uuid1()
    instance = Event(event_id=given_id)
    self.assertEqual(instance.event_id, given_id)

    # Without an event_id, one is generated between two clock samples.
    before = time()
    instance = Event()
    self.assertGreater(timestamp_from_uuid(instance.event_id), before)
    self.assertLess(timestamp_from_uuid(instance.event_id), time())

    # The event_id attribute is read-only.
    with self.assertRaises(AttributeError):
        # noinspection PyPropertyAccess
        instance.event_id = time()
def test(self):
    with ExampleApplicationWithTimeuuidSequencedItems() as app:
        # A new entity's creation time comes from its initial event's time-UUID.
        created = app.start_entity()
        initial_id = created._initial_event_id
        self.assertIsInstance(initial_id, UUID)
        self.assertEqual(created.created_on, timestamp_from_uuid(initial_id))
        # The entity can be read back from the repository by its id.
        fetched = app.repository[created.id]
        self.assertEqual(fetched.id, created.id)
def get_entity_events(self, stored_entity_id, after=None, until=None, limit=None, query_ascending=True, results_ascending=True):
    """
    Query stored events for an entity from the Django event model.

    Bounds depend on scan direction: ascending treats 'after' as exclusive
    and 'until' as inclusive; descending treats 'after' as inclusive and
    'until' as exclusive.
    """
    queryset = self.EventModel.objects.filter(stored_entity_id=stored_entity_id)
    queryset = queryset.order_by('id' if query_ascending else '-id')
    if after is not None:
        lower = datetime.datetime.fromtimestamp(timestamp_from_uuid(after))
        lower_lookup = 'create_date__gt' if query_ascending else 'create_date__gte'
        queryset = queryset.filter(**{lower_lookup: lower})
    if until is not None:
        upper = datetime.datetime.fromtimestamp(timestamp_from_uuid(until))
        upper_lookup = 'create_date__lte' if query_ascending else 'create_date__lt'
        queryset = queryset.filter(**{upper_lookup: upper})
    # Apply the limit in query order, then materialise the rows.
    if limit is not None:
        queryset = queryset[:limit]
    rows = list(queryset)
    # Flip the rows when the caller wants the opposite order to the scan.
    if results_ascending != query_ascending:
        rows.reverse()
    return [from_model_instance(row) for row in rows]
def test(self):
    """Entity timestamps derive from the initial event's time-UUID; finish() removes it."""
    with ExampleApplicationWithTimeuuidSequencedItems() as app:
        # Create entity.
        entity1 = app.start_entity()
        self.assertIsInstance(entity1._initial_event_id, UUID)
        expected_timestamp = timestamp_from_uuid(entity1._initial_event_id)
        self.assertEqual(entity1.created_on, expected_timestamp)
        # Was assertTrue(value, msg) — the timestamp was silently treated as
        # the failure message, so nothing was compared. assertEqual matches
        # the intent (and the parallel check on the line above).
        self.assertEqual(entity1.last_modified_on, expected_timestamp)
        # Read entity from repo.
        retrieved_obj = app.repository[entity1.id]
        self.assertEqual(retrieved_obj.id, entity1.id)
        # Discard the entity and check it is gone from the repository.
        retrieved_obj.finish()
        assert retrieved_obj.id not in app.repository
def new_stored_event(event_id, event_type, event_data, aggregate_id, aggregate_version):
    """Build a UnifiedStoredEvent for a SampleAggregate from the given fields."""
    # The creation time is recovered from the time-UUID event id.
    create_date = datetime.fromtimestamp(timestamp_from_uuid(event_id))
    return UnifiedStoredEvent(
        event_id=event_id,
        event_type=event_type,
        event_version=1,
        event_data=json.dumps(event_data),
        aggregate_id=aggregate_id,
        aggregate_type='SampleAggregate',
        aggregate_version=aggregate_version,
        create_date=create_date,
        stored_entity_id='SampleAggregate::' + aggregate_id,
        metadata='',
        module_name='',
        class_name='',
    )
def test_event_attributes(self):
    event = Example.Created(entity_id='entity1', a=1, b=2)
    # Constructor keyword args become read-only attributes.
    self.assertEqual(1, event.a)
    self.assertEqual(2, event.b)
    # Unknown attributes can be neither read nor assigned.
    self.assertRaises(AttributeError, getattr, event, 'c')
    self.assertRaises(AttributeError, setattr, event, 'c', 3)
    # A timestamp is auto-generated as a float.
    self.assertIsInstance(event.timestamp, float)
    # An explicit domain_event_id is stored as given...
    explicit = Example.Created(entity_id='entity1', a=1, b=2, domain_event_id=3)
    self.assertEqual(3, explicit.domain_event_id)
    # ...and a time-UUID hex id determines the event's timestamp.
    hex_id = uuid1().hex
    timed = Example.Created(entity_id='entity1', a=1, b=2, domain_event_id=hex_id)
    self.assertEqual(timestamp_from_uuid(hex_id), timed.timestamp)
def serialize(self, domain_event):
    """
    Serializes a domain event into a stored event.
    """
    assert isinstance(domain_event, DomainEvent)
    # Everything in the event's state except the bookkeeping fields is payload.
    bookkeeping_keys = {'domain_event_id', 'entity_id', 'entity_version', 'metadata'}
    event_data = {}
    for key, value in domain_event.__dict__.items():
        if key not in bookkeeping_keys:
            event_data[key] = value
    event_class = type(domain_event)
    event_version = get_event_version(event_class)
    return UnifiedStoredEvent(
        event_id=domain_event.domain_event_id,
        event_type=get_event_type(domain_event),
        event_version=event_version,
        event_data=self._json_encode(event_data),
        aggregate_id=domain_event.entity_id,
        aggregate_type=get_aggregate_type(domain_event),
        aggregate_version=domain_event.entity_version,
        # Creation time is recovered from the time-UUID event id.
        create_date=datetime.fromtimestamp(
            timestamp_from_uuid(domain_event.domain_event_id)),
        metadata=self._json_encode(getattr(domain_event, 'metadata', None)),
        module_name=event_class.__module__,
        class_name=event_class.__qualname__,
        # have to have stored_entity_id because of the lib
        stored_entity_id=make_stored_entity_id(
            id_prefix_from_event(domain_event), domain_event.entity_id),
    )
def timestamp(self):
    """Float timestamp recovered from this event's time-UUID id."""
    # NOTE(review): reads __dict__ directly rather than normal attribute
    # access — presumably to bypass descriptor lookup; confirm intent.
    event_id = self.__dict__['domain_event_id']
    return timestamp_from_uuid(event_id)
def get_events(self, after=None, until=None, limit=None, is_ascending=False, page_size=None):
    """
    Generate message-logged events from the log's time buckets.

    Walks the buckets from the start of the log (ascending) or from now
    (descending), yielding events from the event store bucket by bucket.

    :param after: time-UUID lower bound
    :param until: time-UUID upper bound
    :param limit: stop after this many events (must be positive if given)
    :param is_ascending: direction of iteration over buckets and events
    :param page_size: passed through to the event store query
    """
    assert limit is None or limit > 0
    # Convert the bounding time-UUIDs to float timestamps.
    after_timestamp = None if after is None else timestamp_from_uuid(after)
    until_timestamp = None if until is None else timestamp_from_uuid(until)
    now_timestamp = timestamp_from_uuid(uuid1())
    started_on = self.log.started_on
    if is_ascending:
        # Start with the log start time, and continue until now.
        timestamp = started_on if after is None else max(
            after_timestamp, started_on)
    else:
        timestamp = now_timestamp if until is None else min(
            until_timestamp, now_timestamp)
    # Start counting events (to stop at the limit).
    count_events = 0
    while True:
        entity_id = make_bucket_id(self.log.name, timestamp, self.log.bucket_size)
        stored_entity_id = make_stored_entity_id('MessageLogged', entity_id)
        for message_logged_event in self.event_store.get_entity_events(
            stored_entity_id=stored_entity_id,
            after=after,
            until=until,
            limit=limit,
            is_ascending=is_ascending,
            page_size=page_size,
        ):
            yield message_logged_event
            if limit is not None:
                count_events += 1
                if count_events >= limit:
                    # PEP 479: raising StopIteration inside a generator is a
                    # RuntimeError on Python 3.7+; 'return' ends it cleanly.
                    return
        # See if there's another bucket.
        if is_ascending:
            next_timestamp = next_bucket_starts(timestamp, self.log.bucket_size)
            # NOTE(review): truthiness here would misbehave if a bound's
            # timestamp were exactly 0.0 — preserved as-is, confirm intent.
            if next_timestamp > (until_timestamp or now_timestamp):
                return
            timestamp = next_timestamp
        else:
            if timestamp < (after_timestamp or started_on):
                return
            timestamp = previous_bucket_starts(timestamp, self.log.bucket_size)
def created_on(self):
    """Creation time, recovered from the initial event's time-UUID."""
    initial_id = self._initial_event_id
    return timestamp_from_uuid(initial_id)
def last_modified_on(self):
    """Last-modified time, recovered from the most recent event's time-UUID."""
    last_id = self._last_event_id
    return timestamp_from_uuid(last_id)