def setUp(self) -> None:
    self.factory = InfrastructureFactory.construct(self.env)
    self.assertIsInstance(self.factory, self.expected_factory_class())
    self.transcoder = JSONTranscoder()
    self.transcoder.register(UUIDAsHex())
    self.transcoder.register(DecimalAsStr())
    self.transcoder.register(DatetimeAsISO())
def test_cache_raises_aggregate_not_found_when_projector_func_returns_none(
    self,
):
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())
    transcoder.register(EmailAddressAsStr())

    event_recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
    event_recorder.create_table()
    event_store = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )
    repository = Repository(
        event_store,
        cache_maxsize=2,
    )

    # Store an aggregate and read it back once, so it is cached.
    aggregate = Aggregate()
    event_store.put(aggregate.collect_events())
    self.assertEqual(1, repository.get(aggregate.id).version)

    # Record a further event.
    aggregate.trigger_event(Aggregate.Event)
    event_store.put(aggregate.collect_events())

    # Getting the aggregate with a projector function that returns None
    # raises AggregateNotFound.
    with self.assertRaises(AggregateNotFound):
        repository.get(aggregate.id, projector_func=lambda _, __: None)
def test_cache_fastforward_false(self):
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())
    transcoder.register(EmailAddressAsStr())

    event_recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
    event_recorder.create_table()
    event_store = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )
    repository = Repository(
        event_store,
        cache_maxsize=2,
        fastforward=False,
    )

    # Store an aggregate and read it back once, so it is cached.
    aggregate = Aggregate()
    event_store.put(aggregate.collect_events())
    self.assertEqual(1, repository.get(aggregate.id).version)

    # Record a further event. With fastforward=False, the repository
    # returns the stale cached aggregate, still at version 1.
    aggregate.trigger_event(Aggregate.Event)
    event_store.put(aggregate.collect_events())
    self.assertEqual(1, repository.get(aggregate.id).version)
def register_transcodings(self, transcoder: Transcoder) -> None:
    """
    Registers :class:`~eventsourcing.persistence.Transcoding`
    objects on given :class:`~eventsourcing.persistence.JSONTranscoder`.
    """
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())
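A minimal sketch of how this hook is typically extended: an application subclass keeps the default transcodings via super() and then registers its own. The EmailAddressApplication name is hypothetical, and EmailAddressAsStr is assumed from the tests above rather than being part of the library's public API.

class EmailAddressApplication(Application):
    def register_transcodings(self, transcoder: Transcoder) -> None:
        # Keep the default transcodings (UUIDAsHex, DecimalAsStr, DatetimeAsISO).
        super().register_transcodings(transcoder)
        # Register the extra transcoding used by this application's events
        # (EmailAddressAsStr is assumed, as used in the tests above).
        transcoder.register(EmailAddressAsStr())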
def test(self): # Open an account. account = BankAccount.open( full_name="Alice", email_address="*****@*****.**", ) # Credit the account. account.append_transaction(Decimal("10.00")) account.append_transaction(Decimal("25.00")) account.append_transaction(Decimal("30.00")) # Collect pending events. pending = account.collect_events() # Construct event store. transcoder = JSONTranscoder() transcoder.register(UUIDAsHex()) transcoder.register(DecimalAsStr()) transcoder.register(DatetimeAsISO()) transcoder.register(EmailAddressAsStr()) recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:")) event_store = EventStore( mapper=Mapper(transcoder), recorder=recorder, ) recorder.create_table() # Get last event. last_event = event_store.get(account.id, desc=True, limit=1) assert list(last_event) == [] # Store pending events. event_store.put(pending) # Get domain events. domain_events = event_store.get(account.id) # Reconstruct the bank account. copy = None for domain_event in domain_events: copy = domain_event.mutate(copy) # Check copy has correct attribute values. assert copy.id == account.id assert copy.balance == Decimal("65.00") # Get last event. events = event_store.get(account.id, desc=True, limit=1) events = list(events) assert len(events) == 1 last_event = events[0] assert last_event.originator_id == account.id assert type(last_event) == BankAccount.TransactionAppended
def test(self): # Open an account. account = BankAccount.open( full_name="Alice", email_address="*****@*****.**", ) # Credit the account. account.append_transaction(Decimal("10.00")) account.append_transaction(Decimal("25.00")) account.append_transaction(Decimal("30.00")) transcoder = JSONTranscoder() transcoder.register(UUIDAsHex()) transcoder.register(DecimalAsStr()) transcoder.register(DatetimeAsISO()) transcoder.register(EmailAddressAsStr()) snapshot_store = EventStore( mapper=Mapper(transcoder=transcoder), recorder=SQLiteAggregateRecorder( SQLiteDatastore(":memory:"), events_table_name="snapshots", ), ) snapshot_store.recorder.create_table() # Clear pending events. account.collect_events() # Take a snapshot. snapshot = Snapshot.take(account) self.assertNotIn("pending_events", snapshot.state) # Store snapshot. snapshot_store.put([snapshot]) # Get snapshot. snapshots = snapshot_store.get(account.id, desc=True, limit=1) snapshot = next(snapshots) assert isinstance(snapshot, Snapshot) # Reconstruct the bank account. copy = snapshot.mutate() assert isinstance(copy, BankAccount) # Check copy has correct attribute values. assert copy.id == account.id assert copy.balance == Decimal("65.00")
def test(self): transcoder = JSONTranscoder() obj = CustomType2(CustomType1(UUID("b2723fe2c01a40d2875ea3aac6a09ff5"))) with self.assertRaises(TypeError) as cm: transcoder.encode(obj) self.assertEqual( cm.exception.args[0], ( "Object of type <class 'eventsourcing.tests.test_eventmapper." "CustomType2'> is not serializable. Please define and register " "a custom transcoding for this type." ), ) transcoder.register(UUIDAsHex()) transcoder.register(CustomType1AsDict()) transcoder.register(CustomType2AsDict()) data = transcoder.encode(obj) expect = ( b'{"_type_": "custom_type2_as_dict", "_data_": ' b'{"_type_": "custom_type1_as_dict", "_data_": ' b'{"_type_": "uuid_hex", "_data_": "b2723fe2c01' b'a40d2875ea3aac6a09ff5"}}}' ) self.assertEqual(data, expect) copy = transcoder.decode(data) self.assertIsInstance(copy, CustomType2) self.assertIsInstance(copy.value, CustomType1) self.assertIsInstance(copy.value.value, UUID) self.assertEqual(copy.value.value, obj.value.value) transcoder = JSONTranscoder() with self.assertRaises(TypeError) as cm: transcoder.decode(data) self.assertEqual( cm.exception.args[0], ( "Data serialized with name 'uuid_hex' is not deserializable. " "Please register a custom transcoding for this type." ), )
def test_contains(self):
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())

    event_recorder = POPOAggregateRecorder()
    event_store = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )

    aggregate = Aggregate()
    event_store.put(aggregate.collect_events())

    repository = Repository(event_store)

    # The repository supports 'in' checks for stored aggregate IDs.
    self.assertTrue(aggregate.id in repository)
    self.assertFalse(uuid4() in repository)
def test_subclasses(self) -> None:
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())

    event_recorder = POPOAggregateRecorder()
    event_store = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )

    # Subclass LogEvent.
    class TransactionLogEvent(LogEvent):
        pass

    class AccountCredited(TransactionLogEvent):
        pass

    class AccountDebited(TransactionLogEvent):
        pass

    # Subclass EventSourcedLog.
    class TransactionLog(EventSourcedLog[TransactionLogEvent]):
        def account_credited(self) -> AccountCredited:
            return self._trigger_event(logged_cls=AccountCredited)

        def account_debited(self) -> AccountDebited:
            return self._trigger_event(logged_cls=AccountDebited)

    transaction_log = TransactionLog(
        events=event_store,
        originator_id=uuid5(NAMESPACE_URL, "/aggregates"),
        logged_cls=TransactionLogEvent,
    )

    account_credited = transaction_log.account_credited()
    self.assertIsInstance(account_credited, AccountCredited)

    account_debited = transaction_log.account_debited()
    self.assertIsInstance(account_debited, AccountDebited)
def test_cache_maxsize_nonzero(self):
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())
    transcoder.register(EmailAddressAsStr())

    event_recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
    event_recorder.create_table()
    event_store = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )

    # A nonzero cache_maxsize gives the repository an LRU cache.
    repository = Repository(event_store, cache_maxsize=2)
    self.assertEqual(type(repository.cache), LRUCache)

    aggregate1 = Aggregate()
    self.assertFalse(aggregate1.id in repository)
    event_store.put(aggregate1.collect_events())
    self.assertTrue(aggregate1.id in repository)

    aggregate2 = Aggregate()
    self.assertFalse(aggregate2.id in repository)
    event_store.put(aggregate2.collect_events())
    self.assertTrue(aggregate2.id in repository)

    aggregate3 = Aggregate()
    self.assertFalse(aggregate3.id in repository)
    event_store.put(aggregate3.collect_events())
    self.assertTrue(aggregate3.id in repository)

    # The cache holds only two entries, so the first aggregate has been evicted.
    self.assertFalse(aggregate1.id in repository.cache.cache)

    # All aggregates can still be retrieved from the event store.
    self.assertEqual(1, repository.get(aggregate1.id).version)
    self.assertEqual(1, repository.get(aggregate2.id).version)
    self.assertEqual(1, repository.get(aggregate3.id).version)

    # A subsequent get reflects newly stored events.
    aggregate1.trigger_event(Aggregate.Event)
    event_store.put(aggregate1.collect_events())
    self.assertEqual(2, repository.get(aggregate1.id).version)
def test(self):
    transcoder = JSONTranscoder()

    obj = CustomType2(CustomType1(UUID("b2723fe2c01a40d2875ea3aac6a09ff5")))

    with self.assertRaises(TypeError):
        transcoder.encode(obj)

    transcoder.register(UUIDAsHex())
    transcoder.register(CustomType1AsDict())
    transcoder.register(CustomType2AsDict())

    data = transcoder.encode(obj)
    expect = (
        b'{"_type_": "custom_type2_as_dict", "_data_": '
        b'{"_type_": "custom_type1_as_dict", "_data_": '
        b'{"_type_": "uuid_hex", "_data_": "b2723fe2c01'
        b'a40d2875ea3aac6a09ff5"}}}'
    )
    self.assertEqual(data, expect)

    copy = transcoder.decode(data)
    self.assertIsInstance(copy, CustomType2)
    self.assertIsInstance(copy.value, CustomType1)
    self.assertIsInstance(copy.value.value, UUID)
    self.assertEqual(copy.value.value, obj.value.value)
def test_with_snapshot_store(self) -> None:
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())

    event_recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
    event_recorder.create_table()
    event_store: EventStore[Aggregate.Event] = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )

    snapshot_recorder = SQLiteAggregateRecorder(SQLiteDatastore(":memory:"))
    snapshot_recorder.create_table()
    snapshot_store: EventStore[Snapshot] = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=snapshot_recorder,
    )

    repository: Repository = Repository(event_store, snapshot_store)

    # Check key error.
    with self.assertRaises(AggregateNotFound):
        repository.get(uuid4())

    # Open an account.
    account = BankAccount.open(
        full_name="Alice",
        email_address="*****@*****.**",
    )

    # Credit the account.
    account.append_transaction(Decimal("10.00"))
    account.append_transaction(Decimal("25.00"))
    account.append_transaction(Decimal("30.00"))

    # Collect pending events.
    pending = account.collect_events()

    # Store pending events.
    event_store.put(pending)

    copy = repository.get(account.id)
    assert isinstance(copy, BankAccount)

    # Check copy has correct attribute values.
    assert copy.id == account.id
    assert copy.balance == Decimal("65.00")

    snapshot = Snapshot(
        originator_id=account.id,
        originator_version=account.version,
        timestamp=datetime.now(tz=TZINFO),
        topic=get_topic(type(account)),
        state=account.__dict__,
    )
    snapshot_store.put([snapshot])

    copy2 = repository.get(account.id)
    assert isinstance(copy2, BankAccount)

    # Check copy has correct attribute values.
    assert copy2.id == account.id
    assert copy2.balance == Decimal("65.00")

    # Credit the account.
    account.append_transaction(Decimal("10.00"))
    event_store.put(account.collect_events())

    # Check copy has correct attribute values.
    copy3 = repository.get(account.id)
    assert isinstance(copy3, BankAccount)
    assert copy3.id == account.id
    assert copy3.balance == Decimal("75.00")

    # Check can get old version of account.
    copy4 = repository.get(account.id, version=copy.version)
    assert isinstance(copy4, BankAccount)
    assert copy4.balance == Decimal("65.00")

    copy5 = repository.get(account.id, version=1)
    assert isinstance(copy5, BankAccount)
    assert copy5.balance == Decimal("0.00")

    copy6 = repository.get(account.id, version=2)
    assert isinstance(copy6, BankAccount)
    assert copy6.balance == Decimal("10.00")

    copy7 = repository.get(account.id, version=3)
    assert isinstance(copy7, BankAccount)
    assert copy7.balance == Decimal("35.00"), copy7.balance

    copy8 = repository.get(account.id, version=4)
    assert isinstance(copy8, BankAccount)
    assert copy8.balance == Decimal("65.00"), copy8.balance
def test_logging_aggregate_ids(self) -> None:
    class LoggedID(LogEvent):
        aggregate_id: UUID

    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())

    event_recorder = POPOAggregateRecorder()
    event_store = EventStore(
        mapper=Mapper(transcoder=transcoder),
        recorder=event_recorder,
    )
    log: EventSourcedLog[LoggedID] = EventSourcedLog(
        events=event_store,
        originator_id=uuid5(NAMESPACE_URL, "/aggregates"),
        logged_cls=LoggedID,
    )

    id1 = uuid4()
    id2 = uuid4()
    id3 = uuid4()

    # The log is initially empty.
    self.assertEqual(log.get_first(), None)
    self.assertEqual(log.get_last(), None)

    # Log the first aggregate ID.
    logged = log.trigger_event(aggregate_id=id1)
    event_store.put([logged])

    first = log.get_first()
    assert first
    self.assertEqual(first.aggregate_id, id1)

    last = log.get_last()
    assert last
    self.assertEqual(last.aggregate_id, id1)

    # Log the second aggregate ID.
    logged = log.trigger_event(aggregate_id=id2)
    event_store.put([logged])

    last = log.get_last()
    assert last
    self.assertEqual(last.aggregate_id, id2)

    # Log the third aggregate ID.
    logged = log.trigger_event(aggregate_id=id3, next_originator_version=3)
    event_store.put([logged])

    last = log.get_last()
    assert last
    self.assertEqual(last.aggregate_id, id3)

    first = log.get_first()
    assert first
    self.assertEqual(first.aggregate_id, id1)

    # Select logged events, with ordering, limits, and version ranges.
    ids = [e.aggregate_id for e in log.get()]
    self.assertEqual(ids, [id1, id2, id3])

    ids = [e.aggregate_id for e in log.get(gt=1)]
    self.assertEqual(ids, [id2, id3])

    ids = [e.aggregate_id for e in log.get(lte=2)]
    self.assertEqual(ids, [id1, id2])

    ids = [e.aggregate_id for e in log.get(limit=1)]
    self.assertEqual(ids, [id1])

    ids = [e.aggregate_id for e in log.get(desc=True)]
    self.assertEqual(ids, [id3, id2, id1])
def test(self):
    # Construct transcoder.
    transcoder = JSONTranscoder()
    transcoder.register(UUIDAsHex())
    transcoder.register(DecimalAsStr())
    transcoder.register(DatetimeAsISO())

    # Construct cipher.
    cipher = AESCipher(cipher_key=AESCipher.create_key(16))

    # Construct compressor.
    compressor = ZlibCompressor()

    # Construct mapper with cipher.
    mapper = Mapper(transcoder=transcoder, cipher=cipher)

    # Create a domain event.
    domain_event = BankAccount.TransactionAppended(
        originator_id=uuid4(),
        originator_version=123456,
        timestamp=datetime.now(tz=TZINFO),
        amount=Decimal("10.00"),
    )

    # Map from domain event.
    stored_event = mapper.from_domain_event(domain_event)

    # Map to domain event.
    copy = mapper.to_domain_event(stored_event)

    # Check values are not visible.
    assert "Alice" not in str(stored_event.state)

    # Check decrypted copy has correct values.
    assert copy.originator_id == domain_event.originator_id
    assert copy.originator_version == domain_event.originator_version
    assert copy.timestamp == domain_event.timestamp, copy.timestamp
    assert copy.originator_version == domain_event.originator_version

    assert len(stored_event.state) == 171, len(stored_event.state)

    # Construct mapper with cipher and compressor.
    mapper = Mapper(
        transcoder=transcoder,
        cipher=cipher,
        compressor=compressor,
    )

    # Map from domain event.
    stored_event = mapper.from_domain_event(domain_event)

    # Map to domain event.
    copy = mapper.to_domain_event(stored_event)

    # Check decompressed copy has correct values.
    assert copy.originator_id == domain_event.originator_id
    assert copy.originator_version == domain_event.originator_version

    assert len(stored_event.state) in (
        135, 136, 137, 138, 139, 140, 141, 142, 143,
    ), len(stored_event.state)
def setUp(self) -> None: self.factory = InfrastructureFactory.construct("TestCase") self.transcoder = JSONTranscoder() self.transcoder.register(UUIDAsHex()) self.transcoder.register(DecimalAsStr()) self.transcoder.register(DatetimeAsISO())
def construct_transcoder(self):
    transcoder = JSONTranscoder()
    transcoder.register(CustomType1AsDict())
    transcoder.register(CustomType2AsDict())
    transcoder.register(UUIDAsHex())
    return transcoder