def test(self):
    with InvoicingApplication.mixin(PopoApplication)() as app:
        assert isinstance(app, InvoicingApplication)

        # Create an invoice.
        app.create_invoice(number="0001", amount=Decimal("10.00"))

        # Get invoice from repository.
        invoice = app.get_invoice("0001")
        self.assertIsInstance(invoice, Invoice)

        # Create more invoices.
        app.create_invoice(number="0002", amount=Decimal("10.00"))
        app.create_invoice(number="0003", amount=Decimal("10.00"))
        app.create_invoice(number="0004", amount=Decimal("10.00"))

        # Read the event notifications and reconstruct the domain events.
        reader = NotificationLogReader(app.notification_log)
        event_notifications = reader.read_list()
        events = [
            app.event_store.event_mapper.event_from_topic_and_state(
                e["topic"], e["state"]
            )
            for e in event_notifications
        ]

        # Check the total amount invoiced.
        total_amount_invoiced = sum([e.amount for e in events])
        self.assertEqual(total_amount_invoiced, Decimal("40.00"))
def test_application_with_infrastructure(self):
    with self.construct_concrete_application() as app:
        # Start with a new table.
        app.drop_table()
        app.drop_table()
        app.setup_table()
        app.setup_table()

        # Check the notifications.
        reader = NotificationLogReader(app.notification_log)
        old_notifications = reader.list_notifications()
        len_old = len(old_notifications)

        # Check the application's persistence policy,
        # repository, and event store are working.
        aggregate = ExampleAggregateRoot.__create__()
        aggregate.__save__()
        self.assertTrue(aggregate.id in app.repository)

        # Check the notifications.
        reader = NotificationLogReader(app.notification_log)
        notifications = reader.list_notifications()
        self.assertEqual(1 + len_old, len(notifications))
        topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Created"
        self.assertEqual(topic, notifications[len_old]["topic"])

        app.drop_table()
def test(self):
    with self.get_application() as app:
        # Start with a new table.
        app.drop_table()
        app.drop_table()
        app.setup_table()
        app.setup_table()

        # Check the application's persistence policy,
        # repository, and event store are working.
        aggregate = ExampleAggregateRoot.__create__()
        aggregate.__save__()
        self.assertTrue(aggregate.id in app.repository)

        # Check the notifications.
        reader = NotificationLogReader(app.notification_log)
        notifications = reader.read_list()
        self.assertEqual(1, len(notifications))
        topic = "eventsourcing.tests.core_tests.test_aggregate_root#ExampleAggregateRoot.Created"
        self.assertEqual(topic, notifications[0]["topic"])

        app.drop_table()
def __init__(self, normal_speed, scale_factor, stop_event: Event,
             is_verbose=False, seen_prompt_events=None, processes=None):
    super(ProcessRunningClockThread, self).__init__(daemon=True)
    self.normal_speed = normal_speed
    self.scale_factor = scale_factor
    self.stop_event = stop_event
    self.seen_prompt_events = seen_prompt_events
    self.processes = processes
    self.last_tick_time = None
    self.last_process_time = None
    self.all_tick_durations = deque()
    self.tick_adjustment = 0.0
    self.is_verbose = is_verbose
    if normal_speed and scale_factor:
        self.tick_interval = 1 / normal_speed / scale_factor
    else:
        self.tick_interval = None
    if self.tick_interval:
        self.tick_durations_window_size = max(
            100, int(round(1 / self.tick_interval, 0))
        )
    else:
        self.tick_durations_window_size = 1000

    # Construct lists of followers for each process.
    self.followers = {}
    for process_name, process in self.processes.items():
        self.followers[process_name] = []
    for process_name, process in self.processes.items():
        for upstream_process_name in process.readers:
            self.followers[upstream_process_name].append(process_name)

    # Construct a notification log reader for each process.
    self.readers = {}
    for process_name, process in self.processes.items():
        reader = NotificationLogReader(
            notification_log=process.notification_log,
            use_direct_query_if_available=True,
        )
        self.readers[process_name] = reader
def test(self):
    # Construct wiki application.
    with FederatedWikiApplication.mixin(SQLAlchemyApplication)() as app:
        # Just for the IDE.
        assert isinstance(app, FederatedWikiApplication)

        # Add all the detected transmissions to the wiki.
        for transmission in transmitted_data.transmissions:
            # Decide the wiki page slug.
            from_operator = transmission["from_operator"]

            # Get or create the wiki page.
            try:
                app.get_page(slug=from_operator)
            except PageNotFound:
                app.create_page(
                    title="Operator: %s" % from_operator, slug=from_operator
                )

            # Append a paragraph to the operator's page for each transmission.
            app.append_paragraph(
                slug=from_operator, paragraph=json.dumps(transmission)
            )

        # Check operator 'KI5DYI' has 15 paragraphs on their page.
        page = app.get_page(slug="KI5DYI")
        self.assertEqual(len(page["paragraphs"]), 15)

        # Check their first transmission looks ok.
        self.assertEqual(json.loads(page["paragraphs"][0])["tx_frequency"], 361)
        self.assertEqual(json.loads(page["paragraphs"][0])["to_operator"], "SV1AIQ")
        self.assertEqual(json.loads(page["paragraphs"][0])["message"], "RR73")

        # Check their last transmission looks ok.
        self.assertEqual(json.loads(page["paragraphs"][-1])["tx_frequency"], 1688)
        self.assertEqual(json.loads(page["paragraphs"][-1])["to_operator"], "PA5RH")
        self.assertEqual(json.loads(page["paragraphs"][-1])["message"], "-07")

        # Process the transmission events into a set of frequencies
        # and a set of operators.
        frequencies = set()
        operators = set()
        timestamps = []
        log_reader = NotificationLogReader(notification_log=app.notification_log)
        for notification in log_reader.read_list():
            if notification["topic"].endswith("ParagraphAppended"):
                domain_event_state = json.loads(notification["state"])
                paragraph = domain_event_state["paragraph"]
                transmission = json.loads(paragraph)
                tx_frequency = transmission["tx_frequency"]
                frequencies.add(int(tx_frequency))
                operators.add(transmission["from_operator"])
                operators.add(transmission["to_operator"])
                timestamps.append(transmission["timestamp"])

        # Check the maximum and minimum frequencies.
        self.assertEqual(max(frequencies), 4245)
        self.assertEqual(min(frequencies), 342)

        # Check the number of operators.
        self.assertEqual(len(operators), 258)

        # Check the first and last timestamps (order is preserved).
        self.assertEqual(timestamps[0], 143330)
        self.assertEqual(timestamps[-1], 153145)
def test_remote_notification_log(self):
    num_notifications = 42
    section_size = 5

    # Build a notification log (fixture).
    self.append_notifications(num_notifications)

    # Start a simple server.
    from wsgiref.util import setup_testing_defaults
    from wsgiref.simple_server import make_server

    port = 8080
    base_url = "http://127.0.0.1:{}/notifications/".format(port)

    def simple_app(environ, start_response):
        """Simple WSGI application."""
        setup_testing_defaults(environ)

        # Identify log and section from request.
        path_info = environ["PATH_INFO"]
        try:
            section_id = path_info.strip("/").split("/")[-1]
        except ValueError:
            # Start response.
            status = "404 Not Found"
            headers = [("Content-type", "text/plain; charset=utf-8")]
            start_response(status, headers)
            return []

        # Select the notification log.
        notification_log = self.create_notification_log(section_size)

        # Get serialized section.
        json_encoder = ObjectJSONEncoder(separators=JSON_SEPARATORS)
        view = NotificationLogView(notification_log, json_encoder)
        resource = view.present_resource(section_id)

        # Todo: Maybe redirect if the section ID is a mismatch, so
        # the URL is good for caching.

        # Start response.
        status = "200 OK"
        headers = [("Content-type", "text/plain; charset=utf-8")]
        start_response(status, headers)

        # Return a list of lines.
        return [(line + "\n").encode("utf8") for line in resource.split("\n")]

    httpd = make_server("", port, simple_app)
    print("Serving on port {}...".format(port))
    thread = Thread(target=httpd.serve_forever)
    thread.setDaemon(True)
    thread.start()

    try:
        # Use reader with client to read all items in remote feed after item 5.
        notification_log = RemoteNotificationLog(base_url)

        # Just before we start, test the deserialize_section_size exceptions.
        notification_log.deserialize_section_size('1')
        with self.assertRaises(ValueError):
            notification_log.deserialize_section_size('"1')
        with self.assertRaises(TypeError):
            notification_log.deserialize_section_size('"1"')

        # Get all the items.
        notification_log_reader = NotificationLogReader(
            notification_log=notification_log
        )
        items_from_start = notification_log_reader.list_notifications()

        # Check we got all the items.
        self.assertEqual(len(items_from_start), num_notifications)
        self.assertEqual(items_from_start[0]["id"], 1)
        self.assertEqual(items_from_start[0]["state"], b"item1")
        self.assertEqual(
            items_from_start[0]["topic"],
            "eventsourcing.domain.model.events#DomainEvent",
        )
        expected_section_count = ceil(num_notifications / float(section_size))
        self.assertEqual(
            notification_log_reader.section_count, expected_section_count
        )

        # Get all the items from item 5.
        items_from_5 = list(notification_log_reader[section_size - 1:])

        # Check we got everything after item 5.
        self.assertEqual(len(items_from_5), num_notifications - section_size + 1)
        self.assertEqual(items_from_5[0]["id"], section_size)
        self.assertEqual(
            items_from_5[0]["topic"],
            "eventsourcing.domain.model.events#DomainEvent",
        )
        self.assertEqual(
            items_from_5[0]["state"],
            "item{}".format(section_size).encode("utf8"),
        )
        expected_section_count = ceil(num_notifications / float(section_size))
        self.assertEqual(
            notification_log_reader.section_count, expected_section_count
        )

        # Check ValueError is raised for deserialization errors.
        with self.assertRaises(ValueError):
            notification_log.deserialize_section("invalid json")
    finally:
        httpd.shutdown()
        thread.join()
        httpd.server_close()
def test(self):
    # Build notification log.
    section_size = 5
    notification_log = self.create_notification_log(section_size=section_size)

    # Append 13 notifications.
    self.append_notifications(13)

    # Construct notification log reader.
    reader = NotificationLogReader(notification_log)

    # Check position.
    self.assertEqual(reader.position, 0)

    # Read all notifications.
    all_notifications = list(reader)
    self.assertEqual(13, len(all_notifications))

    # Check position.
    self.assertEqual(reader.position, 13)

    # Add some more items to the log.
    self.append_notifications(13, 21)

    # Read subsequent notifications.
    subsequent_notifications = list(reader)
    self.assertEqual(len(subsequent_notifications), 8)

    # Check position.
    self.assertEqual(reader.position, 21)

    subsequent_notifications = list(reader)
    self.assertEqual(len(subsequent_notifications), 0)

    # Set position.
    reader.seek(13)
    subsequent_notifications = list(reader)
    self.assertEqual(len(subsequent_notifications), 8)

    # Read items after a particular position.
    self.assertEqual(len(list(reader[0:])), 21)
    self.assertEqual(len(list(reader[1:])), 20)
    self.assertEqual(len(list(reader[2:])), 19)
    self.assertEqual(len(list(reader[3:])), 18)
    self.assertEqual(len(list(reader[13:])), 8)
    self.assertEqual(len(list(reader[18:])), 3)
    self.assertEqual(len(list(reader[19:])), 2)
    self.assertEqual(len(list(reader[20:])), 1)
    self.assertEqual(len(list(reader[21:])), 0)

    # Check item numbers less than 1 cause value errors.
    with self.assertRaises(ValueError):
        reader.position = -1
        list(reader)
    with self.assertRaises(ValueError):
        list(reader.seek(-1))

    # Resume from a saved position.
    saved_position = 5
    advance_by = 3
    reader.seek(saved_position)
    self.assertEqual(reader.position, saved_position)
    reader.list_notifications(advance_by=advance_by)
    self.assertEqual(reader.position, saved_position + advance_by)

    # Read items between particular positions.
    # - check iteration stops at the end of the slice, and position tracks ok
    self.assertEqual(reader[0]["id"], 1)
    self.assertEqual(reader.position, 1)

    self.assertEqual(next(reader)["id"], 2)
    self.assertEqual(reader.position, 2)

    reader.seek(5)
    self.assertEqual(next(reader)["id"], 6)
    self.assertEqual(reader.position, 6)

    reader.seek(0)
    list(reader)
    self.assertEqual(reader.position, 21)

    self.assertEqual(len(list(reader[0:1])), 1)
    self.assertEqual(reader.position, 1)
    self.assertEqual(len(list(reader[1:3])), 2)
    self.assertEqual(reader.position, 3)
    self.assertEqual(len(list(reader[2:5])), 3)
    self.assertEqual(reader.position, 5)
    self.assertEqual(len(list(reader[3:7])), 4)
    self.assertEqual(reader.position, 7)
    self.assertEqual(len(list(reader[13:20])), 7)
    self.assertEqual(reader.position, 20)
    self.assertEqual(len(list(reader[18:20])), 2)
    self.assertEqual(reader.position, 20)
    self.assertEqual(len(list(reader[19:20])), 1)
    self.assertEqual(reader.position, 20)
    self.assertEqual(len(list(reader[20:20])), 0)
    self.assertEqual(reader.position, 20)
    self.assertEqual(len(list(reader[21:20])), 0)
    self.assertEqual(reader.position, 21)

    with self.assertRaises(StopIteration):
        next(reader)
def pull_notifications():
    global app

    # Iterate over all notifications in the application's notification log.
    reader = NotificationLogReader(app.notification_log)
    for notification in reader:
        # print(notification)
        pass
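# A hedged sketch (not part of the original source) showing how the
# notifications pulled above might be turned back into domain events.
# It assumes the same application API used in the invoicing test earlier
# in this file (app.event_store.event_mapper.event_from_topic_and_state);
# the function name reconstruct_domain_events is illustrative only.
def reconstruct_domain_events():
    global app
    reader = NotificationLogReader(app.notification_log)
    events = []
    for notification in reader:
        # Resolve each notification's topic and state back into a domain event.
        event = app.event_store.event_mapper.event_from_topic_and_state(
            notification["topic"], notification["state"]
        )
        events.append(event)
    return events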
def follow(self, upstream_application_name, notification_log):
    # Create a reader.
    reader = NotificationLogReader(
        notification_log, use_direct_query_if_available=True
    )
    self.readers[upstream_application_name] = reader
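# A hedged sketch (not part of the original source) of how a reader registered
# by follow() above might later be used to pull new notifications from the
# followed application. The method name pull_from() and the default
# advance_by value are assumptions for illustration; list_notifications()
# with advance_by is the call demonstrated in the reader tests above.
def pull_from(self, upstream_application_name, advance_by=100):
    # Continue reading from the reader's current position.
    reader = self.readers[upstream_application_name]
    return reader.list_notifications(advance_by=advance_by)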
def __init__(
    self,
    application_topic,
    pipeline_id,
    infrastructure_topic,
    setup_table,
    address,
    upstreams,
    downstreams,
    push_prompt_interval,
):
    super(ProcessorServer, self).__init__()

    # Make getting notifications more efficient.
    notificationlog.USE_REGULAR_SECTIONS = False
    notificationlog.DEFAULT_SECTION_SIZE = 100

    self.has_been_stopped = Event()
    signal(SIGINT, self.stop)
    self.application_class: Type[ProcessApplication] = resolve_topic(
        application_topic
    )
    self.pipeline_id = pipeline_id
    self.application_name = self.application_class.create_name()
    infrastructure_class: Type[
        ApplicationWithConcreteInfrastructure
    ] = resolve_topic(infrastructure_topic)
    self.application = self.application_class.mixin(
        infrastructure_class=infrastructure_class
    )(pipeline_id=self.pipeline_id, setup_table=setup_table)
    self.address = address
    self.json_encoder = ObjectJSONEncoder()
    self.json_decoder = ObjectJSONDecoder()
    self.upstreams = upstreams
    self.downstreams = downstreams
    self.prompt_events = {}
    self.push_prompt_interval = push_prompt_interval
    self.notification_log_view = NotificationLogView(
        self.application.notification_log,
        json_encoder=ObjectJSONEncoder(),
    )
    for upstream_name in self.upstreams:
        self.prompt_events[upstream_name] = Event()
        # self.prompt_events[upstream_name].set()
    self.downstream_prompt_event = Event()
    subscribe(self._set_downstream_prompt_event, is_prompt_to_pull)
    self.serve()

    self.clients: Dict[str, ProcessorClient] = {}
    self.clients_lock = Lock()
    start_client_threads = []
    remotes = {}
    remotes.update(self.upstreams)
    remotes.update(self.downstreams)
    for name, address in remotes.items():
        thread = StartClient(self.clients, name, address)
        thread.setDaemon(True)
        thread.start()
        start_client_threads.append(thread)
    for thread in start_client_threads:
        thread.join()
        # logging.info("%s connected to %s" % (self.application_name, thread.name))

    self.push_prompts_thread = Thread(target=self._push_prompts)
    self.push_prompts_thread.setDaemon(True)
    self.push_prompts_thread.start()

    # self.count_of_events = 0
    self.pull_notifications_threads = {}
    self.unprocessed_domain_event_queue = Queue()
    for upstream_name, upstream_address in self.upstreams.items():
        thread = PullNotifications(
            prompt_event=self.prompt_events[upstream_name],
            reader=NotificationLogReader(
                RemoteNotificationLog(
                    client=self.clients[upstream_name],
                    json_decoder=ObjectJSONDecoder(),
                    section_size=self.application.notification_log.section_size,
                )
            ),
            process_application=self.application,
            event_queue=self.unprocessed_domain_event_queue,
            upstream_name=upstream_name,
            has_been_stopped=self.has_been_stopped,
        )
        thread.setDaemon(True)
        self.pull_notifications_threads[upstream_name] = thread

    self.process_events_thread = Thread(target=self._process_events)
    self.process_events_thread.setDaemon(True)
    self.process_events_thread.start()

    # Start the threads.
    for thread in self.pull_notifications_threads.values():
        thread.start()

    # Wait for termination.
    self.wait_for_termination()