def match_event_processing(self):
    """Pair 'event_processing_start'/'event_processing_end' marker events
    with the submit events they belong to and store them as TrackedEvents.

    If either marker event type is not registered in the raw data,
    execution tracking is disabled and every raw event is stored without
    processing times.
    """
    self.event_processing_start_id = self.raw_data.get_event_type_id(
        'event_processing_start')
    self.event_processing_end_id = self.raw_data.get_event_type_id(
        'event_processing_end')

    if self.event_processing_start_id is None \
       or self.event_processing_end_id is None:
        # Processing markers unavailable - keep events without timing info.
        self.tracking_execution = False
        for raw_event in self.raw_data.events:
            self.tracked_events.append(TrackedEvent(raw_event, None, None))
        return

    all_events = self.raw_data.events
    for idx in range(1, len(all_events)):
        current = all_events[idx]
        if current.type_id == self.event_processing_start_id:
            self.start_event = current
            # Walk backwards to find the most recent submit event whose
            # memory address (first data word) matches the processing
            # start event.
            for back_idx in range(idx - 1, -1, -1):
                if all_events[back_idx].data[0] == self.start_event.data[0]:
                    self.submit_event = all_events[back_idx]
                    break
        if current.type_id == self.event_processing_end_id:
            # A processing end matches the pending start when their memory
            # addresses agree - only then a full triple is recorded.
            if self.submit_event is not None \
               and current.data[0] == self.start_event.data[0]:
                self.tracked_events.append(
                    TrackedEvent(self.submit_event,
                                 self.start_event.timestamp,
                                 current.timestamp))
                self.submit_event = None
def transmit_events(self):
    """Continuously read profiler events, pair processing start/end markers
    with their submit events, and forward finished TrackedEvents to the CSV
    file and/or the outgoing stream.

    Also reports (without stopping) the Profiler's fatal-error event, which
    signals that the device-side data buffer overflowed.
    """
    if self.event_filename and self.event_types_filename:
        self.csvfile = self.processed_events.init_writing_data_to_files(
            self.event_filename, self.event_types_filename)

    while True:
        event = self._read_single_event()

        event_name = self.raw_data.registered_events_types[event.type_id].name
        if event_name == PROFILER_FATAL_ERROR_EVENT_NAME:
            self.logger.error(
                "Fatal error of Profiler on device! Event has been dropped. "
                "Data buffer has overflown. No more events will be received."
            )

        if event.type_id == self.event_processing_start_id:
            self.start_event = event
            # Search newest-first for the buffered submit event whose
            # memory address (first data word) matches this start marker.
            for idx in reversed(range(len(self.temp_events))):
                if self.temp_events[idx].data[0] == self.start_event.data[0]:
                    self.submit_event = self.temp_events[idx]
                    self.submitted_event_type = self.submit_event.type_id
                    del self.temp_events[idx]
                    break
        elif event.type_id == self.event_processing_end_id:
            # The end marker closes the pending start when the memory
            # addresses agree.
            if self.submitted_event_type is not None \
               and event.data[0] == self.start_event.data[0]:
                tracked_event = TrackedEvent(self.submit_event,
                                             self.start_event.timestamp,
                                             event.timestamp)
                if self.csvfile is not None:
                    self._write_event_to_file(self.csvfile, tracked_event)
                if self.sending:
                    self._send_event(tracked_event)
                self.submitted_event_type = None
        elif not self.processed_events.is_event_tracked(event.type_id):
            # Untracked event types are forwarded immediately, with no
            # processing times attached.
            tracked_event = TrackedEvent(event, None, None)
            if self.csvfile is not None:
                self._write_event_to_file(self.csvfile, tracked_event)
            if self.sending:
                self._send_event(tracked_event)
        else:
            # Tracked submit event - buffer it until its start marker shows up.
            self.temp_events.append(event)
def transmit_events(self):
    """Read profiler events in an endless loop, match processing start/end
    markers with their submit events, and emit TrackedEvents to the CSV
    file and/or the outgoing stream.
    """
    if self.event_filename and self.event_types_filename:
        self.csvfile = self.processed_events.init_writing_data_to_files(
            self.event_filename, self.event_types_filename)

    while True:
        event = self._read_single_event()

        if event.type_id == self.event_processing_start_id:
            self.start_event = event
            # Scan the buffered submit events from newest to oldest; a
            # matching memory address (first data word) identifies the
            # submit that this processing start belongs to.
            for pos in range(len(self.temp_events) - 1, -1, -1):
                candidate = self.temp_events[pos]
                if candidate.data[0] == self.start_event.data[0]:
                    self.submit_event = candidate
                    self.submitted_event_type = candidate.type_id
                    del self.temp_events[pos]
                    break
        elif event.type_id == self.event_processing_end_id:
            # Memory addresses of processing start and end must match to
            # complete the submit/start/end triple.
            if self.submitted_event_type is not None \
               and event.data[0] == self.start_event.data[0]:
                finished = TrackedEvent(self.submit_event,
                                        self.start_event.timestamp,
                                        event.timestamp)
                if self.csvfile is not None:
                    self._write_event_to_file(self.csvfile, finished)
                if self.sending:
                    self._send_event(finished)
                self.submitted_event_type = None
        elif not self.processed_events.is_event_tracked(event.type_id):
            # Untracked event types carry no processing times.
            untracked = TrackedEvent(event, None, None)
            if self.csvfile is not None:
                self._write_event_to_file(self.csvfile, untracked)
            if self.sending:
                self._send_event(untracked)
        else:
            # Tracked submit event - hold it until its start marker arrives.
            self.temp_events.append(event)
def _read_events_csv(self, filename):
    """Load tracked events from a CSV file into self.tracked_events.

    The first line must be a header matching
    TrackedEvent.TRACKED_EVENT_FIELDNAMES; every following line is
    deserialized into a TrackedEvent. On an unreadable file or a wrong
    header the error is logged and the process exits.
    """
    try:
        with open(filename, 'r', newline='') as csvfile:
            header = csvfile.readline().rstrip('\r\n')
            fieldnames = header.split(',')
            # An `assert` here would be stripped under `python -O`;
            # validate explicitly and fail the same way as on I/O errors.
            if fieldnames != TrackedEvent.TRACKED_EVENT_FIELDNAMES:
                self.logger.error("Invalid header in file: " + filename)
                sys.exit()
            for line in csvfile:
                self.tracked_events.append(TrackedEvent.deserialize(line))
    except IOError:
        self.logger.error("Problem with accessing file: " + filename)
        sys.exit()
def main():
    """Merge Peripheral and Central profiler datasets into one dataset.

    Clock drift between the two devices is compensated using the
    synchronization events registered on both sides; events outside the
    synchronization period or the interpolation range are dropped.
    """
    descr = "Merge data from Peripheral and Central. Synchronization events" \
            " should be registered at the beginning and at the end of" \
            " measurements (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("peripheral_dataset",
                        help="Name of Peripheral dataset")
    parser.add_argument("peripheral_sync_event",
                        help="Event used for synchronization - Peripheral")
    parser.add_argument("central_dataset", help="Name of Central dataset")
    parser.add_argument("central_sync_event",
                        help="Event used for synchronization - Central")
    parser.add_argument("result_dataset", help="Name for result dataset")
    args = parser.parse_args()

    evt_peripheral = ProcessedEvents()
    evt_peripheral.read_data_from_files(args.peripheral_dataset + ".csv",
                                        args.peripheral_dataset + ".json")
    evt_central = ProcessedEvents()
    evt_central.read_data_from_files(args.central_dataset + ".csv",
                                     args.central_dataset + ".json")

    # Compensating clock drift - based on synchronization events
    sync_evt_peripheral = evt_peripheral.get_event_type_id(
        args.peripheral_sync_event)
    sync_evt_central = evt_central.get_event_type_id(args.central_sync_event)

    sync_peripheral = [te for te in evt_peripheral.tracked_events
                       if te.submit.type_id == sync_evt_peripheral]
    sync_central = [te for te in evt_central.tracked_events
                    if te.submit.type_id == sync_evt_central]

    ts_peripheral = np.array(
        [[te.submit.timestamp, te.proc_start_time, te.proc_end_time]
         for te in evt_peripheral.tracked_events])

    sync_ts_peripheral = [te.submit.timestamp for te in sync_peripheral]
    sync_ts_central = [te.submit.timestamp for te in sync_central]

    sync_diffs_central = np.subtract(sync_ts_central[1:],
                                     sync_ts_central[:-1])
    sync_diffs_peripheral = np.subtract(sync_ts_peripheral[1:],
                                        sync_ts_peripheral[:-1])

    # Rounded inter-sync intervals act as fingerprints to align the two
    # sync-event sequences with each other.
    rounded_diffs_central = [round(d, 1) for d in sync_diffs_central]
    rounded_diffs_peripheral = [round(d, 1) for d in sync_diffs_peripheral]

    shift_c = rounded_diffs_central.index(rounded_diffs_peripheral[0])
    shift_p = rounded_diffs_peripheral.index(rounded_diffs_central[0])
    if shift_c < shift_p:
        sync_ts_central = sync_ts_central[shift_c:]
    elif shift_p < shift_c:
        sync_ts_peripheral = sync_ts_peripheral[shift_p:]

    # Trim both sequences to a common length.
    if len(sync_ts_central) < len(sync_ts_peripheral):
        sync_ts_peripheral = sync_ts_peripheral[:len(sync_ts_central)]
    elif len(sync_ts_peripheral) < len(sync_ts_central):
        sync_ts_central = sync_ts_central[:len(sync_ts_peripheral)]

    # Interpolate peripheral timestamps onto the central clock; None
    # entries (missing processing times) stay untouched.
    new_ts_peripheral = ts_peripheral.copy()
    not_none_mask = [[elem is not None for elem in row]
                     for row in new_ts_peripheral]
    new_ts_peripheral[not_none_mask] = sync_peripheral_ts(
        ts_peripheral, sync_ts_peripheral, sync_ts_central)
    assert len(new_ts_peripheral) == len(ts_peripheral)

    # Reindexing, renaming and compensating time differences for peripheral events
    max_central_id = max([int(i) for i in evt_central.registered_events_types])
    assert len(new_ts_peripheral) == len(evt_peripheral.tracked_events)
    evt_peripheral.tracked_events = [
        TrackedEvent(
            Event(te.submit.type_id + max_central_id + 1,
                  ts[0],
                  te.submit.data),
            ts[1],
            ts[2])
        for te, ts in zip(evt_peripheral.tracked_events, new_ts_peripheral)]

    evt_peripheral.registered_events_types = {
        k + max_central_id + 1: EventType(v.name + "_peripheral",
                                          v.data_types,
                                          v.data_descriptions)
        for k, v in evt_peripheral.registered_events_types.items()}
    evt_central.registered_events_types = {
        k: EventType(v.name + "_central",
                     v.data_types,
                     v.data_descriptions)
        for k, v in evt_central.registered_events_types.items()}

    # Filter out events that are out of synchronization period
    TIME_DIFF = 0.5
    start_time = sync_ts_central[0] - TIME_DIFF
    end_time = sync_ts_central[-1] + TIME_DIFF

    def within_sync_period(tracked):
        # Events with processing times must finish before end_time;
        # others only need to be submitted before it.
        if tracked.submit.timestamp < start_time:
            return False
        if tracked.proc_end_time is not None:
            return tracked.proc_end_time <= end_time
        return tracked.submit.timestamp <= end_time

    evt_peripheral.tracked_events = list(
        filter(within_sync_period, evt_peripheral.tracked_events))
    evt_central.tracked_events = list(
        filter(within_sync_period, evt_central.tracked_events))

    # Filter out events that were out of interpolation range
    evt_peripheral.tracked_events = [
        te for te in evt_peripheral.tracked_events
        if INTERP_OUT_OF_RANGE_VAL not in (te.submit.timestamp,
                                           te.proc_start_time,
                                           te.proc_end_time)]

    all_registered_events_types = evt_peripheral.registered_events_types.copy()
    all_registered_events_types.update(evt_central.registered_events_types)

    result_events = ProcessedEvents()
    result_events.tracked_events = \
        evt_peripheral.tracked_events + evt_central.tracked_events
    result_events.registered_events_types = all_registered_events_types

    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")
    print('Profiler data merged successfully')
def animate_events_real_time(self, fig, selected_events_types, one_line):
    """Drain events queued by the reader thread and redraw the real-time plot.

    Matches event-processing start/end markers with their submit events
    (processing durations become rectangles), keeps the visible timeline
    window following incoming events, and plots event submits as markers.
    A ``None`` item in the queue is the reader's end-of-stream sentinel.
    """
    rects = []
    events = []
    xranges = []
    for i in range(0, len(selected_events_types)):
        xranges.append([])
    while not self.queue.empty():
        event = self.queue.get()
        if event is None:
            self.logger.info("Stopped collecting new events")
            self.close_event(None)
            # FIX: stop draining the queue here. The code below
            # dereferences `event`, which would raise AttributeError on
            # the None sentinel (close_event does not exit by itself -
            # see the sibling stream variant, which calls sys.exit()
            # explicitly after it).
            break
        if self.processed_events.tracking_execution:
            if event.type_id == self.processed_events.event_processing_start_id:
                self.processed_events.start_event = event
                for i in range(len(self.temp_events) - 1, -1, -1):
                    # comparing memory addresses of event processing start
                    # and event submit to identify matching events
                    if self.temp_events[i].data[0] == \
                            self.processed_events.start_event.data[0]:
                        self.processed_events.submit_event = \
                            self.temp_events[i]
                        events.append(self.temp_events[i])
                        self.submitted_event_type = \
                            self.processed_events.submit_event.type_id
                        del self.temp_events[i]
                        break
            elif event.type_id == self.processed_events.event_processing_end_id:
                # comparing memory addresses of event processing start and
                # end to identify matching events
                if self.submitted_event_type is not None and event.data[0] \
                        == self.processed_events.start_event.data[0]:
                    rects.append(
                        matplotlib.patches.Rectangle(
                            (self.processed_events.start_event.timestamp,
                             self.processed_events.submit_event.type_id -
                             self.draw_state.event_processing_rect_height / 2),
                            event.timestamp -
                            self.processed_events.start_event.timestamp,
                            self.draw_state.event_processing_rect_height,
                            edgecolor='black'))
                    self.processed_events.tracked_events.append(
                        TrackedEvent(
                            self.processed_events.submit_event,
                            self.processed_events.start_event.timestamp,
                            event.timestamp))
                    self.submitted_event_type = None
            else:
                self.temp_events.append(event)
                # Nudge the scrolling window so new events stay within
                # the middle of the visible timeline.
                if event.timestamp > time.time() - self.start_time + \
                        self.draw_state.added_time - \
                        0.2 * self.draw_state.timeline_width:
                    self.draw_state.added_time += 0.05
                if event.timestamp < time.time() - self.start_time + \
                        self.draw_state.added_time - \
                        0.8 * self.draw_state.timeline_width:
                    self.draw_state.added_time -= 0.05
                events.append(event)
        else:
            events.append(event)
        self.processed_events.raw_data.events.append(event)

    # translating plot
    if not self.draw_state.synchronized_with_events:
        # ignore translating plot for stale events
        if not self.draw_state.stale_events_displayed:
            self.draw_state.stale_events_displayed = True
        else:
            # translate plot for new events
            if len(events) != 0:
                self.draw_state.added_time = events[-1].timestamp - \
                    0.3 * self.draw_state.timeline_width
                self.draw_state.synchronized_with_events = True
    if not self.draw_state.paused:
        self.draw_state.timeline_max = time.time() - self.start_time + \
            self.draw_state.added_time
        self.draw_state.ax.set_xlim(
            self.draw_state.timeline_max - self.draw_state.timeline_width,
            self.draw_state.timeline_max)

    # plotting events
    y = list(map(lambda x: x.type_id, events))
    x = list(map(lambda x: x.timestamp, events))
    self.draw_state.ax.plot(
        x,
        y,
        marker='o',
        linestyle=' ',
        color='r',
        markersize=self.draw_state.event_submit_markersize)
    self.draw_state.ax.add_collection(PatchCollection(rects))
    plt.gcf().canvas.flush_events()
def animate_events_real_time(self, fig):
    """Receive serialized TrackedEvents from the input stream and redraw
    the real-time plot.

    Reads until the stream times out, draws a rectangle for every event
    that carries processing times, keeps the visible timeline window
    following incoming events, and plots event submits as markers.
    """
    rects = []
    events = []

    # Receive events until the stream reports a timeout.
    while True:
        try:
            payload = self.in_stream.recv_ev()
        except StreamError as stream_err:
            if stream_err.args[1] == StreamError.TIMEOUT_MSG:
                break
            self.logger.error(
                "Receiving error: {}. Exiting".format(stream_err))
            self.close_event(None)
            sys.exit()

        tracked = TrackedEvent.deserialize(payload.decode())
        events.append(tracked.submit)
        self.processed_events.tracked_events.append(tracked)

        if tracked.proc_start_time is not None:
            assert tracked.proc_end_time is not None
            # Processing duration is visualized as a rectangle centered
            # vertically on the event's type id.
            rect_height = self.draw_state.event_processing_rect_height
            rects.append(
                matplotlib.patches.Rectangle(
                    (tracked.proc_start_time,
                     tracked.submit.type_id - rect_height / 2),
                    tracked.proc_end_time - tracked.proc_start_time,
                    rect_height,
                    edgecolor='black'))

    # translating plot
    if not self.draw_state.synchronized_with_events:
        # ignore translating plot for stale events
        if not self.draw_state.stale_events_displayed:
            self.draw_state.stale_events_displayed = True
        elif len(events) != 0:
            # translate plot for new events
            self.draw_state.added_time = events[-1].timestamp - \
                0.3 * self.draw_state.timeline_width
            self.draw_state.synchronized_with_events = True
    if not self.draw_state.paused:
        self.draw_state.timeline_max = time.time() - self.start_time + \
            self.draw_state.added_time
        self.draw_state.ax.set_xlim(
            self.draw_state.timeline_max - self.draw_state.timeline_width,
            self.draw_state.timeline_max)

    # plotting events
    y = [ev.type_id for ev in events]
    x = [ev.timestamp for ev in events]
    self.draw_state.ax.plot(
        x,
        y,
        marker='o',
        linestyle=' ',
        color='r',
        markersize=self.draw_state.event_submit_markersize)
    self.draw_state.ax.add_collection(PatchCollection(rects))
    plt.gcf().canvas.flush_events()

    if self.event_close.is_set():
        self.close_event(None)
    if self.close_event_flag:
        sys.exit()