def plot_events_real_time(self, queue, finish_event, selected_events_types=None, one_line=False):
    """Continuously plot profiler events received through a queue.

    The first item read from ``queue`` must be the dict of registered
    event types; later items are individual events consumed by the
    animation callback. Blocks in ``plt.show()`` until the window is
    closed; the close handler then signals ``finish_event``.

    :param queue: queue delivering the event-type dict, then events
    :param finish_event: event object signalled when plotting stops
    :param selected_events_types: event type ids to draw
        (default: every registered type)
    :param one_line: forwarded unchanged to the animation callback
    """
    self.start_time = time.time()
    self.queue = queue
    self.finish_event = finish_event
    self.raw_data = EventsData([], {})
    # First queue item carries the event type descriptions.
    self.raw_data.registered_events_types = queue.get()
    if selected_events_types is None:
        selected_events_types = list(
            self.raw_data.registered_events_types.keys())
    fig = self._prepare_plot(selected_events_types)
    # Start/Stop button lives in its own small axes below the plot.
    self.start_stop_ax = plt.axes([0.8, 0.025, 0.1, 0.04])
    self.start_stop_button = Button(self.start_stop_ax, 'Start/Stop')
    self.start_stop_button.on_clicked(self.on_click_start_stop)
    # Restore the main axes as current so later plt.* calls hit the plot,
    # not the button axes.
    plt.sca(self.draw_state.ax)
    fig.canvas.mpl_connect('close_event', self.real_time_close_event)
    # Keep a reference to the animation so it is not garbage collected
    # while plt.show() runs.
    ani = animation.FuncAnimation(
        fig,
        self.animate_events_real_time,
        fargs=[selected_events_types, one_line],
        interval=self.plot_config['refresh_time'])
    plt.show()
def __init__(self, config=RttNordicConfig, finish_event=None, queue=None,
             event_filename=None, event_types_filename=None,
             log_lvl=logging.WARNING):
    """Initialize the RTT profiler host and connect to the device.

    :param config: RTT/profiler configuration mapping (channels, chunk
        sizes, timestamp constants)
    :param finish_event: optional event object used to stop reading
    :param queue: optional queue for forwarding events to a consumer
    :param event_filename: optional csv output file for events
    :param event_types_filename: optional output file for event types
    :param log_lvl: logging level for the console logger
    """
    self.event_filename = event_filename
    self.event_types_filename = event_types_filename
    self.config = config
    self.finish_event = finish_event
    self.queue = queue
    self.received_events = EventsData([], {})
    # State for reconstructing full timestamps from the wrapping
    # raw hardware counter.
    self.timestamp_overflows = 0
    self.after_half = False
    # Text buffer for event descriptions and byte buffers (plus a
    # running byte count) for raw event data read over RTT.
    self.desc_buf = ""
    self.bufs = list()
    self.bcnt = 0
    self.last_read_time = time.time()
    self.reading_data = True
    self.logger = logging.getLogger('RTT Profiler Host')
    self.logger_console = logging.StreamHandler()
    self.logger.setLevel(log_lvl)
    self.log_format = logging.Formatter(
        '[%(levelname)s] %(name)s: %(message)s')
    self.logger_console.setFormatter(self.log_format)
    self.logger.addHandler(self.logger_console)
    # Opens the JLink/RTT connection immediately (hardware side effect).
    self.connect()
class ProcessedEvents():
    """Pairs raw profiler events with their processing start/end events."""

    def __init__(self):
        """Set up empty event storage, matching state and a logger."""
        self.raw_data = EventsData([], {})
        self.tracked_events = []
        # Ids of the meta events used to pair submissions with their
        # processing; resolved lazily in match_event_processing().
        self.event_processing_start_id = None
        self.event_processing_end_id = None
        self.tracking_execution = True
        # Scratch state while scanning for matching start/end pairs.
        self.submit_event = None
        self.start_event = None
        self.logger = logging.getLogger('Processed Events')
        self.logger_console = logging.StreamHandler()
        self.logger.setLevel(logging.WARNING)
        self.log_format = logging.Formatter(
            '[%(levelname)s] %(name)s: %(message)s')
        self.logger_console.setFormatter(self.log_format)
        self.logger.addHandler(self.logger_console)

    def match_event_processing(self):
        """Build tracked_events by pairing each submit with its
        processing-start and processing-end events.

        If the start/end meta event types are not registered, execution
        tracking is disabled and every raw event is wrapped in a
        TrackedEvent with no processing times.
        """
        self.event_processing_start_id = self.raw_data.get_event_type_id(
            'event_processing_start')
        self.event_processing_end_id = self.raw_data.get_event_type_id(
            'event_processing_end')
        if (self.event_processing_start_id is None) or \
           (self.event_processing_end_id is None):
            self.tracking_execution = False
            for ev in self.raw_data.events:
                self.tracked_events.append(TrackedEvent(ev, None, None))
            return
        # Scan events in order; index 0 is skipped because a processing
        # start cannot precede its submit event.
        for i in range(1, len(self.raw_data.events)):
            if self.raw_data.events[
                    i].type_id == self.event_processing_start_id:
                self.start_event = self.raw_data.events[i]
                # Search backwards for the most recent submit event.
                for j in range(i - 1, -1, -1):
                    # comparing memory addresses of event processing start
                    # and event submit to identify matching events
                    if self.raw_data.events[j].data[
                            0] == self.start_event.data[0]:
                        self.submit_event = self.raw_data.events[j]
                        break
            # comparing memory addresses of event processing start and end
            # to identify matching events
            if self.raw_data.events[i].type_id == self.event_processing_end_id:
                if self.submit_event is not None \
                   and self.raw_data.events[i].data[0] == self.start_event.data[0]:
                    self.tracked_events.append(
                        TrackedEvent(self.submit_event,
                                     self.start_event.timestamp,
                                     self.raw_data.events[i].timestamp))
                    self.submit_event = None
def __init__(self, own_recv_socket_dict, own_send_socket_dict=None,
             remote_socket_dict=None, config=RttNordicConfig,
             event_filename=None, event_types_filename=None,
             log_lvl=logging.INFO):
    """Initialize the model creator with socket-backed streams.

    :param own_recv_socket_dict: sockets for the receiving Stream
    :param own_send_socket_dict: sockets for the sending Stream
        (sending is enabled only when both this and
        ``remote_socket_dict`` are given)
    :param remote_socket_dict: remote endpoint for the sending Stream
    :param config: RTT/profiler configuration mapping
    :param event_filename: optional csv output file for events
    :param event_types_filename: optional output file for event types
    :param log_lvl: logging level for the console logger
    """
    self.config = config
    self.event_filename = event_filename
    self.event_types_filename = event_types_filename
    self.csvfile = None
    # No timeouts: block indefinitely on both channels.
    timeouts = {'descriptions': None, 'events': None}
    self.in_stream = Stream(own_recv_socket_dict, timeouts)
    if own_send_socket_dict is not None and remote_socket_dict is not None:
        self.sending = True
        self.out_stream = Stream(own_send_socket_dict, timeouts,
                                 remote_socket_dict=remote_socket_dict)
    else:
        self.sending = False
    # Timestamp-wrap tracking for the raw hardware counter.
    self.timestamp_overflows = 0
    self.after_half = False
    self.processed_events = ProcessedEvents()
    # Submit events waiting for a matching processing-start event.
    self.temp_events = []
    self.submitted_event_type = None
    self.raw_data = EventsData([], {})
    self.event_processing_start_id = None
    self.event_processing_end_id = None
    self.submit_event = None
    self.start_event = None
    # Raw byte chunks and running byte count of buffered event data.
    self.bufs = list()
    self.bcnt = 0
    self.logger = logging.getLogger('Profiler model creator')
    self.logger_console = logging.StreamHandler()
    self.logger.setLevel(log_lvl)
    self.log_format = logging.Formatter(
        '[%(levelname)s] %(name)s: %(message)s')
    self.logger_console.setFormatter(self.log_format)
    self.logger.addHandler(self.logger_console)
def __init__(self):
    """Create empty event storage and configure a console logger."""
    # Event containers: raw input plus the matched/tracked output.
    self.raw_data = EventsData([], {})
    self.tracked_events = []
    # Meta-event ids and matching scratch state; filled in later by
    # the matching routine.
    self.event_processing_start_id = None
    self.event_processing_end_id = None
    self.submit_event = None
    self.start_event = None
    self.tracking_execution = True
    # Console logger with a compact "[LEVEL] name: message" format.
    self.logger = logging.getLogger('Processed Events')
    self.logger.setLevel(logging.WARNING)
    self.logger_console = logging.StreamHandler()
    self.log_format = logging.Formatter('[%(levelname)s] %(name)s: %(message)s')
    self.logger_console.setFormatter(self.log_format)
    self.logger.addHandler(self.logger_console)
def __init__(self, log_lvl=logging.WARNING):
    """Prepare plotting state and configure the module logger.

    :param log_lvl: logging level for the console logger
    """
    # Strip the matplotlib toolbar and disable interactive mode before
    # any figure is created.
    plt.rcParams['toolbar'] = 'None'
    plt.ioff()
    self.raw_data = EventsData([], {})
    self.plot_config = PlotNordicConfig
    # Drawing state seeded from the static plot configuration.
    self.draw_state = DrawState(
        self.plot_config['timeline_width_init'],
        self.plot_config['event_processing_rect_height'],
        self.plot_config['event_submit_markersize'])
    self.processed_data = ProcessedData()
    self.submitted_event_type = None
    # Console logger with a compact "[LEVEL] name: message" format.
    self.logger = logging.getLogger('RTT Plot Nordic')
    self.logger.setLevel(log_lvl)
    self.logger_console = logging.StreamHandler()
    self.log_format = logging.Formatter('[%(levelname)s] %(name)s: %(message)s')
    self.logger_console.setFormatter(self.log_format)
    self.logger.addHandler(self.logger_console)
def __init__(self, stream, event_close, sending_events=False,
             config=RttNordicConfig, event_filename=None,
             event_types_filename=None, log_lvl=logging.INFO):
    """Initialize the model creator around an existing stream.

    :param stream: duplex stream used for descriptions and events
    :param event_close: event object used to signal shutdown
    :param sending_events: whether processed events are forwarded
    :param config: RTT/profiler configuration mapping
    :param event_filename: optional csv output file for events
    :param event_types_filename: optional output file for event types
    :param log_lvl: logging level for the console logger
    """
    self.config = config
    self.event_filename = event_filename
    self.event_types_filename = event_types_filename
    self.csvfile = None
    self.event_close = event_close
    # One-second timeouts on both channels of the shared stream.
    self.stream = stream
    self.stream.set_timeouts({'descriptions': 1, 'events': 1})
    self.sending = sending_events
    # Timestamp-wrap tracking for the raw hardware counter.
    self.timestamp_overflows = 0
    self.after_half = False
    # Processed-event storage plus matching scratch state.
    self.processed_events = ProcessedEvents()
    self.temp_events = []
    self.submitted_event_type = None
    self.raw_data = EventsData([], {})
    self.event_processing_start_id = None
    self.event_processing_end_id = None
    self.submit_event = None
    self.start_event = None
    # Raw byte chunks and running byte count of buffered event data.
    self.bufs = list()
    self.bcnt = 0
    # Console logger with a compact "[LEVEL] name: message" format.
    self.logger = logging.getLogger('Profiler model creator')
    self.logger.setLevel(log_lvl)
    self.logger_console = logging.StreamHandler()
    self.log_format = logging.Formatter('[%(levelname)s] %(name)s: %(message)s')
    self.logger_console.setFormatter(self.log_format)
    self.logger.addHandler(self.logger_console)
class ModelCreator:
    """Reads raw profiler events from a stream, matches submits with
    processing start/end events and writes/forwards the result."""

    def __init__(self, own_recv_socket_dict, own_send_socket_dict=None,
                 remote_socket_dict=None, config=RttNordicConfig,
                 event_filename=None, event_types_filename=None,
                 log_lvl=logging.INFO):
        """Initialize streams, matching state and logging.

        Sending is enabled only when both ``own_send_socket_dict`` and
        ``remote_socket_dict`` are provided.
        """
        self.config = config
        self.event_filename = event_filename
        self.event_types_filename = event_types_filename
        self.csvfile = None
        # No timeouts: block indefinitely on both channels.
        timeouts = {'descriptions': None, 'events': None}
        self.in_stream = Stream(own_recv_socket_dict, timeouts)
        if own_send_socket_dict is not None and remote_socket_dict is not None:
            self.sending = True
            self.out_stream = Stream(own_send_socket_dict, timeouts,
                                     remote_socket_dict=remote_socket_dict)
        else:
            self.sending = False
        # Timestamp-wrap tracking for the raw hardware counter.
        self.timestamp_overflows = 0
        self.after_half = False
        self.processed_events = ProcessedEvents()
        # Submit events waiting for a matching processing-start event.
        self.temp_events = []
        self.submitted_event_type = None
        self.raw_data = EventsData([], {})
        self.event_processing_start_id = None
        self.event_processing_end_id = None
        self.submit_event = None
        self.start_event = None
        # Raw byte chunks and running byte count of buffered event data.
        self.bufs = list()
        self.bcnt = 0
        self.logger = logging.getLogger('Profiler model creator')
        self.logger_console = logging.StreamHandler()
        self.logger.setLevel(log_lvl)
        self.log_format = logging.Formatter(
            '[%(levelname)s] %(name)s: %(message)s')
        self.logger_console.setFormatter(self.log_format)
        self.logger.addHandler(self.logger_console)

    def shutdown(self):
        """Flush and close the csv output if file writing was started."""
        if self.csvfile is not None:
            self.processed_events.finish_writing_data_to_files(
                self.csvfile, self.event_filename, self.event_types_filename)

    def _get_buffered_data(self, num_bytes):
        """Pop exactly ``num_bytes`` from the buffered chunks.

        Caller must ensure ``self.bcnt >= num_bytes``.
        """
        buf = bytearray()
        while len(buf) < num_bytes:
            tbuf = self.bufs[0]
            size = num_bytes - len(buf)
            if len(tbuf) <= size:
                # Whole chunk consumed.
                buf.extend(tbuf)
                del self.bufs[0]
            else:
                # Take a prefix, keep the remainder buffered.
                buf.extend(tbuf[0:size])
                self.bufs[0] = tbuf[size:]
        self.bcnt -= num_bytes
        return buf

    def _read_bytes(self, num_bytes):
        """Block until ``num_bytes`` are buffered, then return them.

        On a stream error the creator is closed (which exits).
        """
        while True:
            if self.bcnt >= num_bytes:
                break
            try:
                buf = self.in_stream.recv_ev()
            except StreamError as err:
                self.logger.error("Receiving error: {}".format(err))
                self.close()
            if len(buf) > 0:
                self.bufs.append(buf)
                self.bcnt += len(buf)
        return self._get_buffered_data(num_bytes)

    def _timestamp_from_ticks(self, clock_ticks):
        """Convert a raw (wrapping) tick count into seconds, taking the
        counted overflows into account."""
        ts_ticks_aggregated = self.timestamp_overflows * self.config[
            'timestamp_raw_max']
        ts_ticks_aggregated += clock_ticks
        ts_s = ts_ticks_aggregated * self.config['ms_per_timestamp_tick'] / 1000
        return ts_s

    def transmit_all_events_descriptions(self):
        """Receive the csv event-type descriptions, register them and,
        when sending is enabled, forward them as JSON."""
        try:
            # NOTE: 'bytes' shadows the builtin; kept as-is.
            bytes = self.in_stream.recv_desc()
        except StreamError as err:
            self.logger.error("Receiving error: {}. Exiting".format(err))
            sys.exit()
        desc_buf = bytes.decode()
        f = StringIO(desc_buf)
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            # Empty field is send after last event description
            if len(row) == 0:
                break
            name = row[0]
            id = int(row[1])
            # First half of the remaining fields are data types, the
            # second half their descriptions.
            data_type = row[2:len(row) // 2 + 1]
            data = row[len(row) // 2 + 1:]
            self.raw_data.registered_events_types[id] = EventType(
                name, data_type, data)
            # The processing meta events are internal and not exposed.
            if name not in ('event_processing_start', 'event_processing_end'):
                self.processed_events.registered_events_types[id] = EventType(
                    name, data_type, data)
        self.event_processing_start_id = \
            self.raw_data.get_event_type_id('event_processing_start')
        self.event_processing_end_id = \
            self.raw_data.get_event_type_id('event_processing_end')
        if self.sending:
            event_types_dict = dict(
                (k, v.serialize())
                for k, v in self.processed_events.registered_events_types.items())
            json_et_string = json.dumps(event_types_dict)
            try:
                self.out_stream.send_desc(json_et_string.encode())
            except StreamError as err:
                self.logger.error("Error: {}. Unable to send data".format(err))
                sys.exit()

    def _read_single_event(self):
        """Decode one event from the stream: 1-byte type id, 4-byte raw
        timestamp, then the per-type payload fields."""
        id = int.from_bytes(self._read_bytes(1),
                            byteorder=self.config['byteorder'],
                            signed=False)
        et = self.raw_data.registered_events_types[id]
        buf = self._read_bytes(4)
        timestamp_raw = (int.from_bytes(buf,
                                        byteorder=self.config['byteorder'],
                                        signed=False))
        # Detect counter wrap: once the raw value has been past 60% of
        # range, a value below 40% means an overflow occurred.
        if self.after_half \
           and timestamp_raw < 0.4 * self.config['timestamp_raw_max']:
            self.timestamp_overflows += 1
            self.after_half = False
        if timestamp_raw > 0.6 * self.config['timestamp_raw_max']:
            self.after_half = True
        timestamp = self._timestamp_from_ticks(timestamp_raw)

        # Per-field decoders; dispatched through READ_BYTES below and
        # called with (self, data) explicitly.
        def process_int32(self, data):
            buf = self._read_bytes(4)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=True))

        def process_uint32(self, data):
            buf = self._read_bytes(4)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=False))

        def process_int16(self, data):
            buf = self._read_bytes(2)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=True))

        def process_uint16(self, data):
            buf = self._read_bytes(2)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=False))

        def process_int8(self, data):
            buf = self._read_bytes(1)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=True))

        def process_uint8(self, data):
            buf = self._read_bytes(1)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=False))

        def process_string(self, data):
            # Length-prefixed (1 byte) utf-8 string.
            buf = self._read_bytes(1)
            buf = self._read_bytes(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=False))
            data.append(buf.decode())

        READ_BYTES = {
            "u8": process_uint8,
            "s8": process_int8,
            "u16": process_uint16,
            "s16": process_int16,
            "u32": process_uint32,
            "s32": process_int32,
            "s": process_string,
            "t": process_uint32
        }
        data = []
        for event_data_type in et.data_types:
            READ_BYTES[event_data_type](self, data)
        return Event(id, timestamp, data)

    def _send_event(self, tracked_event):
        """Serialize and forward one tracked event; close on error."""
        event_string = tracked_event.serialize()
        try:
            self.out_stream.send_ev(event_string.encode())
        except StreamError as err:
            if err.args[1] != 'closed':
                self.logger.error("Error. Unable to send data: {}".format(err))
            # Receiver has been closed
            self.close()

    def _write_event_to_file(self, csvfile, tracked_event):
        """Append one serialized tracked event to the csv file."""
        try:
            csvfile.write(tracked_event.serialize() + '\r\n')
        except IOError:
            self.logger.error("Problem with accessing csv file")
            self.close()

    def transmit_events(self):
        """Main loop: read events forever, pair submits with their
        processing start/end events and write/forward the result."""
        if self.event_filename and self.event_types_filename:
            self.csvfile = self.processed_events.init_writing_data_to_files(
                self.event_filename, self.event_types_filename)
        while True:
            event = self._read_single_event()
            if event.type_id == self.event_processing_start_id:
                self.start_event = event
                # Search newest-first for the matching buffered submit.
                for i in range(len(self.temp_events) - 1, -1, -1):
                    # comparing memory addresses of event processing start
                    # and event submit to identify matching events
                    if self.temp_events[i].data[0] == self.start_event.data[0]:
                        self.submit_event = self.temp_events[i]
                        self.submitted_event_type = self.submit_event.type_id
                        del self.temp_events[i]
                        break
            elif event.type_id == self.event_processing_end_id:
                # comparing memory addresses of event processing start and
                # end to identify matching events
                if self.submitted_event_type is not None and event.data[0] \
                   == self.start_event.data[0]:
                    tracked_event = TrackedEvent(self.submit_event,
                                                 self.start_event.timestamp,
                                                 event.timestamp)
                    if self.csvfile is not None:
                        self._write_event_to_file(self.csvfile, tracked_event)
                    if self.sending:
                        self._send_event(tracked_event)
                    self.submitted_event_type = None
            elif not self.processed_events.is_event_tracked(event.type_id):
                # Untracked event types pass through with no processing
                # times attached.
                tracked_event = TrackedEvent(event, None, None)
                if self.csvfile is not None:
                    self._write_event_to_file(self.csvfile, tracked_event)
                if self.sending:
                    self._send_event(tracked_event)
            else:
                # Tracked submit: buffer until its processing-start shows up.
                self.temp_events.append(event)

    def start(self):
        """Receive descriptions, then run the event loop forever."""
        self.transmit_all_events_descriptions()
        self.transmit_events()

    def close(self):
        """Log, flush files and terminate the process."""
        self.logger.info("Real time transmission closed")
        self.shutdown()
        self.logger.info("Events data saved to files")
        sys.exit()
class RttNordicProfilerHost:
    """Collects profiler events from a Nordic device over JLink RTT."""

    def __init__(self, config=RttNordicConfig, finish_event=None, queue=None,
                 event_filename=None, event_types_filename=None,
                 log_lvl=logging.WARNING):
        """Initialize buffers/logging and connect to the device.

        :param config: RTT/profiler configuration mapping
        :param finish_event: optional event object used to stop reading
        :param queue: optional queue for forwarding events to a consumer
        :param event_filename: optional csv output file for events
        :param event_types_filename: optional output file for event types
        :param log_lvl: logging level for the console logger
        """
        self.event_filename = event_filename
        self.event_types_filename = event_types_filename
        self.config = config
        self.finish_event = finish_event
        self.queue = queue
        self.received_events = EventsData([], {})
        # Timestamp-wrap tracking for the raw hardware counter.
        self.timestamp_overflows = 0
        self.after_half = False
        # Text buffer for descriptions; byte chunks + count for events.
        self.desc_buf = ""
        self.bufs = list()
        self.bcnt = 0
        self.last_read_time = time.time()
        self.reading_data = True
        self.logger = logging.getLogger('RTT Profiler Host')
        self.logger_console = logging.StreamHandler()
        self.logger.setLevel(log_lvl)
        self.log_format = logging.Formatter(
            '[%(levelname)s] %(name)s: %(message)s')
        self.logger_console.setFormatter(self.log_format)
        self.logger.addHandler(self.logger_console)
        # Opens the JLink/RTT connection immediately (hardware side effect).
        self.connect()

    # NOTE(review): defined without 'self' and invoked via the module
    # (rtt_nordic_profiler_host.rtt_get_device_family() in connect()).
    def rtt_get_device_family():
        """Probe the connected emulator for its device family."""
        with API.API(API.DeviceFamily.UNKNOWN) as api:
            api.connect_to_emu_without_snr()
            return api.read_device_family()

    def connect(self):
        """Open the JLink connection, optionally reset the target and
        start RTT."""
        try:
            self.jlink = API.API(self.config['device_family'])
        except ValueError:
            self.logger.warning(
                'Unrecognized device family. Trying to recognize automatically'
            )
            self.config[
                'device_family'] = rtt_nordic_profiler_host.rtt_get_device_family(
                )
            self.logger.info('Recognized device family: ' +
                             self.config['device_family'])
            self.jlink = API.API(self.config['device_family'])
        self.jlink.open()
        if self.config['device_snr'] is not None:
            self.jlink.connect_to_emu_with_snr(self.config['device_snr'])
        else:
            self.jlink.connect_to_emu_without_snr()
        if self.config['reset_on_start']:
            self.jlink.sys_reset()
            self.jlink.go()
        self.jlink.rtt_start()
        time.sleep(1)  # time required for initialization
        self.logger.info("Connected to device via RTT")

    def shutdown(self):
        """Disconnect, drain buffered events and write output files."""
        self.disconnect()
        self._read_remaining_events()
        if self.event_filename and self.event_types_filename:
            self.received_events.write_data_to_files(self.event_filename,
                                                     self.event_types_filename)

    def disconnect(self):
        """Stop logging, drain remaining RTT data into the buffers and
        close the JLink connection."""
        self.stop_logging_events()
        # read remaining data to buffer
        try:
            buf = self.jlink.rtt_read(self.config['rtt_data_channel'],
                                      self.config['rtt_read_chunk_size'],
                                      encoding=None)
        # NOTE(review): bare except; also leaves 'buf' unbound for the
        # loop below if the first read raises.
        except:
            self.logger.error("Problem with reading RTT data.")
        while len(buf) > 0:
            self.bufs.append(buf)
            self.bcnt += len(buf)
            buf = self.jlink.rtt_read(self.config['rtt_data_channel'],
                                      self.config['rtt_read_chunk_size'],
                                      encoding=None)
        try:
            self.jlink.rtt_stop()
            self.jlink.disconnect_from_emu()
            self.jlink.close()
        except:
            self.logger.error("JLink connection lost. Saving collected data.")
            return
        self.logger.info("Disconnected from device")

    def _get_buffered_data(self, num_bytes):
        """Pop exactly ``num_bytes`` from the buffered chunks.

        Caller must ensure ``self.bcnt >= num_bytes``.
        """
        buf = bytearray()
        while len(buf) < num_bytes:
            tbuf = self.bufs[0]
            size = num_bytes - len(buf)
            if len(tbuf) <= size:
                # Whole chunk consumed.
                buf = buf + tbuf
                del (self.bufs[0])
            else:
                # Take a prefix, keep the remainder buffered.
                buf = buf + tbuf[0:size]
                self.bufs[0] = tbuf[size:]
        self.bcnt -= num_bytes
        return buf

    def _read_bytes(self, num_bytes):
        """Read RTT data until ``num_bytes`` are buffered, honoring the
        finish event, then return them."""
        # NOTE(review): 'now' is sampled once before the loop and never
        # refreshed inside it — confirm this matches intended throttling.
        now = time.time()
        while self.reading_data:
            if now - self.last_read_time < self.config['rtt_read_period'] \
               and self.bcnt >= num_bytes:
                break
            try:
                buf = self.jlink.rtt_read(self.config['rtt_data_channel'],
                                          self.config['rtt_read_chunk_size'],
                                          encoding=None)
            except:
                self.logger.error("Problem with reading RTT data.")
                self.shutdown()
            if len(buf) > 0:
                self.bufs.append(buf)
                self.bcnt += len(buf)
                # Large chunk suggests more data is pending; read again
                # immediately.
                if len(buf) > self.config['rtt_additional_read_thresh']:
                    continue
            self.last_read_time = now
            if self.bcnt >= num_bytes:
                break
            if self.finish_event is not None and self.finish_event.is_set():
                self.finish_event.clear()
                self.logger.info("Real time transmission closed")
                self.shutdown()
                self.logger.info("Events data saved to files")
                sys.exit()
            time.sleep(0.05)
        return self._get_buffered_data(num_bytes)

    def _calculate_timestamp_from_clock_ticks(self, clock_ticks):
        """Convert a raw (wrapping) tick count into seconds, taking the
        counted overflows into account."""
        return self.config['ms_per_timestamp_tick'] * (
            clock_ticks +
            self.timestamp_overflows * self.config['timestamp_raw_max']) / 1000

    def _read_single_event_description(self):
        """Read one newline-terminated csv event description from the
        info channel; returns (None, None) after the final empty line."""
        while '\n' not in self.desc_buf:
            try:
                buf_temp = self.jlink.rtt_read(
                    self.config['rtt_info_channel'],
                    self.config['rtt_read_chunk_size'],
                    encoding='utf-8')
            except:
                self.logger.error("Problem with reading RTT data.")
                self.shutdown()
            self.desc_buf += buf_temp
            time.sleep(0.1)
        desc = str(self.desc_buf[0:self.desc_buf.find('\n')])
        # Empty field is send after last event description
        if len(desc) == 0:
            return None, None
        self.desc_buf = self.desc_buf[self.desc_buf.find('\n') + 1:]
        desc_fields = desc.split(',')
        name = desc_fields[0]
        id = int(desc_fields[1])
        # First half of the remaining fields are data types, the second
        # half their descriptions.
        data_type = []
        for i in range(2, len(desc_fields) // 2 + 1):
            data_type.append(desc_fields[i])
        data = []
        for i in range(len(desc_fields) // 2 + 1, len(desc_fields)):
            data.append(desc_fields[i])
        return id, EventType(name, data_type, data)

    def _read_all_events_descriptions(self):
        """Register event descriptions until the terminating empty one."""
        while True:
            id, et = self._read_single_event_description()
            if (id is None or et is None):
                break
            self.received_events.registered_events_types[id] = et

    def get_events_descriptions(self):
        """Request and collect event descriptions; publish them on the
        queue when one is attached."""
        self._send_command(Command.INFO)
        self._read_all_events_descriptions()
        if self.queue is not None:
            self.queue.put(self.received_events.registered_events_types)
        self.logger.info("Received events descriptions")
        self.logger.info("Ready to start logging events")

    def _read_single_event_rtt(self):
        """Decode one event: 1-byte type id, 4-byte raw timestamp, then
        one 4-byte field per declared data type."""
        id = int.from_bytes(self._read_bytes(1),
                            byteorder=self.config['byteorder'],
                            signed=False)
        et = self.received_events.registered_events_types[id]
        buf = self._read_bytes(4)
        timestamp_raw = (int.from_bytes(buf,
                                        byteorder=self.config['byteorder'],
                                        signed=False))
        # Detect counter wrap via the 20%/60-90% window heuristic.
        if self.after_half \
           and timestamp_raw < 0.2 * self.config['timestamp_raw_max']:
            self.timestamp_overflows += 1
            self.after_half = False
        if timestamp_raw > 0.6 * self.config['timestamp_raw_max'] \
           and timestamp_raw < 0.9 * self.config['timestamp_raw_max']:
            self.after_half = True
        timestamp = self._calculate_timestamp_from_clock_ticks(timestamp_raw)
        data = []
        for i in et.data_types:
            # Data types starting with 's' are signed.
            signum = False
            if i[0] == 's':
                signum = True
            buf = self._read_bytes(4)
            data.append(
                int.from_bytes(buf, byteorder=self.config['byteorder'],
                               signed=signum))
        return Event(id, timestamp, data)

    def _read_remaining_events(self):
        """Drain already-buffered bytes into events after reading stops;
        a final None on the queue marks end of transmission."""
        self.reading_data = False
        while self.bcnt != 0:
            event = self._read_single_event_rtt()
            self.received_events.events.append(event)
            if self.queue is not None:
                self.queue.put(event)
        # End of transmission
        if self.queue is not None:
            self.queue.put(None)

    def read_events_rtt(self, time_seconds):
        """Log events for ``time_seconds`` (forever when negative),
        then shut down and exit the process."""
        self.logger.info("Start logging events data")
        self.start_logging_events()
        start_time = time.time()
        current_time = start_time
        while current_time - start_time < time_seconds or time_seconds < 0:
            event = self._read_single_event_rtt()
            self.received_events.events.append(event)
            if self.queue is not None:
                self.queue.put(event)
            current_time = time.time()
        self.logger.info("Real time transmission closed")
        self.shutdown()
        self.logger.info("Events data saved to files")
        sys.exit()

    def start_logging_events(self):
        """Tell the device to start emitting profiler events."""
        self._send_command(Command.START)

    def stop_logging_events(self):
        """Tell the device to stop emitting profiler events."""
        self._send_command(Command.STOP)

    def _send_command(self, command_type):
        """Write a single command byte to the RTT command channel."""
        command = bytearray(1)
        command[0] = command_type.value
        try:
            self.jlink.rtt_write(self.config['rtt_command_channel'], command,
                                 None)
        # NOTE(review): bare except silently swallows all write failures.
        except:
            self.logger.error("Problem with writing RTT data.")
class PlotNordic(): def __init__(self, log_lvl=logging.WARNING): plt.rcParams['toolbar'] = 'None' plt.ioff() self.raw_data = EventsData([], {}) self.plot_config = PlotNordicConfig self.draw_state = DrawState( self.plot_config['timeline_width_init'], self.plot_config['event_processing_rect_height'], self.plot_config['event_submit_markersize']) self.processed_data = ProcessedData() self.submitted_event_type = None self.logger = logging.getLogger('RTT Plot Nordic') self.logger_console = logging.StreamHandler() self.logger.setLevel(log_lvl) self.log_format = logging.Formatter( '[%(levelname)s] %(name)s: %(message)s') self.logger_console.setFormatter(self.log_format) self.logger.addHandler(self.logger_console) def read_data_from_files(self, events_filename, events_types_filename): self.raw_data.read_data_from_files(events_filename, events_types_filename) if not self.raw_data.verify(): self.logger.warning("Missing event descriptions") def write_data_to_files(self, events_filename, events_types_filename): self.raw_data.write_data_to_files(events_filename, events_types_filename) def on_click_start_stop(self, event): if self.draw_state.paused: if self.draw_state.l_line is not None: self.draw_state.l_line.remove() self.draw_state.l_line = None self.draw_state.l_line_coord = None if self.draw_state.r_line is not None: self.draw_state.r_line.remove() self.draw_state.r_line = None self.draw_state.r_line_coord = None if self.draw_state.duration_marker is not None: self.draw_state.duration_marker.remove() self.draw_state.paused = not self.draw_state.paused def _get_event_type_id(self, type_name): for key, value in self.raw_data.registered_events_types.items(): if type_name == value.name: return key return None def _prepare_plot(self, selected_events_types): self.processed_data.event_processing_start_id = self._get_event_type_id( 'event_processing_start') self.processed_data.event_processing_end_id = self._get_event_type_id( 'event_processing_end') if 
(self.processed_data.event_processing_start_id is None) or (self.processed_data.event_processing_end_id is None): self.processed_data.tracking_execution = False self.draw_state.ax = plt.gca() self.draw_state.ax.set_navigate(False) fig = plt.gcf() fig.set_size_inches(self.plot_config['window_width_inch'], self.plot_config['window_height_inch'], forward=True) fig.canvas.draw() plt.xlabel("Time [s]") plt.title("Custom events") plt.grid(True) minimum = selected_events_types[0] maximum = selected_events_types[0] ticks = [] labels = [] for j in selected_events_types: if j != self.processed_data.event_processing_start_id and j != self.processed_data.event_processing_end_id: if j > maximum: maximum = j if j < minimum: minimum = j ticks.append(j) labels.append(self.raw_data.registered_events_types[j].name) plt.yticks(ticks, labels, rotation=45) # min and max range of y axis are bigger by one so markers fit nicely # on plot self.draw_state.y_max = maximum + 1 self.draw_state.y_height = maximum - minimum + 2 plt.ylim([minimum - 1, maximum + 1]) self.draw_state.selected_event_textbox = self.draw_state.ax.text( 0.05, 0.95, self.draw_state.selected_event_text, fontsize=10, transform=self.draw_state.ax.transAxes, verticalalignment='top', bbox=dict(boxstyle='round', alpha=0.5, facecolor='linen')) self.draw_state.selected_event_textbox.set_visible(False) fig.canvas.mpl_connect('scroll_event', self.scroll_event) fig.canvas.mpl_connect('button_press_event', self.button_press_event) fig.canvas.mpl_connect('button_release_event', self.button_release_event) return fig def _get_relative_coords(self, event): # relative position of plot - x0, y0, width, height ax_loc = self.draw_state.ax.get_position().bounds window_size = plt.gcf().get_size_inches() * \ plt.gcf().dpi # window size - width, height x_rel = (event.x - ax_loc[0] * window_size[0]) \ / ax_loc[2] / window_size[0] y_rel = (event.y - ax_loc[1] * window_size[1]) \ / ax_loc[3] / window_size[1] return x_rel, y_rel def 
scroll_event(self, event): x_rel, y_rel = self._get_relative_coords(event) if event.button == 'up': if self.draw_state.paused: self.draw_state.timeline_max = self.draw_state.timeline_max - (1 - x_rel) * \ (self.draw_state.timeline_width - self.draw_state.timeline_width * self.plot_config['timeline_scale_factor']) self.draw_state.timeline_width = self.draw_state.timeline_width * \ self.plot_config['timeline_scale_factor'] if event.button == 'down': if self.draw_state.paused: self.draw_state.timeline_max = self.draw_state.timeline_max + (1 - x_rel) * \ (self.draw_state.timeline_width / self.plot_config['timeline_scale_factor'] - self.draw_state.timeline_width) self.draw_state.timeline_width = self.draw_state.timeline_width / \ self.plot_config['timeline_scale_factor'] self.draw_state.ax.set_xlim( self.draw_state.timeline_max - self.draw_state.timeline_width, self.draw_state.timeline_max) plt.draw() def _find_closest_event(self, x_coord, y_coord): if self.processed_data.tracked_events: filtered_id = list( filter(lambda x: x.submit.type_id == round(y_coord), self.processed_data.tracked_events)) if len(filtered_id) == 0: return None matching_processing = list( filter( lambda x: x.start.timestamp < x_coord and x.end.timestamp > x_coord, filtered_id)) if len(matching_processing): return matching_processing[0] dists = list( map( lambda x: min([ abs(x.submit.timestamp - x_coord), abs(x.start.timestamp - x_coord), abs(x.end.timestamp - x_coord) ]), filtered_id)) return filtered_id[np.argmin(dists)] else: filtered_id = list( filter(lambda x: x.type_id == round(y_coord), self.raw_data.events)) if len(filtered_id) == 0: return None dists = list(map(lambda x: abs(x.timestamp - x_coord), filtered_id)) return filtered_id[np.argmin(dists)] def _stringify_time(time_seconds): if time_seconds > 0.1: return '%.5f' % (time_seconds) + ' s' return '%.5f' % (1000 * time_seconds) + ' ms' def button_press_event(self, event): x_rel, y_rel = self._get_relative_coords(event) if event.button == 
MouseButton.LEFT.value: self.draw_state.pan_x_start1 = x_rel if event.button == MouseButton.MIDDLE.value: if self.draw_state.selected_event_submit is not None: for i in self.draw_state.selected_event_submit: i.remove() self.draw_state.selected_event_submit = None if self.draw_state.selected_event_processing is not None: self.draw_state.selected_event_processing.remove() self.draw_state.selected_event_processing = None self.draw_state.selected_event_textbox.set_visible(False) if x_rel > 1 or x_rel < 0 or y_rel > 1 or y_rel < 0: plt.draw() return coord_x = self.draw_state.timeline_max - \ (1 - x_rel) * self.draw_state.timeline_width coord_y = self.draw_state.y_max - \ (1 - y_rel) * self.draw_state.y_height selected_event = self._find_closest_event(coord_x, coord_y) if selected_event is None: return if self.processed_data.tracking_execution: event_submit = selected_event.submit else: event_submit = selected_event self.draw_state.selected_event_submit = self.draw_state.ax.plot( event_submit.timestamp, event_submit.type_id, markersize=2 * self.draw_state.event_submit_markersize, color='g', marker='o', linestyle=' ') if self.processed_data.tracking_execution: self.draw_state.selected_event_processing = matplotlib.patches.Rectangle( (selected_event.start.timestamp, selected_event.submit.type_id - self.draw_state.event_processing_rect_height), selected_event.end.timestamp - selected_event.start.timestamp, 2 * self.draw_state.event_processing_rect_height, color='g') self.draw_state.ax.add_artist( self.draw_state.selected_event_processing) self.draw_state.selected_event_text = self.raw_data.registered_events_types[ event_submit.type_id].name + '\n' self.draw_state.selected_event_text += 'Submit: ' + \ PlotNordic._stringify_time( event_submit.timestamp) + '\n' if self.processed_data.tracking_execution: self.draw_state.selected_event_text += 'Processing start: ' + \ PlotNordic._stringify_time( selected_event.start.timestamp) + '\n' self.draw_state.selected_event_text += 
'Processing end: ' + \ PlotNordic._stringify_time( selected_event.end.timestamp) + '\n' self.draw_state.selected_event_text += 'Processing time: ' + \ PlotNordic._stringify_time(selected_event.end.timestamp - \ selected_event.start.timestamp) + '\n' ev_type = self.raw_data.registered_events_types[ event_submit.type_id] for i in range(0, len(ev_type.data_descriptions)): if ev_type.data_descriptions[i] == 'mem_address': continue self.draw_state.selected_event_text += ev_type.data_descriptions[ i] + ' = ' self.draw_state.selected_event_text += str( event_submit.data[i]) + '\n' self.draw_state.selected_event_textbox.set_visible(True) self.draw_state.selected_event_textbox.set_text( self.draw_state.selected_event_text) plt.draw() if event.button == MouseButton.RIGHT.value: self.draw_state.pan_x_start2 = x_rel def button_release_event(self, event): x_rel, y_rel = self._get_relative_coords(event) if event.button == MouseButton.LEFT.value: if self.draw_state.paused: if abs(x_rel - self.draw_state.pan_x_start1) < 0.01: if self.draw_state.l_line is not None: self.draw_state.l_line.remove() self.draw_state.l_line = None self.draw_state.l_line_coord = None if x_rel >= 0 and x_rel <= 1 and y_rel >= 0 and y_rel <= 1: self.draw_state.l_line_coord = self.draw_state.timeline_max - \ (1 - x_rel) * self.draw_state.timeline_width self.draw_state.l_line = plt.axvline( self.draw_state.l_line_coord) plt.draw() else: self.draw_state.timeline_max = self.draw_state.timeline_max - \ (x_rel - self.draw_state.pan_x_start1) * \ self.draw_state.timeline_width self.draw_state.ax.set_xlim( self.draw_state.timeline_max - self.draw_state.timeline_width, self.draw_state.timeline_max) plt.draw() if event.button == MouseButton.RIGHT.value: if self.draw_state.paused: if abs(x_rel - self.draw_state.pan_x_start2) < 0.01: if self.draw_state.r_line is not None: self.draw_state.r_line.remove() self.draw_state.r_line = None self.draw_state.r_line_coord = None if x_rel >= 0 and x_rel <= 1 and y_rel >= 0 and 
        # NOTE(review): fragment — the enclosing mouse-handler `def` and the
        # start of this `if` condition lie before this chunk; tokens kept as-is.
                y_rel <= 1:
            # Place the right (red) cursor line: x_rel is a 0..1 position
            # within the currently visible timeline window.
            self.draw_state.r_line_coord = self.draw_state.timeline_max - \
                (1 - x_rel) * self.draw_state.timeline_width
            self.draw_state.r_line = plt.axvline(
                self.draw_state.r_line_coord, color='r')
            plt.draw()
        # With both cursors placed, (re)draw the duration annotation between
        # them; otherwise remove any stale annotation.
        if self.draw_state.r_line_coord is not None and self.draw_state.l_line_coord is not None:
            if self.draw_state.duration_marker is not None:
                self.draw_state.duration_marker.remove()
            bigger_coord = max(self.draw_state.r_line_coord,
                               self.draw_state.l_line_coord)
            smaller_coord = min(self.draw_state.r_line_coord,
                                self.draw_state.l_line_coord)
            self.draw_state.duration_marker = plt.annotate(
                s=PlotNordic._stringify_time(bigger_coord - smaller_coord),
                xy=(smaller_coord, 0.5),
                xytext=(bigger_coord, 0.5),
                arrowprops=dict(arrowstyle='<->'))
        else:
            if self.draw_state.duration_marker is not None:
                self.draw_state.duration_marker.remove()
            self.draw_state.duration_marker = None

    def real_time_close_event(self, event):
        """Figure 'close_event' callback: signal the producer to stop and exit."""
        self.finish_event.set()
        sys.exit()

    def animate_events_real_time(self, fig, selected_events_types, one_line):
        """FuncAnimation frame callback for live plotting.

        Drains the inter-process queue, pairs processing-start/end events
        with their submit events (matched by the memory address stored in
        data[0]), scrolls the visible time window and redraws markers plus
        processing-duration rectangles.
        """
        rects = []
        # NOTE(review): `finished`, `xranges` and `yranges` are written but
        # never read in this chunk — possibly leftovers. TODO confirm.
        finished = False
        events = []
        xranges = []
        for i in range(0, len(selected_events_types)):
            xranges.append([])
        yranges = []
        while not self.queue.empty():
            event = self.queue.get()
            if event is None:
                self.logger.info("Stopped collecting new events")
            # NOTE(review): when the None sentinel is dequeued, control falls
            # through and `event.type_id` below would raise AttributeError —
            # a break/continue may have been lost in formatting. TODO confirm.
            if self.processed_data.tracking_execution:
                if event.type_id == self.processed_data.event_processing_start_id:
                    self.processed_data.start_event = event
                    # Scan newest-first for the submit event with the same
                    # memory address (data[0]).
                    for i in range(
                            len(self.processed_data.temp_events) - 1, -1, -1):
                        # comparing memory addresses of event processing start
                        # and event submit to identify matching events
                        if self.processed_data.temp_events[i].data[
                                0] == self.processed_data.start_event.data[0]:
                            self.processed_data.submit_event = self.processed_data.temp_events[
                                i]
                            events.append(self.processed_data.temp_events[i])
                            self.submitted_event_type = self.processed_data.submit_event.type_id
                            del (self.processed_data.temp_events[i])
                            break
                elif event.type_id == self.processed_data.event_processing_end_id:
                    # comparing memory addresses of event processing start and
                    # end to identify matching events
                    if self.submitted_event_type is not None and event.data[0] \
                            == self.processed_data.start_event.data[0]:
                        # Rectangle spans processing start..end, centered on
                        # the submitting event's row.
                        rects.append(
                            matplotlib.patches.Rectangle(
                                (self.processed_data.start_event.timestamp,
                                 self.processed_data.submit_event.type_id -
                                 self.draw_state.event_processing_rect_height / 2),
                                event.timestamp -
                                self.processed_data.start_event.timestamp,
                                self.draw_state.event_processing_rect_height,
                                edgecolor='black'))
                        self.processed_data.tracked_events.append(
                            TrackedEvent(self.processed_data.submit_event,
                                         self.processed_data.start_event,
                                         event))
                        self.submitted_event_type = None
                else:
                    # Ordinary event: buffer it for later start/submit pairing
                    # and nudge the scroll offset to keep it on screen.
                    self.processed_data.temp_events.append(event)
                    if event.timestamp > time.time() - self.start_time + self.draw_state.added_time - \
                            0.2 * self.draw_state.timeline_width:
                        self.draw_state.added_time += 0.05
                    if event.timestamp < time.time() - self.start_time + self.draw_state.added_time - \
                            0.8 * self.draw_state.timeline_width:
                        self.draw_state.added_time -= 0.05
                    events.append(event)
            else:
                events.append(event)
            self.raw_data.events.append(event)
        # translating plot
        if not self.draw_state.synchronized_with_events:
            # ignore translating plot for stale events
            if not self.draw_state.stale_events_displayed:
                self.draw_state.stale_events_displayed = True
            else:
                # translate plot for new events
                if len(events) != 0:
                    self.draw_state.added_time = events[-1].timestamp - \
                        0.3 * self.draw_state.timeline_width
                    self.draw_state.synchronized_with_events = True
        if not self.draw_state.paused:
            # Advance the right edge of the window with wall-clock time.
            self.draw_state.timeline_max = time.time() - self.start_time + \
                self.draw_state.added_time
            self.draw_state.ax.set_xlim(
                self.draw_state.timeline_max - self.draw_state.timeline_width,
                self.draw_state.timeline_max)
        # plotting events
        y = list(map(lambda x: x.type_id, events))
        x = list(map(lambda x: x.timestamp, events))
        self.draw_state.ax.plot(
            x,
            y,
            marker='o',
            linestyle=' ',
            color='r',
            markersize=self.draw_state.event_submit_markersize)
        self.draw_state.ax.add_collection(PatchCollection(rects))
        plt.gcf().canvas.flush_events()

    def plot_events_real_time(self, queue, finish_event,
                              selected_events_types=None, one_line=False):
        """Start live plotting of events received through *queue*.

        The first queue item must be the registered-event-types dict; the
        animation callback then consumes Event objects. *finish_event* is set
        when the figure window is closed.
        """
        self.start_time = time.time()
        self.queue = queue
        self.finish_event = finish_event
        self.raw_data = EventsData([], {})
        # First queue element carries the event-type descriptions.
        self.raw_data.registered_events_types = queue.get()
        if selected_events_types is None:
            # Default: plot every registered event type.
            selected_events_types = list(
                self.raw_data.registered_events_types.keys())
        fig = self._prepare_plot(selected_events_types)
        self.start_stop_ax = plt.axes([0.8, 0.025, 0.1, 0.04])
        self.start_stop_button = Button(self.start_stop_ax, 'Start/Stop')
        self.start_stop_button.on_clicked(self.on_click_start_stop)
        plt.sca(self.draw_state.ax)
        fig.canvas.mpl_connect('close_event', self.real_time_close_event)
        # Keep a reference so the animation isn't garbage collected.
        ani = animation.FuncAnimation(
            fig,
            self.animate_events_real_time,
            fargs=[selected_events_types, one_line],
            interval=self.plot_config['refresh_time'])
        plt.show()

    def plot_events_from_file(self, selected_events_types=None,
                              one_line=False):
        """Plot previously loaded events (static, paused view)."""
        self.draw_state.paused = True
        if len(self.raw_data.events) == 0 or \
                len(self.raw_data.registered_events_types) == 0:
            self.logger.error("Please read some events data before plotting")
        # NOTE(review): execution continues after the error above; `max(x)`
        # below would raise on an empty dataset — verify intended behavior.
        # default - print every event type
        if selected_events_types is None:
            selected_events_types = list(
                self.raw_data.registered_events_types.keys())
        fig = self._prepare_plot(selected_events_types)
        # Hide the synthetic processing start/end markers from the scatter.
        events = list(
            filter(
                lambda x: x.type_id != self.processed_data.
                event_processing_start_id and x.type_id != self.processed_data.
                event_processing_end_id, self.raw_data.events))
        y = list(map(lambda x: x.type_id, events))
        x = list(map(lambda x: x.timestamp, events))
        self.draw_state.ax.plot(
            x,
            y,
            marker='o',
            linestyle=' ',
            color='r',
            markersize=self.draw_state.event_submit_markersize)
        if self.processed_data.tracking_execution:
            rects = []
            for i in range(0, len(self.raw_data.events)):
                if self.raw_data.events[
                        i].type_id == self.processed_data.event_processing_start_id:
                    self.processed_data.start_event = self.raw_data.events[i]
                    for j in range(i - 1, -1, -1):
                        # comparing memory addresses of event processing start
                        # and event submit to identify matching events
                        if self.raw_data.events[j].data[
                                0] == self.processed_data.start_event.data[0]:
                            self.processed_data.submit_event = self.raw_data.events[
                                j]
                            break
                # comparing memory addresses of event processing start and end
                # to identify matching events
                if self.raw_data.events[
                        i].type_id == self.processed_data.event_processing_end_id:
                    if self.processed_data.submit_event is not None \
                            and self.raw_data.events[i].data[0] == self.processed_data.start_event.data[0]:
                        rects.append(
                            matplotlib.patches.Rectangle(
                                (self.processed_data.start_event.timestamp,
                                 self.processed_data.submit_event.type_id -
                                 self.draw_state.event_processing_rect_height / 2),
                                self.raw_data.events[i].timestamp -
                                self.processed_data.start_event.timestamp,
                                self.draw_state.event_processing_rect_height,
                                edgecolor='black'))
                        # NOTE(review): here TrackedEvent receives Event
                        # objects, while match_event_processing stores raw
                        # timestamps — the two call sites disagree. Verify.
                        self.processed_data.tracked_events.append(
                            TrackedEvent(self.processed_data.submit_event,
                                         self.processed_data.start_event,
                                         self.raw_data.events[i]))
            self.draw_state.ax.add_collection(PatchCollection(rects))
        # Fit the window to the data with a 1-unit margin on each side.
        self.draw_state.timeline_max = max(x) + 1
        self.draw_state.timeline_width = max(x) - min(x) + 2
        self.draw_state.ax.set_xlim([min(x) - 1, max(x) + 1])
        plt.draw()
        plt.show()

    def log_stats(self, log_filename):
        """Write event counts and processing-time stats to <log_filename>.csv."""
        csvfile = open(log_filename + '.csv', 'w', newline='')
        self._log_events_counts(csvfile)
        self._log_processing_times(csvfile)
        csvfile.close()

    def _log_processing_times(self, log_file):
        """Append per-event-type processing-time statistics (ms) as CSV rows."""
        log_file.write("#####EVENT PROCESSING TIMES [MS] #####\n")
        fieldnames = ['Type name:', 'Min:', 'Avg:', 'Max:', 'Std:']
        wr = csv.DictWriter(log_file, delimiter=',', fieldnames=fieldnames)
        wr.writeheader()
        for i in self.raw_data.registered_events_types:
            # The synthetic start/end marker types have no stats of their own.
            if i == self.processed_data.event_processing_start_id or i == self.processed_data.event_processing_end_id:
                continue
            ev = list(
                filter(lambda x: x.submit.type_id == i,
                       self.processed_data.tracked_events))
            if len(ev) == 0:
                # No tracked occurrences of this type: emit a placeholder row.
                wr.writerow({
                    'Type name:':
                    self.raw_data.registered_events_types[i].name,
                    'Min:': '---',
                    'Avg:': '---',
                    'Max:': '---',
                    'Std:': '---'
                })
                continue
            processing_times = list(
                map(lambda x: x.end.timestamp - x.start.timestamp, ev))
            # Timestamps are in seconds here; the *1000 converts to ms.
            wr.writerow({
                'Type name:': self.raw_data.registered_events_types[i].name,
                'Min:': '%.5f' % (1000 * min(processing_times)),
                'Avg:': '%.5f' % (1000 * np.mean(processing_times)),
                'Max:': '%.5f' % (1000 * max(processing_times)),
                'Std:': '%.5f' % (1000 * np.std(processing_times))
            })
        log_file.write("\n\n")

    def _log_events_counts(self, log_file):
        """Append a CSV table of occurrence counts per registered event type."""
        log_file.write("#####EVENTS COUNTS#####\n")
        fieldnames = ['Type name:', 'Count:']
        wr = csv.DictWriter(log_file, delimiter=',', fieldnames=fieldnames)
        wr.writeheader()
        for i in self.raw_data.registered_events_types:
            wr.writerow({
                'Type name:': self.raw_data.registered_events_types[i].name,
                'Count:': self._count_event(i)
            })
        log_file.write("\n\n")

    def _count_event(self, event_type_id):
        """Return how many raw events carry *event_type_id*."""
        events_temp = list(
            filter(lambda x: x.type_id == event_type_id,
                   self.raw_data.events))
        return len(events_temp)
def main():
    """Merge Peripheral and Central profiler datasets into one dataset.

    Reads each dataset from <name>.csv/<name>.json, compensates Central
    clock drift with a linear model fitted to the first and last
    synchronization events, then writes the combined, reindexed result
    to <result_dataset>.csv/.json.
    """
    descr = "Merge data from Peripheral and Central. Synchronization events" \
            " should be registered at the beginning and at the end of" \
            " measurements (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("peripheral_dataset",
                        help="Name of Peripheral dataset")
    parser.add_argument("peripheral_sync_event",
                        help="Event used for synchronization - Peripheral")
    parser.add_argument("central_dataset", help="Name of Central dataset")
    parser.add_argument("central_sync_event",
                        help="Event used for synchronization - Central")
    parser.add_argument("result_dataset", help="Name for result dataset")
    args = parser.parse_args()

    evt_peripheral = EventsData([], {})
    evt_peripheral.read_data_from_files(args.peripheral_dataset + ".csv",
                                        args.peripheral_dataset + ".json")

    evt_central = EventsData([], {})
    evt_central.read_data_from_files(args.central_dataset + ".csv",
                                     args.central_dataset + ".json")

    # Compensating clock drift - based on synchronization events
    sync_evt_peripheral = evt_peripheral.get_event_type_id(
        args.peripheral_sync_event)
    sync_evt_central = evt_central.get_event_type_id(args.central_sync_event)

    sync_peripheral = list(
        filter(lambda x: x.type_id == sync_evt_peripheral,
               evt_peripheral.events))
    sync_central = list(
        filter(lambda x: x.type_id == sync_evt_central, evt_central.events))

    if len(sync_central) < 2 or len(sync_peripheral) < 2:
        print("Not enough synchronization events (require at least two)")
        return

    diff_start = sync_peripheral[0].timestamp - sync_central[0].timestamp
    diff_end = sync_peripheral[-1].timestamp - sync_central[-1].timestamp
    diff_time = sync_central[-1].timestamp - sync_central[0].timestamp
    time_start = sync_central[0].timestamp

    # Bug fix: guard against ZeroDivisionError when the first and last
    # synchronization events share the same Central timestamp.
    if diff_time == 0:
        print("Synchronization events have identical timestamps - cannot"
              " estimate clock drift")
        return

    # Using linear approximation of clock drift between peripheral and central
    # t_central = t_peripheral + (diff_time * a) + b
    b = diff_start
    a = (diff_end - diff_start) / diff_time

    B_DIFF_THRESH = 0.1
    if abs(diff_end - diff_start) > B_DIFF_THRESH:
        # (typos fixed: "beginnning" -> "beginning", "missmatch" -> "mismatch")
        print("Clock drift difference between beginning and end is high.")
        print("This could be caused by measurements mismatch or very long"
              " measurement time.")

    # Reindexing, renaming and compensating time differences for central events
    max_peripheral_id = max(
        [int(i) for i in evt_peripheral.registered_events_types])
    evt_central.events = list(
        map(
            lambda x: Event(x.type_id + max_peripheral_id + 1,
                            x.timestamp + a * (x.timestamp - time_start) + b,
                            x.data), evt_central.events))
    evt_central.registered_events_types = {
        k + max_peripheral_id + 1: EventType(v.name + "_central",
                                             v.data_types,
                                             v.data_descriptions)
        for k, v in evt_central.registered_events_types.items()
    }
    evt_peripheral.registered_events_types = {
        k: EventType(v.name + "_peripheral", v.data_types,
                     v.data_descriptions)
        for k, v in evt_peripheral.registered_events_types.items()
    }

    all_registered_events_types = evt_peripheral.registered_events_types.copy()
    all_registered_events_types.update(evt_central.registered_events_types)

    result_events = EventsData(evt_peripheral.events + evt_central.events,
                               all_registered_events_types)
    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")
class RttNordicProfilerHost:
    """Collects Profiler events from an nRF device over SEGGER RTT.

    NOTE(review): a second class with the same name appears later in this
    file; if both live in one module, this earlier definition is shadowed —
    looks like two file revisions were concatenated. TODO confirm.
    """

    def __init__(self, config=RttNordicConfig, finish_event=None, queue=None,
                 event_filename=None, event_types_filename=None,
                 log_lvl=logging.WARNING):
        """Store configuration, set up logging and connect to the device.

        finish_event/queue allow a consumer process to stop and receive the
        live event stream; filenames (both required together) enable saving
        collected data on disconnect.
        """
        self.event_filename = event_filename
        self.event_types_filename = event_types_filename
        self.config = config
        self.finish_event = finish_event
        self.queue = queue
        self.received_events = EventsData([], {})
        # State used to extend the device's wrapping timestamp counter.
        self.timestamp_overflows = 0
        self.after_half = False
        self.logger = logging.getLogger('RTT Profiler Host')
        self.logger_console = logging.StreamHandler()
        self.logger.setLevel(log_lvl)
        self.log_format = logging.Formatter(
            '[%(levelname)s] %(name)s: %(message)s')
        self.logger_console.setFormatter(self.log_format)
        self.logger.addHandler(self.logger_console)
        self._connect()

    # NOTE(review): no `self` and no @staticmethod decorator — this only
    # works when called through the module reference used in _connect().
    def rtt_get_device_family():
        """Probe the connected emulator for its device family string."""
        with API.API(API.DeviceFamily.UNKNOWN) as api:
            api.connect_to_emu_without_snr()
            return api.read_device_family()

    def _connect(self):
        """Open the J-Link connection, optionally reset, and start RTT."""
        try:
            self.jlink = API.API(self.config['device_family'])
        except ValueError:
            # Configured family was not recognized — auto-detect instead.
            self.logger.warning(
                'Unrecognized device family. Trying to recognize automatically'
            )
            self.config[
                'device_family'] = rtt_nordic_profiler_host.rtt_get_device_family(
                )
            self.logger.info('Recognized device family: ' +
                             self.config['device_family'])
            self.jlink = API.API(self.config['device_family'])
        self.jlink.open()
        if self.config['device_snr'] is not None:
            self.jlink.connect_to_emu_with_snr(self.config['device_snr'])
        else:
            self.jlink.connect_to_emu_without_snr()
        if self.config['reset_on_start']:
            self.jlink.sys_reset()
            self.jlink.go()
        self.jlink.rtt_start()
        time.sleep(1)  # time required for initialization
        self.logger.info("Connected to device via RTT")

    def disconnect(self):
        """Stop RTT, close J-Link and optionally dump collected events."""
        if self.queue:
            # None is the end-of-stream sentinel for the consumer.
            self.queue.put(None)
        self.jlink.rtt_stop()
        self.jlink.disconnect_from_emu()
        self.jlink.close()
        if self.event_filename is not None and self.event_types_filename is not None:
            self.received_events.write_data_to_files(
                self.event_filename, self.event_types_filename)
        self.logger.info("Disconnected from device")

    def _read_char(self, channel):
        """Blocking read of one UTF-8 character from an RTT up-channel.

        Exits the process via disconnect()/sys.exit() on timeout.
        """
        if self.config['connection_timeout'] > 0:
            start_time = time.time()
        buf = self.jlink.rtt_read(channel, 1, encoding='utf-8')
        while (len(buf) == 0):
            time.sleep(0.0001)
            buf = self.jlink.rtt_read(channel, 1, encoding='utf-8')
            if self.config['connection_timeout'] > 0 and time.time(
            ) - start_time > self.config['connection_timeout']:
                self.disconnect()
                self.logger.error("Connection timeout")
                sys.exit()
        return buf

    def _read_bytes(self, channel, num_bytes):
        """Blocking read of exactly *num_bytes* raw bytes from *channel*.

        Honors the connection timeout and the external finish_event; both
        paths disconnect and terminate the process.
        """
        if self.config['connection_timeout'] > 0:
            start_time = time.time()
        buf = self.jlink.rtt_read(channel, num_bytes, encoding=None)
        while (len(buf) < num_bytes):
            time.sleep(0.05)
            buf.extend(
                self.jlink.rtt_read(channel, num_bytes - len(buf),
                                    encoding=None))
            if self.config['connection_timeout'] > 0 and time.time(
            ) - start_time > self.config['connection_timeout']:
                self.disconnect()
                self.logger.error("Connection timeout")
                sys.exit()
            if self.finish_event is not None and self.finish_event.is_set():
                self.logger.info("Real time transmission closed")
                self.disconnect()
                sys.exit()
        return buf

    def _calculate_timestamp_from_clock_ticks(self, clock_ticks):
        """Convert raw ticks (plus counted overflows) to a timestamp.

        Presumably returns seconds, given the /1000 on a ms-per-tick
        scale — TODO confirm against RttNordicConfig.
        """
        return self.config['ms_per_timestamp_tick'] * (
            clock_ticks +
            self.timestamp_overflows * self.config['timestamp_raw_max']) / 1000

    def _read_single_event_description(self):
        """Read one comma-separated event description line from the info channel.

        Returns (id, EventType) or (None, None) on the terminating empty line.
        """
        buf = self._read_char(self.config['rtt_info_channel'])
        if ('\n' == buf):
            # Bare newline terminates the description list.
            return None, None
        raw_desc = []
        raw_desc.append(buf)
        while buf != '\n':
            buf = self._read_char(self.config['rtt_info_channel'])
            raw_desc.append(buf)
        raw_desc.pop()
        desc = "".join(raw_desc)
        desc_fields = desc.split(',')
        # Layout: name, id, then data types followed by data descriptions
        # (the two halves split the remaining fields evenly).
        name = desc_fields[0]
        id = int(desc_fields[1])
        data_type = []
        for i in range(2, len(desc_fields) // 2 + 1):
            data_type.append(desc_fields[i])
        data = []
        for i in range(len(desc_fields) // 2 + 1, len(desc_fields)):
            data.append(desc_fields[i])
        return id, EventType(name, data_type, data)

    def _read_all_events_descriptions(self):
        """Consume descriptions until the terminating empty line."""
        while True:
            id, et = self._read_single_event_description()
            if (id is None or et is None):
                break
            self.received_events.registered_events_types[id] = et

    def get_events_descriptions(self):
        """Request and store event type descriptions from the device."""
        self._send_command(Command.INFO)
        self._read_all_events_descriptions()
        if self.queue is not None:
            # Consumers expect the type dictionary as the first queue item.
            self.queue.put(self.received_events.registered_events_types)
        self.logger.info("Received events descriptions")
        self.logger.info("Ready to start logging events")

    def _read_single_event_rtt(self):
        """Read one binary event record: id(1B), timestamp(4B), data words."""
        id = int.from_bytes(self._read_bytes(self.config['rtt_data_channel'],
                                             1),
                            byteorder=self.config['byteorder'],
                            signed=False)
        et = self.received_events.registered_events_types[id]
        buf = self._read_bytes(self.config['rtt_data_channel'], 4)
        timestamp_raw = (int.from_bytes(buf,
                                        byteorder=self.config['byteorder'],
                                        signed=False))
        # Overflow detection: once the raw counter has been observed in the
        # 0.6-0.9 band, a value below 0.2*max means it wrapped around.
        if self.after_half and timestamp_raw < 0.2 * self.config[
                'timestamp_raw_max']:
            self.timestamp_overflows += 1
            self.after_half = False
        if timestamp_raw > 0.6 * \
                self.config['timestamp_raw_max'] and timestamp_raw < 0.9 * self.config['timestamp_raw_max']:
            self.after_half = True
        timestamp = self._calculate_timestamp_from_clock_ticks(timestamp_raw)
        data = []
        for i in et.data_types:
            # Type strings starting with 's' are treated as signed 32-bit.
            signum = False
            if i[0] == 's':
                signum = True
            buf = self._read_bytes(self.config['rtt_data_channel'], 4)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=signum))
        return Event(id, timestamp, data)

    def read_events_rtt(self, time_seconds):
        """Stream events for *time_seconds* (forever when negative)."""
        self.start_logging_events()
        start_time = time.time()
        current_time = start_time
        while current_time - start_time < time_seconds or time_seconds < 0:
            event = self._read_single_event_rtt()
            self.received_events.events.append(event)
            if self.queue is not None:
                self.queue.put(event)
            current_time = time.time()
        self.stop_logging_events()

    def start_logging_events(self):
        """Tell the device to start emitting events."""
        self._send_command(Command.START)

    def stop_logging_events(self):
        """Tell the device to stop emitting events."""
        self._send_command(Command.STOP)

    def _send_command(self, command_type):
        """Write a one-byte command code to the RTT command down-channel."""
        command = bytearray(1)
        command[0] = command_type.value
        self.jlink.rtt_write(self.config['rtt_command_channel'], command,
                             None)
class RttNordicProfilerHost:
    """Collects Profiler events from an nRF device over named RTT channels.

    Newer revision: discovers up/down channels by name, buffers raw RTT
    reads (self.bufs/self.bcnt) and decodes typed event payloads.
    """

    def __init__(self, config=RttNordicConfig, finish_event=None, queue=None,
                 event_filename=None, event_types_filename=None,
                 log_lvl=logging.WARNING):
        """Store configuration, set up logging and connect to the device."""
        self.event_filename = event_filename
        self.event_types_filename = event_types_filename
        self.config = config
        self.finish_event = finish_event
        self.queue = queue
        self.received_events = EventsData([], {})
        # Timestamp-counter overflow tracking.
        self.timestamp_overflows = 0
        self.after_half = False
        # Text buffer for event descriptions and raw byte-chunk FIFO.
        self.desc_buf = ""
        self.bufs = list()
        self.bcnt = 0
        self.last_read_time = time.time()
        self.reading_data = True
        self.logger = logging.getLogger('RTT Profiler Host')
        self.logger_console = logging.StreamHandler()
        self.logger.setLevel(log_lvl)
        self.log_format = logging.Formatter(
            '[%(levelname)s] %(name)s: %(message)s')
        self.logger_console.setFormatter(self.log_format)
        self.logger.addHandler(self.logger_console)
        # Channel indexes resolved by name in connect().
        self.rtt_up_channels = {
            'info': None,
            'data': None,
        }
        self.rtt_down_channels = {
            'command': None,
        }
        self.connect()

    @staticmethod
    def rtt_get_device_family(snr):
        """Probe the emulator (by serial number when given) for device family."""
        family = None
        with API('UNKNOWN') as api:
            if snr is not None:
                api.connect_to_emu_with_snr(snr)
            else:
                api.connect_to_emu_without_snr()
            family = api.read_device_family()
            api.disconnect_from_emu()
        return family

    def connect(self):
        """Open J-Link, start RTT and resolve channel indexes by name.

        Exits the process if the RTT control block or the named channels
        cannot be found within TIMEOUT seconds.
        """
        snr = self.config['device_snr']
        device_family = RttNordicProfilerHost.rtt_get_device_family(snr)
        self.logger.info('Recognized device family: ' + device_family)
        self.jlink = API(device_family)
        self.jlink.open()
        if snr is not None:
            self.jlink.connect_to_emu_with_snr(self.config['device_snr'])
        else:
            self.jlink.connect_to_emu_without_snr()
        if self.config['reset_on_start']:
            self.jlink.sys_reset()
            self.jlink.go()
        self.jlink.rtt_start()
        TIMEOUT = 20
        start_time = time.time()
        while not self.jlink.rtt_is_control_block_found():
            if time.time() - start_time > TIMEOUT:
                self.logger.error("Cannot find RTT control block")
                sys.exit()
            time.sleep(0.2)
        # Poll the channel lists until every named channel has been mapped.
        while (None in list(self.rtt_up_channels.values())) or \
                (None in list(self.rtt_down_channels.values())):
            down_channel_cnt, up_channel_cnt = self.jlink.rtt_read_channel_count(
            )
            for idx in range(0, down_channel_cnt):
                chan_name, _ = self.jlink.rtt_read_channel_info(
                    idx, 'DOWN_DIRECTION')
                try:
                    label = self.config['rtt_down_channel_names'][chan_name]
                    self.rtt_down_channels[label] = idx
                except KeyError:
                    # Channel name not used by the profiler — skip it.
                    continue
            for idx in range(0, up_channel_cnt):
                chan_name, _ = self.jlink.rtt_read_channel_info(
                    idx, 'UP_DIRECTION')
                try:
                    label = self.config['rtt_up_channel_names'][chan_name]
                    self.rtt_up_channels[label] = idx
                except KeyError:
                    continue
            if time.time() - start_time > TIMEOUT:
                self.logger.error(
                    "Cannot find properly configured RTT channels")
                sys.exit()
            time.sleep(0.2)
        self.logger.info("Connected to device via RTT")

    def shutdown(self):
        """Disconnect, flush buffered events, and save data when configured."""
        self.disconnect()
        self._read_remaining_events()
        if self.event_filename and self.event_types_filename:
            self.received_events.write_data_to_files(
                self.event_filename, self.event_types_filename)

    def disconnect(self):
        """Stop logging, drain RTT into the local buffer and close J-Link."""
        self.stop_logging_events()
        # read remaining data to buffer
        while True:
            try:
                buf = self.jlink.rtt_read(self.rtt_up_channels['data'],
                                          self.config['rtt_read_chunk_size'],
                                          encoding=None)
            except APIError:
                self.logger.error("Problem with reading RTT data.")
                buf = []
            if len(buf) > 0:
                self.bufs.append(buf)
                self.bcnt += len(buf)
            else:
                break
        try:
            self.jlink.rtt_stop()
            self.jlink.disconnect_from_emu()
            self.jlink.close()
        except APIError:
            # Best effort: keep whatever data was already buffered.
            self.logger.error("JLink connection lost. Saving collected data.")
            return
        self.logger.info("Disconnected from device")

    def _get_buffered_data(self, num_bytes):
        """Pop exactly *num_bytes* from the chunk FIFO (self.bufs)."""
        buf = bytearray()
        while len(buf) < num_bytes:
            tbuf = self.bufs[0]
            size = num_bytes - len(buf)
            if len(tbuf) <= size:
                # Consume the whole first chunk.
                buf = buf + tbuf
                del self.bufs[0]
            else:
                # Split the first chunk; keep the remainder queued.
                buf = buf + tbuf[0:size]
                self.bufs[0] = tbuf[size:]
        self.bcnt -= num_bytes
        return buf

    def _read_bytes(self, num_bytes):
        """Return *num_bytes* from the buffer, reading RTT as needed.

        When reading_data is False (shutdown path), serves purely from the
        buffer. Handles finish_event by saving data and exiting.
        NOTE(review): `now` is captured once before the loop and never
        refreshed inside it — verify the throttling logic is as intended.
        """
        now = time.time()
        while self.reading_data:
            # Serve from buffer when within the read period and enough
            # bytes are already available.
            if now - self.last_read_time < self.config['rtt_read_period'] \
                    and self.bcnt >= num_bytes:
                break
            try:
                buf = self.jlink.rtt_read(self.rtt_up_channels['data'],
                                          self.config['rtt_read_chunk_size'],
                                          encoding=None)
            except APIError:
                self.logger.error("Problem with reading RTT data.")
                self.shutdown()
                sys.exit()
            if len(buf) > 0:
                self.bufs.append(buf)
                self.bcnt += len(buf)
                # A full-looking chunk suggests more data pending: read again.
                if len(buf) > self.config['rtt_additional_read_thresh']:
                    continue
            self.last_read_time = now
            if self.bcnt >= num_bytes:
                break
            if self.finish_event is not None and self.finish_event.is_set():
                self.finish_event.clear()
                self.logger.info("Real time transmission closed")
                self.shutdown()
                self.logger.info("Events data saved to files")
                sys.exit()
            time.sleep(0.05)
        return self._get_buffered_data(num_bytes)

    def _calculate_timestamp_from_clock_ticks(self, clock_ticks):
        """Convert raw ticks (plus counted overflows) to a timestamp.

        Presumably seconds, given the /1000 applied to a ms-per-tick
        scale — TODO confirm against RttNordicConfig.
        """
        return self.config['ms_per_timestamp_tick'] * (
            clock_ticks +
            self.timestamp_overflows * self.config['timestamp_raw_max']) / 1000

    def _read_single_event_description(self):
        """Read one newline-terminated event description from the info channel.

        Returns (id, EventType) or (None, None) on the empty terminator.
        NOTE(review): after APIError, shutdown() is called but execution
        continues and `buf_temp` may be unbound on the first iteration —
        verify a return/exit is not missing here.
        """
        while '\n' not in self.desc_buf:
            try:
                buf_temp = self.jlink.rtt_read(
                    self.rtt_up_channels['info'],
                    self.config['rtt_read_chunk_size'],
                    encoding='utf-8')
            except APIError:
                self.logger.error("Problem with reading RTT data.")
                self.shutdown()
            self.desc_buf += buf_temp
            time.sleep(0.1)
        desc = str(self.desc_buf[0:self.desc_buf.find('\n')])
        # Empty field is send after last event description
        if len(desc) == 0:
            return None, None
        self.desc_buf = self.desc_buf[self.desc_buf.find('\n') + 1:]
        desc_fields = desc.split(',')
        # Layout: name, id, then data types followed by data descriptions.
        name = desc_fields[0]
        id = int(desc_fields[1])
        data_type = []
        for i in range(2, len(desc_fields) // 2 + 1):
            data_type.append(desc_fields[i])
        data = []
        for i in range(len(desc_fields) // 2 + 1, len(desc_fields)):
            data.append(desc_fields[i])
        return id, EventType(name, data_type, data)

    def _read_all_events_descriptions(self):
        """Consume descriptions until the empty terminator."""
        while True:
            id, et = self._read_single_event_description()
            if (id is None or et is None):
                break
            self.received_events.registered_events_types[id] = et

    def get_events_descriptions(self):
        """Request and store event type descriptions from the device."""
        self._send_command(Command.INFO)
        self._read_all_events_descriptions()
        if self.queue is not None:
            # Consumers expect the type dictionary as the first queue item.
            self.queue.put(self.received_events.registered_events_types)
        self.logger.info("Received events descriptions")
        self.logger.info("Ready to start logging events")

    def _read_single_event_rtt(self):
        """Decode one event record: id(1B), timestamp(4B), typed payload."""
        id = int.from_bytes(self._read_bytes(1),
                            byteorder=self.config['byteorder'],
                            signed=False)
        et = self.received_events.registered_events_types[id]
        buf = self._read_bytes(4)
        timestamp_raw = (int.from_bytes(buf,
                                        byteorder=self.config['byteorder'],
                                        signed=False))
        # Overflow detection: once the raw counter was seen in the 0.6-0.9
        # band, a value below 0.2*max means it wrapped around.
        if self.after_half \
                and timestamp_raw < 0.2 * self.config['timestamp_raw_max']:
            self.timestamp_overflows += 1
            self.after_half = False
        if timestamp_raw > 0.6 * self.config['timestamp_raw_max']:
            if timestamp_raw < 0.9 * self.config['timestamp_raw_max']:
                self.after_half = True
        timestamp = self._calculate_timestamp_from_clock_ticks(timestamp_raw)

        # Local decoders; each reads its field width and appends to *data*.
        # They take `self` explicitly because they are dispatched through
        # the READ_BYTES table below.
        def process_int32(self, data):
            buf = self._read_bytes(4)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=True))

        def process_uint32(self, data):
            buf = self._read_bytes(4)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=False))

        def process_int16(self, data):
            buf = self._read_bytes(2)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=True))

        def process_uint16(self, data):
            buf = self._read_bytes(2)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=False))

        def process_int8(self, data):
            buf = self._read_bytes(1)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=True))

        def process_uint8(self, data):
            buf = self._read_bytes(1)
            data.append(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=False))

        def process_string(self, data):
            # Length-prefixed (1 byte) UTF-8 string.
            buf = self._read_bytes(1)
            buf = self._read_bytes(
                int.from_bytes(buf,
                               byteorder=self.config['byteorder'],
                               signed=False))
            data.append(buf.decode())

        # Maps the type tags from the event description to decoders
        # ("t" = timestamp field, decoded as uint32).
        READ_BYTES = {
            "u8": process_uint8,
            "s8": process_int8,
            "u16": process_uint16,
            "s16": process_int16,
            "u32": process_uint32,
            "s32": process_int32,
            "s": process_string,
            "t": process_uint32
        }

        data = []
        for event_data_type in et.data_types:
            READ_BYTES[event_data_type](self, data)
        return Event(id, timestamp, data)

    def _read_remaining_events(self):
        """Drain the local buffer after disconnecting and signal end-of-stream."""
        self.reading_data = False
        while self.bcnt != 0:
            event = self._read_single_event_rtt()
            self.received_events.events.append(event)
            if self.queue is not None:
                self.queue.put(event)
        # End of transmission
        if self.queue is not None:
            self.queue.put(None)

    def read_events_rtt(self, time_seconds):
        """Stream events for *time_seconds* (forever when negative).

        Shuts down, saves data and terminates the process when done.
        """
        self.logger.info("Start logging events data")
        self.start_logging_events()
        start_time = time.time()
        current_time = start_time
        while current_time - start_time < time_seconds or time_seconds < 0:
            event = self._read_single_event_rtt()
            self.received_events.events.append(event)
            if self.queue is not None:
                self.queue.put(event)
            current_time = time.time()
        self.logger.info("Real time transmission closed")
        self.shutdown()
        self.logger.info("Events data saved to files")
        sys.exit()

    def start_logging_events(self):
        """Tell the device to start emitting events."""
        self._send_command(Command.START)

    def stop_logging_events(self):
        """Tell the device to stop emitting events."""
        self._send_command(Command.STOP)

    def _send_command(self, command_type):
        """Write a one-byte command code to the RTT command down-channel."""
        command = bytearray(1)
        command[0] = command_type.value
        try:
            self.jlink.rtt_write(self.rtt_down_channels['command'], command,
                                 None)
        except APIError:
            self.logger.error("Problem with writing RTT data.")
def main():
    """Merge Peripheral and Central profiler datasets into one dataset.

    Aligns the two synchronization-event sequences (matching their
    inter-event gaps rounded to 0.1), rescales Peripheral timestamps via
    sync_peripheral_ts(), reindexes/renames event types, filters events
    to the synchronized period and writes the merged result.
    """
    descr = "Merge data from Peripheral and Central. Synchronization events" \
            " should be registered at the beginning and at the end of" \
            " measurements (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("peripheral_dataset",
                        help="Name of Peripheral dataset")
    parser.add_argument("peripheral_sync_event",
                        help="Event used for synchronization - Peripheral")
    parser.add_argument("central_dataset", help="Name of Central dataset")
    parser.add_argument("central_sync_event",
                        help="Event used for synchronization - Central")
    parser.add_argument("result_dataset", help="Name for result dataset")
    args = parser.parse_args()

    evt_peripheral = EventsData([], {})
    evt_peripheral.read_data_from_files(args.peripheral_dataset + ".csv",
                                        args.peripheral_dataset + ".json")

    evt_central = EventsData([], {})
    evt_central.read_data_from_files(args.central_dataset + ".csv",
                                     args.central_dataset + ".json")

    # Compensating clock drift - based on synchronization events
    sync_evt_peripheral = evt_peripheral.get_event_type_id(
        args.peripheral_sync_event)
    sync_evt_central = evt_central.get_event_type_id(args.central_sync_event)

    sync_peripheral = list(filter(lambda x: x.type_id == sync_evt_peripheral,
                                  evt_peripheral.events))
    sync_central = list(filter(lambda x: x.type_id == sync_evt_central,
                               evt_central.events))

    ts_peripheral = list(map(lambda x: x.timestamp, evt_peripheral.events))
    sync_ts_peripheral = list(map(lambda x: x.timestamp, sync_peripheral))
    sync_ts_central = list(map(lambda x: x.timestamp, sync_central))

    # Bug fix: with fewer than two sync events per side the diff sequences
    # are empty and rounded_diffs_*[0] below raised IndexError.
    if len(sync_ts_peripheral) < 2 or len(sync_ts_central) < 2:
        print("Not enough synchronization events (require at least two)")
        return

    # Gaps between consecutive sync events; rounding to 0.1 s makes the
    # sequences comparable despite clock drift.
    sync_diffs_central = np.subtract(sync_ts_central[1:],
                                     sync_ts_central[:-1])
    sync_diffs_peripheral = np.subtract(sync_ts_peripheral[1:],
                                        sync_ts_peripheral[:-1])

    rounded_diffs_central = list(map(lambda x: round(x, 1),
                                     sync_diffs_central))
    rounded_diffs_peripheral = list(map(lambda x: round(x, 1),
                                        sync_diffs_peripheral))

    # Bug fix: .index() raises ValueError when the gap sequences share no
    # common element — report instead of crashing with a traceback.
    try:
        shift_c = rounded_diffs_central.index(rounded_diffs_peripheral[0])
        shift_p = rounded_diffs_peripheral.index(rounded_diffs_central[0])
    except ValueError:
        print("Unable to match synchronization events between datasets")
        return

    # Drop leading unmatched sync events from whichever side started earlier.
    if shift_c < shift_p:
        sync_ts_central = sync_ts_central[shift_c:]
    elif shift_p < shift_c:
        sync_ts_peripheral = sync_ts_peripheral[shift_p:]

    # Trim trailing sync events so both sequences have equal length.
    if len(sync_ts_central) < len(sync_ts_peripheral):
        sync_ts_peripheral = sync_ts_peripheral[:len(sync_ts_central)]
    elif len(sync_ts_peripheral) < len(sync_ts_central):
        sync_ts_central = sync_ts_central[:len(sync_ts_peripheral)]

    # Map every Peripheral timestamp onto the Central clock.
    new_ts_peripheral = sync_peripheral_ts(ts_peripheral, sync_ts_peripheral,
                                           sync_ts_central)
    assert len(new_ts_peripheral) == len(ts_peripheral)

    # Reindexing, renaming and compensating time differences for peripheral events
    max_central_id = max(
        [int(i) for i in evt_central.registered_events_types])

    assert len(new_ts_peripheral) == len(evt_peripheral.events)
    evt_peripheral.events = list(
        map(lambda x, y: Event(x.type_id + max_central_id + 1, y, x.data),
            evt_peripheral.events, new_ts_peripheral))
    evt_peripheral.registered_events_types = {
        k + max_central_id + 1: EventType(v.name + "_peripheral",
                                          v.data_types, v.data_descriptions)
        for k, v in evt_peripheral.registered_events_types.items()
    }
    evt_central.registered_events_types = {
        k: EventType(v.name + "_central", v.data_types, v.data_descriptions)
        for k, v in evt_central.registered_events_types.items()
    }

    # Filter out events that are out of synchronization period
    TIME_DIFF = 0.5
    start_time = sync_ts_central[0] - TIME_DIFF
    end_time = sync_ts_central[-1] + TIME_DIFF
    evt_peripheral.events = list(
        filter(lambda x: x.timestamp >= start_time and x.timestamp <= end_time,
               evt_peripheral.events))
    evt_central.events = list(
        filter(lambda x: x.timestamp >= start_time and x.timestamp <= end_time,
               evt_central.events))

    all_registered_events_types = evt_peripheral.registered_events_types.copy()
    all_registered_events_types.update(evt_central.registered_events_types)

    result_events = EventsData(evt_peripheral.events + evt_central.events,
                               all_registered_events_types)
    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")

    print('Profiler data merged successfully')
def main():
    """Merge device and dongle profiler datasets into one dataset.

    Compensates dongle clock drift with a linear model fitted to the
    first and last ble_peer_event on each side, reindexes/renames dongle
    event types and writes the merged result.
    """
    descr = "Merge data from device and dongle. ble_peer_events should be" \
            " registered at the beginning and at the end of measurements" \
            " (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("device_dataset", help="Name of device dataset")
    parser.add_argument("dongle_dataset", help="Name of dongle dataset")
    parser.add_argument('result_dataset', help="Name for result dataset")
    args = parser.parse_args()

    evt_device = EventsData([], {})
    evt_device.read_data_from_files(args.device_dataset + ".csv",
                                    args.device_dataset + ".json")

    evt_dongle = EventsData([], {})
    evt_dongle.read_data_from_files(args.dongle_dataset + ".csv",
                                    args.dongle_dataset + ".json")

    # Compensating clock drift - based on ble_peer_event
    peer_evt_device = evt_device.get_event_type_id("ble_peer_event")
    peer_evt_dongle = evt_dongle.get_event_type_id("ble_peer_event")

    peer_device = list(
        filter(lambda x: x.type_id == peer_evt_device, evt_device.events))
    peer_dongle = list(
        filter(lambda x: x.type_id == peer_evt_dongle, evt_dongle.events))

    # Bug fix: the sibling merge script guards against missing sync events;
    # without this, empty lists caused IndexError below.
    if len(peer_device) < 2 or len(peer_dongle) < 2:
        print("Not enough ble_peer_events (require at least two)")
        return

    diff_start = peer_device[0].timestamp - peer_dongle[0].timestamp
    diff_end = peer_device[-1].timestamp - peer_dongle[-1].timestamp
    diff_time = peer_dongle[-1].timestamp - peer_dongle[0].timestamp
    time_start = peer_dongle[0].timestamp

    # Bug fix: guard against ZeroDivisionError when the first and last
    # ble_peer_events share the same dongle timestamp.
    if diff_time == 0:
        print("ble_peer_events have identical timestamps - cannot estimate"
              " clock drift")
        return

    # Using linear approximation of clock drift between device and dongle
    # t_dongle = t_device + (diff_time * a) + b
    b = diff_start
    a = (diff_end - diff_start) / diff_time

    B_DIFF_THRESH = 0.1
    if abs(diff_end - diff_start) > B_DIFF_THRESH:
        # (typos fixed: "beginnning" -> "beginning", "missmatch" -> "mismatch")
        print("Clock drift difference between beginning and end is high.")
        print("This could be caused by measurements mismatch or very long"
              " measurement time.")

    # Reindexing, renaming and compensating time differences for dongle events
    max_device_id = max([int(i) for i in evt_device.registered_events_types])
    evt_dongle.events = list(
        map(
            lambda x: Event(x.type_id + max_device_id + 1,
                            x.timestamp + a * (x.timestamp - time_start) + b,
                            x.data), evt_dongle.events))
    evt_dongle.registered_events_types = {
        k + max_device_id + 1: EventType(v.name + "_dongle", v.data_types,
                                         v.data_descriptions)
        for k, v in evt_dongle.registered_events_types.items()
    }
    evt_device.registered_events_types = {
        k: EventType(v.name + "_device", v.data_types, v.data_descriptions)
        for k, v in evt_device.registered_events_types.items()
    }

    all_registered_events_types = evt_device.registered_events_types.copy()
    all_registered_events_types.update(evt_dongle.registered_events_types)

    result_events = EventsData(evt_device.events + evt_dongle.events,
                               all_registered_events_types)
    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")