Example #1
    def transmit_all_events_descriptions(self):
        try:
            desc_bytes = self.in_stream.recv_desc()
        except StreamError as err:
            self.logger.error("Receiving error: {}. Exiting".format(err))
            sys.exit()
        desc_buf = desc_bytes.decode()
        f = StringIO(desc_buf)
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            # Empty field is sent after last event description
            if len(row) == 0:
                break
            name = row[0]
            id = int(row[1])
            data_type = row[2:len(row) // 2 + 1]
            data = row[len(row) // 2 + 1:]
            self.raw_data.registered_events_types[id] = EventType(name, data_type, data)
            if name not in ('event_processing_start', 'event_processing_end'):
                self.processed_events.registered_events_types[id] = EventType(name, data_type, data)

        self.event_processing_start_id = \
            self.raw_data.get_event_type_id('event_processing_start')
        self.event_processing_end_id = \
            self.raw_data.get_event_type_id('event_processing_end')

        if self.sending:
            event_types_dict = dict((k, v.serialize())
                    for k, v in self.processed_events.registered_events_types.items())
            json_et_string = json.dumps(event_types_dict)
            try:
                self.out_stream.send_desc(json_et_string.encode())
            except StreamError as err:
                self.logger.error("Error: {}. Unable to send data".format(err))
                sys.exit()
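
The row slicing in the loop above is easier to follow with a concrete description row. The snippet below is only a sketch: the layout (name, id, N data-type fields, then N data-description fields) is inferred from the slicing, and the event name and field values are made up for illustration.

# Hedged illustration of the slicing above; the field layout and the values
# are assumptions made for this example only.
row = ['hid_report_sent', '3', 'u32', 'u8', 'report_id', 'error']
name = row[0]                            # 'hid_report_sent'
type_id = int(row[1])                    # 3
data_type = row[2:len(row) // 2 + 1]     # ['u32', 'u8']
data = row[len(row) // 2 + 1:]           # ['report_id', 'error']
assert len(data_type) == len(data)
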
Example #2
    def transmit_all_events_descriptions(self):
        while True:
            try:
                desc_bytes = self.stream.recv_desc()
                break
            except StreamError as err:
                if err.args[1] == StreamError.TIMEOUT_MSG:
                    if self.event_close.is_set():
                        self.logger.info(
                            "Module closed before receiving event descriptions."
                        )
                        sys.exit()
                    continue
                self.logger.error("Receiving error: {}. Exiting".format(err))
                sys.exit()
        desc_buf = desc_bytes.decode()
        f = StringIO(desc_buf)
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            # Empty field is sent after last event description
            if len(row) == 0:
                break
            name = row[0]
            id = int(row[1])
            data_type = row[2:len(row) // 2 + 1]
            data = row[len(row) // 2 + 1:]
            self.raw_data.registered_events_types[id] = EventType(
                name, data_type, data)
            if name not in ('event_processing_start', 'event_processing_end'):
                self.processed_events.registered_events_types[id] = EventType(
                    name, data_type, data)

        self.event_processing_start_id = \
            self.raw_data.get_event_type_id('event_processing_start')
        self.event_processing_end_id = \
            self.raw_data.get_event_type_id('event_processing_end')

        if self.sending:
            event_types_dict = dict(
                (k, v.serialize()) for k, v in
                self.processed_events.registered_events_types.items())
            json_et_string = json.dumps(event_types_dict)
            try:
                self.stream.send_desc(json_et_string.encode())
            except StreamError as err:
                self.logger.error(
                    "Sending error: {}. Cannot send descriptions.".format(err))
                sys.exit()

    def _read_single_event_description(self):
        while '\n' not in self.desc_buf:
            try:
                buf_temp = self.jlink.rtt_read(
                    self.config['rtt_info_channel'],
                    self.config['rtt_read_chunk_size'],
                    encoding='utf-8')

            except Exception as err:
                self.logger.error("Problem with reading RTT data: {}".format(err))
                self.shutdown()

            self.desc_buf += buf_temp
            time.sleep(0.1)

        desc = str(self.desc_buf[0:self.desc_buf.find('\n')])
        # Empty field is sent after last event description
        if len(desc) == 0:
            return None, None
        self.desc_buf = self.desc_buf[self.desc_buf.find('\n') + 1:]

        desc_fields = desc.split(',')

        name = desc_fields[0]
        id = int(desc_fields[1])
        data_type = []
        for i in range(2, len(desc_fields) // 2 + 1):
            data_type.append(desc_fields[i])
        data = []
        for i in range(len(desc_fields) // 2 + 1, len(desc_fields)):
            data.append(desc_fields[i])
        return id, EventType(name, data_type, data)
Example #4
    def _read_events_types_json(self, filename):
        try:
            with open(filename, "r") as rd:
                data = json.load(rd)
        except IOError:
            self.logger.error("Problem with accessing file: " + filename)
            sys.exit()
        csv_hash = data['csv_hash']
        del data['csv_hash']
        self.registered_events_types = dict(
            (int(k), EventType.deserialize(v)) for k, v in data.items())
        return csv_hash
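
The csv_hash value returned above is presumably used by the caller to check that the JSON descriptions still match the accompanying CSV data file. A minimal sketch of such a check, assuming (this is not confirmed by the code above) that the hash is an MD5 hex digest of the raw CSV contents:

import hashlib

def csv_matches_descriptions(csv_filename, csv_hash):
    # Assumption: csv_hash is an MD5 hex digest of the CSV file's raw bytes.
    with open(csv_filename, "rb") as rd:
        return hashlib.md5(rd.read()).hexdigest() == csv_hash
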
Example #5
    def plot_events_real_time(self, selected_events_types=None):
        self.start_time = time.time()
        # Receive event descriptions
        while True:
            try:
                desc_bytes = self.in_stream.recv_desc()
                break
            except StreamError as err:
                if err.args[1] == StreamError.TIMEOUT_MSG:
                    if self.event_close.is_set():
                        self.logger.info("Module closed before receiving event descriptions.")
                        sys.exit()
                    continue
                self.logger.error("Receiving error: {}. Exiting".format(err))
                sys.exit()
        data_str = desc_bytes.decode()
        event_types_dict = json.loads(data_str)
        self.processed_events.registered_events_types = dict((int(k), EventType.deserialize(v))
                                                             for k, v in event_types_dict.items())
        if self.processed_events.registered_events_types is None:
            self.logger.error("Event descriptors not sent properly")
            sys.exit()
        if selected_events_types is None:
            selected_events_types = list(
                self.processed_events.registered_events_types.keys())

        fig = self._prepare_plot(selected_events_types)

        self.start_stop_ax = plt.axes([0.8, 0.025, 0.1, 0.04])
        self.start_stop_button = Button(self.start_stop_ax, 'Start/Stop')
        self.start_stop_button.on_clicked(self.on_click_start_stop)
        plt.sca(self.draw_state.ax)

        self.ani = animation.FuncAnimation(
            fig,
            self.animate_events_real_time,
            interval=self.plot_config['refresh_time'])
        plt.show()

    def _read_single_event_description(self):
        buf = self._read_char(self.config['rtt_info_channel'])
        if buf == '\n':
            return None, None

        raw_desc = []
        raw_desc.append(buf)
        while buf != '\n':
            buf = self._read_char(self.config['rtt_info_channel'])
            raw_desc.append(buf)
        raw_desc.pop()

        desc = "".join(raw_desc)
        desc_fields = desc.split(',')

        name = desc_fields[0]
        id = int(desc_fields[1])
        data_type = []
        for i in range(2, len(desc_fields) // 2 + 1):
            data_type.append(desc_fields[i])
        data = []
        for i in range(len(desc_fields) // 2 + 1, len(desc_fields)):
            data.append(desc_fields[i])
        return id, EventType(name, data_type, data)
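
Example #1 and Example #5 are the two ends of the same hand-off: the transmitter JSON-encodes the output of EventType.serialize(), and the plotter turns it back into objects with EventType.deserialize(). The stand-in class below is only a sketch of what would satisfy both sides; the real EventType may carry different fields, and the example event name is invented.

import json


class EventType:
    # Minimal stand-in used only to illustrate the round trip; not the
    # project's actual implementation.
    def __init__(self, name, data_types, data_descriptions):
        self.name = name
        self.data_types = data_types
        self.data_descriptions = data_descriptions

    def serialize(self):
        # Plain dict so json.dumps() can encode it directly.
        return {'name': self.name,
                'data_types': self.data_types,
                'data_descriptions': self.data_descriptions}

    @classmethod
    def deserialize(cls, raw):
        return cls(raw['name'], raw['data_types'], raw['data_descriptions'])


# Sender side (as in Example #1): dict of id -> serialized type, JSON-encoded.
types = {7: EventType('hid_report_sent', ['u32'], ['report_id'])}
payload = json.dumps({k: v.serialize() for k, v in types.items()})

# Receiver side (as in Example #5): JSON keys arrive as strings, hence int(k).
restored = {int(k): EventType.deserialize(v)
            for k, v in json.loads(payload).items()}
assert restored[7].name == 'hid_report_sent'
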
Example #7
def main():
    descr = "Merge data from Peripheral and Central. Synchronization events" \
            " should be registered at the beginning and at the end of" \
            " measurements (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("peripheral_dataset", help="Name of Peripheral dataset")
    parser.add_argument("peripheral_sync_event",
                        help="Event used for synchronization - Peripheral")
    parser.add_argument("central_dataset", help="Name of Central dataset")
    parser.add_argument("central_sync_event",
                        help="Event used for synchronization - Central")
    parser.add_argument("result_dataset", help="Name for result dataset")
    args = parser.parse_args()

    evt_peripheral = ProcessedEvents()
    evt_peripheral.read_data_from_files(args.peripheral_dataset + ".csv",
                                        args.peripheral_dataset + ".json")

    evt_central = ProcessedEvents()
    evt_central.read_data_from_files(args.central_dataset + ".csv",
                                     args.central_dataset + ".json")

    # Compensating clock drift - based on synchronization events
    sync_evt_peripheral = evt_peripheral.get_event_type_id(args.peripheral_sync_event)
    sync_evt_central = evt_central.get_event_type_id(args.central_sync_event)

    sync_peripheral = list(filter(lambda x: x.submit.type_id == sync_evt_peripheral,
                                  evt_peripheral.tracked_events))

    sync_central = list(filter(lambda x: x.submit.type_id == sync_evt_central,
                               evt_central.tracked_events))

    ts_peripheral = np.array(list(map(lambda x: list([x.submit.timestamp, x.proc_start_time, x.proc_end_time]),
                                      evt_peripheral.tracked_events)))
    sync_ts_peripheral = list(map(lambda x: x.submit.timestamp, sync_peripheral))
    sync_ts_central = list(map(lambda x: x.submit.timestamp, sync_central))

    sync_diffs_central = np.subtract(sync_ts_central[1:], sync_ts_central[:-1])
    sync_diffs_peripheral = np.subtract(sync_ts_peripheral[1:], sync_ts_peripheral[:-1])

    rounded_diffs_central = list(map(lambda x: round(x, 1), sync_diffs_central))
    rounded_diffs_peripheral = list(map(lambda x: round(x, 1), sync_diffs_peripheral))

    shift_c = rounded_diffs_central.index(rounded_diffs_peripheral[0])
    shift_p = rounded_diffs_peripheral.index(rounded_diffs_central[0])

    if shift_c < shift_p:
        sync_ts_central = sync_ts_central[shift_c:]
    elif shift_p < shift_c:
        sync_ts_peripheral = sync_ts_peripheral[shift_p:]

    if len(sync_ts_central) < len(sync_ts_peripheral):
        sync_ts_peripheral = sync_ts_peripheral[:len(sync_ts_central)]
    elif len(sync_ts_peripheral) < len(sync_ts_central):
        sync_ts_central = sync_ts_central[:len(sync_ts_peripheral)]

    new_ts_peripheral = ts_peripheral.copy()
    new_ts_peripheral[list(list(elem is not None for elem in row) for row in new_ts_peripheral)] = \
        sync_peripheral_ts(ts_peripheral, sync_ts_peripheral, sync_ts_central)
    assert len(new_ts_peripheral) == len(ts_peripheral)
    # Reindexing, renaming and compensating time differences for peripheral events
    max_central_id = max([int(i) for i in evt_central.registered_events_types])

    assert len(new_ts_peripheral) == len(evt_peripheral.tracked_events)
    evt_peripheral.tracked_events = list(map(lambda x, y:
                                         TrackedEvent(Event(x.submit.type_id + max_central_id + 1, y[0], x.submit.data),
                                                      y[1],
                                                      y[2]),
                                             evt_peripheral.tracked_events, new_ts_peripheral))

    evt_peripheral.registered_events_types = {k + max_central_id + 1 :
                EventType(v.name + "_peripheral", v.data_types, v.data_descriptions)
                for k, v in evt_peripheral.registered_events_types.items()}

    evt_central.registered_events_types = {
                k : EventType(v.name + "_central",
                              v.data_types, v.data_descriptions)
                for k, v in evt_central.registered_events_types.items()}

    # Filter out events that are out of synchronization period
    TIME_DIFF = 0.5
    start_time = sync_ts_central[0] - TIME_DIFF
    end_time = sync_ts_central[-1] + TIME_DIFF
    evt_peripheral.tracked_events = list(filter(lambda x:
        x.submit.timestamp >= start_time and (x.proc_end_time <= end_time if x.proc_end_time is not None \
            else x.submit.timestamp <= end_time),
        evt_peripheral.tracked_events))

    evt_central.tracked_events = list(filter(lambda x:
        x.submit.timestamp >= start_time and (x.proc_end_time <= end_time if x.proc_end_time is not None \
            else x.submit.timestamp <= end_time),
        evt_central.tracked_events))

    # Filter out events that were out of interpolation range
    evt_peripheral.tracked_events = list(filter(lambda x:
        INTERP_OUT_OF_RANGE_VAL not in (x.submit.timestamp, x.proc_start_time, x.proc_end_time),
        evt_peripheral.tracked_events))

    all_registered_events_types = evt_peripheral.registered_events_types.copy()
    all_registered_events_types.update(evt_central.registered_events_types)

    result_events = ProcessedEvents()
    result_events.tracked_events = evt_peripheral.tracked_events + evt_central.tracked_events
    result_events.registered_events_types = all_registered_events_types

    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")

    print('Profiler data merged successfully')
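
The merge above shifts peripheral event type ids past the largest central id so the combined registry has no collisions; the variants that follow do the same in the other direction. A minimal sketch of that reindexing with made-up registries (plain strings stand in for EventType):

# Made-up registries: ids 0..2 on the central side, 0..1 on the peripheral side.
central_types = {0: 'scan_start_central', 1: 'scan_stop_central', 2: 'conn_central'}
peripheral_types = {0: 'btn_press_peripheral', 1: 'btn_release_peripheral'}

max_central_id = max(central_types)  # 2
merged = dict(central_types)
# Shift peripheral ids past the central range so both registries can coexist.
merged.update({k + max_central_id + 1: v for k, v in peripheral_types.items()})
assert sorted(merged) == [0, 1, 2, 3, 4]
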
def main():
    descr = "Merge data from Peripheral and Central. Synchronization events" \
            " should be registered at the beginning and at the end of" \
            " measurements (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("peripheral_dataset",
                        help="Name of Peripheral dataset")
    parser.add_argument("peripheral_sync_event",
                        help="Event used for synchronization - Peripheral")
    parser.add_argument("central_dataset", help="Name of Central dataset")
    parser.add_argument("central_sync_event",
                        help="Event used for synchronization - Central")
    parser.add_argument("result_dataset", help="Name for result dataset")
    args = parser.parse_args()

    evt_peripheral = EventsData([], {})
    evt_peripheral.read_data_from_files(args.peripheral_dataset + ".csv",
                                        args.peripheral_dataset + ".json")

    evt_central = EventsData([], {})
    evt_central.read_data_from_files(args.central_dataset + ".csv",
                                     args.central_dataset + ".json")

    # Compensating clock drift - based on synchronization events
    sync_evt_peripheral = evt_peripheral.get_event_type_id(
        args.peripheral_sync_event)
    sync_evt_central = evt_central.get_event_type_id(args.central_sync_event)

    sync_peripheral = list(
        filter(lambda x: x.type_id == sync_evt_peripheral,
               evt_peripheral.events))
    sync_central = list(
        filter(lambda x: x.type_id == sync_evt_central, evt_central.events))

    if len(sync_central) < 2 or len(sync_peripheral) < 2:
        print("Not enough synchronization events (require at least two)")
        return

    diff_start = sync_peripheral[0].timestamp - sync_central[0].timestamp
    diff_end = sync_peripheral[-1].timestamp - sync_central[-1].timestamp
    diff_time = sync_central[-1].timestamp - sync_central[0].timestamp
    time_start = sync_central[0].timestamp

    # Using linear approximation of clock drift between peripheral and central
    # t_peripheral = t_central + a * (t_central - time_start) + b

    b = diff_start
    a = (diff_end - diff_start) / diff_time

    B_DIFF_THRESH = 0.1
    if abs(diff_end - diff_start) > B_DIFF_THRESH:
        print("Clock drift difference between beginnning and end is high.")
        print("This could be caused by measurements missmatch or very long" \
              " measurement time.")

    # Reindexing, renaming and compensating time differences for central events
    max_peripheral_id = max(
        [int(i) for i in evt_peripheral.registered_events_types])

    evt_central.events = list(
        map(
            lambda x: Event(x.type_id + max_peripheral_id + 1, x.timestamp + a
                            * (x.timestamp - time_start) + b, x.data),
            evt_central.events))

    evt_central.registered_events_types = {
        k + max_peripheral_id + 1: EventType(v.name + "_central", v.data_types,
                                             v.data_descriptions)
        for k, v in evt_central.registered_events_types.items()
    }

    evt_peripheral.registered_events_types = {
        k: EventType(v.name + "_peripheral", v.data_types, v.data_descriptions)
        for k, v in evt_peripheral.registered_events_types.items()
    }

    all_registered_events_types = evt_peripheral.registered_events_types.copy()
    all_registered_events_types.update(evt_central.registered_events_types)

    result_events = EventsData(evt_peripheral.events + evt_central.events,
                               all_registered_events_types)

    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")
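
The clock-drift correction above is a straight line fixed by the first and last synchronization pair: offset b at the start plus a slope a accumulated over elapsed central time. A short numeric check of the same formula, with made-up timestamps and a helper name (to_peripheral_time) introduced only for this sketch:

# Made-up sync timestamps (seconds): the peripheral clock starts 2.5 s ahead
# and gains 10 ms over a 100 s measurement.
sync_ts_central = [10.0, 110.0]
sync_ts_peripheral = [12.5, 112.51]

diff_start = sync_ts_peripheral[0] - sync_ts_central[0]   # 2.5
diff_end = sync_ts_peripheral[-1] - sync_ts_central[-1]   # 2.51
diff_time = sync_ts_central[-1] - sync_ts_central[0]      # 100.0
time_start = sync_ts_central[0]

b = diff_start
a = (diff_end - diff_start) / diff_time                    # 1e-4


def to_peripheral_time(t_central):
    # The same correction the map() above applies to every central event.
    return t_central + a * (t_central - time_start) + b


assert abs(to_peripheral_time(10.0) - 12.5) < 1e-9
assert abs(to_peripheral_time(110.0) - 112.51) < 1e-9
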
Example #9
def main():
    descr = "Merge data from device and dongle. ble_peer_events should be" \
            " registered at the beginning and at the end of measurements"  \
            " (used to compensate clock drift)."
    parser = argparse.ArgumentParser(description=descr)
    parser.add_argument("device_dataset", help="Name of device dataset")
    parser.add_argument("dongle_dataset", help="Name of dongle dataset")
    parser.add_argument('result_dataset', help="Name for result dataset")
    args = parser.parse_args()

    evt_device = EventsData([], {})
    evt_device.read_data_from_files(args.device_dataset + ".csv",
                                    args.device_dataset + ".json")

    evt_dongle = EventsData([], {})
    evt_dongle.read_data_from_files(args.dongle_dataset + ".csv",
                                    args.dongle_dataset + ".json")

    # Compensating clock drift - based on ble_peer_event
    peer_evt_device = evt_device.get_event_type_id("ble_peer_event")
    peer_evt_dongle = evt_dongle.get_event_type_id("ble_peer_event")

    peer_device = list(
        filter(lambda x: x.type_id == peer_evt_device, evt_device.events))
    peer_dongle = list(
        filter(lambda x: x.type_id == peer_evt_dongle, evt_dongle.events))

    diff_start = peer_device[0].timestamp - peer_dongle[0].timestamp
    diff_end = peer_device[-1].timestamp - peer_dongle[-1].timestamp
    diff_time = peer_dongle[-1].timestamp - peer_dongle[0].timestamp
    time_start = peer_dongle[0].timestamp

    # Using linear approximation of clock drift between device and dongle
    # t_device = t_dongle + a * (t_dongle - time_start) + b

    b = diff_start
    a = (diff_end - diff_start) / diff_time

    B_DIFF_THRESH = 0.1
    if abs(diff_end - diff_start) > B_DIFF_THRESH:
        print("Clock drift difference between beginnning and end is high.")
        print("This could be caused by measurements missmatch or very long" \
              " measurement time.")

    # Reindexing, renaming and compensating time differences for dongle events
    max_device_id = max([int(i) for i in evt_device.registered_events_types])

    evt_dongle.events = list(
        map(
            lambda x: Event(x.type_id + max_device_id + 1, x.timestamp + a *
                            (x.timestamp - time_start) + b, x.data),
            evt_dongle.events))

    evt_dongle.registered_events_types = {
        k + max_device_id + 1: EventType(v.name + "_dongle", v.data_types,
                                         v.data_descriptions)
        for k, v in evt_dongle.registered_events_types.items()
    }

    evt_device.registered_events_types = {
        k: EventType(v.name + "_device", v.data_types, v.data_descriptions)
        for k, v in evt_device.registered_events_types.items()
    }

    all_registered_events_types = evt_device.registered_events_types.copy()
    all_registered_events_types.update(evt_dongle.registered_events_types)

    result_events = EventsData(evt_device.events + evt_dongle.events,
                               all_registered_events_types)

    result_events.write_data_to_files(args.result_dataset + ".csv",
                                      args.result_dataset + ".json")