def __init__(self, in_file_directory, in_file_name, in_extension, in_delimiter, in_missing_value, in_quote, out_file_directory, out_file_name, out_extension, out_delimiter, out_missing_value, out_file_single_file, out_file_separate_line, raw_data_structure, clean_data_structure):
    """Set up the input/output file handlers, the raw/clean data handlers
    and the cleaning options for the pipeline."""
    # Reader for the raw input file.
    self.in_file_handler = FileHandler(in_file_directory, in_file_name, in_extension, in_delimiter)
    # With single-file output, a 'title' separator line is replaced by the
    # input file's own name.
    if out_file_single_file == 'on' and out_file_separate_line == 'title':
        out_file_separate_line = in_file_name
    self.out_file_handler = FileHandler(out_file_directory, out_file_name, out_extension, out_delimiter, out_file_single_file, out_file_separate_line)
    # Holders for the raw and the cleaned data sets, each with its own
    # declared structure.
    self.raw_data_handler = DataHandler()
    self.raw_data_handler.structure = raw_data_structure
    self.clean_data_handler = DataHandler()
    self.clean_data_handler.structure = clean_data_structure
    # Accumulates validation messages from check_valid-style calls.
    self.error_message = ''
    # Whether surrounding quotes should be stripped from input fields.
    self.unquote = (in_quote == 'yes')
    self.in_missing_value = in_missing_value
    self.out_missing_value = out_missing_value
def __init__(self, dh=None, kernel_cols=None): if dh is None: self.dh = DataHandler() else: self.dh = dh if kernel_cols is None: self.kernel_cols = "all" else: self.kernel_cols = kernel_cols logging.info("Initializing") self.kde = None
def __init__(self, config):
    """Keep the configuration, announce the OSC endpoint, and build the
    collaborators (data handler, bluetooth layer) before registering
    event handlers."""
    self.config = config
    cfg = self.config
    print("OSC Address: " + str(cfg.OSC_ADDRESS))
    print("OSC Port: " + str(cfg.OSC_PORT))
    print()
    self.data_handler = DataHandler(cfg)
    self.bluetooth = Bluetooth(cfg.MESSAGE_DELAY)
    # Connection bookkeeping.
    self.myos = []              # devices already connected
    self.myo_to_connect = None  # candidate discovered while scanning
    self.scanning = False
    # Register callbacks for the bluetooth events we expect.
    self.set_handlers()
def __init__(self, dh=None):
    """Open a PostGIS connection and attach the data handler to plot from."""
    logging.info("Let's plot some stuff!")
    self.conn = get_postgis_conn()
    # Fall back to a fresh DataHandler when none is supplied.
    self.dh = DataHandler() if dh is None else dh
class KernelModel:
    """Gaussian kernel-density model over selected columns of a DataHandler."""

    def __init__(self, dh=None, kernel_cols=None):
        """Store the data source and column selection.

        :param dh: DataHandler supplying the data; a fresh one is created
            when omitted.
        :param kernel_cols: list of column names to fit on, or "all"
            (default) to use the handler's density frame.
        """
        self.dh = DataHandler() if dh is None else dh
        self.kernel_cols = "all" if kernel_cols is None else kernel_cols
        logging.info("Initializing")
        # Fitted scipy.stats.gaussian_kde; built lazily by train_model().
        self.kde = None

    def train_model(self):
        """Fit the KDE on the configured columns of the handler's data."""
        if self.kernel_cols != "all":
            # gaussian_kde expects shape (n_dims, n_points), hence the .T
            vals = self.dh.full_df[self.kernel_cols].values.T
        else:
            vals = self.dh.get_density_df().values.T
        self.kde = stats.gaussian_kde(vals)
        logging.info("Trained a kde")

    def get_densities(self, df=None):
        """Return a density prediction for each data point of *df*.

        Assumes a preprocessed df is sent in; defaults to the handler's
        full frame when omitted. Trains the KDE on first use.
        (This text was previously a bare string floating between methods;
        it is now the method's docstring.)
        """
        if self.kde is None:
            logging.info("No trained kde, training now. ")
            self.train_model()
        if df is None:
            df = self.dh.full_df
        if self.kernel_cols != "all":
            vals = df[self.kernel_cols].values.T
        else:
            vals = df.values.T
        return self.kde.evaluate(vals)
def map_data(self, file_directory, file_name, file_extension, file_delimiter, replace_ids, kept_id_position, lost_id_position, target_positions, drop_unreferenced_entries, target_unreferenced_entries, drop_ghosts, remove_duplicates, target_duplicates_set, merge_entries, target_merge_set, commands):
    """Map/merge the clean data against an external mapping file and
    return [mapped_data, str_counter_ids] produced by the Mapper."""
    # Read the mapping file into its own handler.
    map_file = FileHandler(file_directory, file_name, file_extension, file_delimiter)
    # TODO: validate map_file (check_valid) before importing and stop the
    # system on error.
    map_file.import_data(False)
    # Move its rows into a plain data handler.
    map_rows = DataHandler()
    map_rows.data = list(map_file.data)

    # Resolve a comma-separated column-name list into index positions;
    # an 'off' switch means "no columns selected".
    def _indices(switch, names):
        if switch != 'off':
            return self.get_indeces_from_clean_structure(names)
        return []

    target_position_index = _indices(target_positions, target_positions)
    target_unreferenced_entries_index = _indices(drop_unreferenced_entries, target_unreferenced_entries)
    target_duplicates_set_index = _indices(remove_duplicates, target_duplicates_set)
    target_merge_set_index = _indices(merge_entries, target_merge_set)

    # Translate the textual merge commands into per-index operations.
    commands = self.get_commands(commands, target_merge_set_index)

    # Run the mapper over the clean data with all resolved settings.
    mapper = Mapper(list(self.clean_data_handler.data_final), map_rows.data, replace_ids, kept_id_position, lost_id_position, target_position_index, drop_unreferenced_entries, target_unreferenced_entries_index, drop_ghosts, remove_duplicates, target_duplicates_set_index, merge_entries, target_merge_set_index, commands)
    return [list(mapper.mapped_data), mapper.str_counter_ids]
class Analyzer:
    """Pipeline wrapper: import raw data, structure it, clean it, save it,
    and optionally map/merge it against an external mapping file.

    Fixes applied: the index arithmetic used `/ 2`, which under Python 3
    produces floats — `range(len(...) / 2)` raises TypeError and
    `[...] / 2` yields float indices and dict keys like "1.5". All such
    divisions now use `//`. The lazy `filter()` result is materialized so
    it is not exhausted by its first consumer.
    """

    def __init__(self, in_file_directory, in_file_name, in_extension, in_delimiter, in_missing_value, in_quote, out_file_directory, out_file_name, out_extension, out_delimiter, out_missing_value, out_file_single_file, out_file_separate_line, raw_data_structure, clean_data_structure):
        """Build the in/out file handlers and the raw/clean data handlers."""
        self.in_file_handler = FileHandler(in_file_directory, in_file_name, in_extension, in_delimiter)
        # With single-file output, a 'title' separator line is replaced by
        # the input file's own name.
        if out_file_single_file == 'on':
            if out_file_separate_line == 'title':
                out_file_separate_line = in_file_name
        self.out_file_handler = FileHandler(out_file_directory, out_file_name, out_extension, out_delimiter, out_file_single_file, out_file_separate_line)
        self.raw_data_handler = DataHandler()
        self.raw_data_handler.structure = raw_data_structure
        self.clean_data_handler = DataHandler()
        self.clean_data_handler.structure = clean_data_structure
        # Accumulates validation messages from check_valid().
        self.error_message = ''
        # Strip surrounding quotes from input fields when requested.
        self.unquote = in_quote == 'yes'
        self.in_missing_value = in_missing_value
        self.out_missing_value = out_missing_value

    def check_valid(self):
        """Validate all four handlers, collecting their error messages.

        :return: True when every handler is valid, False otherwise.
        """
        valid = True
        for handler in (self.in_file_handler, self.out_file_handler, self.raw_data_handler, self.clean_data_handler):
            if not handler.check_valid():
                self.error_message = self.error_message + handler.error_message
                valid = False
        return valid

    def structure_raw_data(self):
        """Import the raw file, optionally unquote it, and structure it."""
        # False => import the full dataset (True would show only the first
        # 1000 rows, per the original note).
        self.in_file_handler.import_data(False, self.in_missing_value)
        # Materialize the filter: under Python 3 `filter` is a lazy
        # iterator and would be exhausted by its first consumer.
        self.in_file_handler.data = list(filter(None, self.in_file_handler.data))
        if self.unquote:
            self.in_file_handler.unquote_data()
        self.raw_data_handler.data = list(self.in_file_handler.data)
        self.raw_data_handler.structure_data()

    def generate_clean_data(self):
        """Clean the structured raw data into the clean data handler."""
        self.clean_data_handler.data = list(self.raw_data_handler.data_final)
        self.clean_data_handler.clean_data(self.raw_data_handler.structure)

    def save_clean_data(self):
        """Write the cleaned data to the output file."""
        self.out_file_handler.data = list(self.clean_data_handler.data_final)
        # A leftover loop that collected line[0]/line[1] into an unused
        # id_list was removed here (dead code).
        self.out_file_handler.write_data_file(self.out_missing_value)

    def map_data(self, file_directory, file_name, file_extension, file_delimiter, replace_ids, kept_id_position, lost_id_position, target_positions, drop_unreferenced_entries, target_unreferenced_entries, drop_ghosts, remove_duplicates, target_duplicates_set, merge_entries, target_merge_set, commands):
        """Map/merge the clean data against an external mapping file.

        :return: [mapped_data, str_counter_ids] produced by the Mapper.
        """
        # 1 - read the mapping file
        mapper_file_handler = FileHandler(file_directory, file_name, file_extension, file_delimiter)
        # TODO: validate mapper_file_handler before importing and stop the
        # system on error.
        mapper_file_handler.import_data(False)
        # 2 - move its rows into a data handler
        mapper_data_handler = DataHandler()
        mapper_data_handler.data = list(mapper_file_handler.data)
        # 3 - resolve column-name lists into index lists ('off' disables)
        target_position_index = []
        if target_positions != 'off':
            target_position_index = self.get_indeces_from_clean_structure(target_positions)
        target_unreferenced_entries_index = []
        if drop_unreferenced_entries != 'off':
            target_unreferenced_entries_index = self.get_indeces_from_clean_structure(target_unreferenced_entries)
        target_duplicates_set_index = []
        if remove_duplicates != 'off':
            target_duplicates_set_index = self.get_indeces_from_clean_structure(target_duplicates_set)
        target_merge_set_index = []
        if merge_entries != 'off':
            target_merge_set_index = self.get_indeces_from_clean_structure(target_merge_set)
        # 4 - translate the merge commands
        commands = self.get_commands(commands, target_merge_set_index)
        # 5 - run the mapper with all resolved settings
        mapper = Mapper(list(self.clean_data_handler.data_final), mapper_data_handler.data, replace_ids, kept_id_position, lost_id_position, target_position_index, drop_unreferenced_entries, target_unreferenced_entries_index, drop_ghosts, remove_duplicates, target_duplicates_set_index, merge_entries, target_merge_set_index, commands)
        return [list(mapper.mapped_data), mapper.str_counter_ids]

    def get_indeces_from_clean_structure(self, positions):
        """Translate a comma-separated list of variable names into the
        corresponding field indices of the clean structure.

        The structure list alternates name/type pairs, hence the `// 2`.
        'all' (or blank) selects every field; an unknown name aborts the
        run via sys.exit.
        """
        admitted_all_expression = ['all', 'All', ' ', '']
        indeces = []
        if positions in admitted_all_expression:
            # // keeps the count an int; plain / raised TypeError on Py3
            indeces = list(range(len(self.clean_data_handler.structure) // 2))
        else:
            # tolerate spaces around the separating commas
            positions = positions.replace(', ', ',')
            positions = positions.replace(' ,', ',')
            for position in positions.split(','):
                if position in self.clean_data_handler.structure:
                    index = [i for i in range(len(self.clean_data_handler.structure)) if self.clean_data_handler.structure[i] == position][0] // 2
                    indeces.append(index)
                else:
                    sys.exit("ERROR: variable {0} not found in the clean data structure".format(position))
        return indeces

    def get_commands(self, commands, other_fields):
        """Translate 'name:op' command pairs into a {str(index): op} dict,
        defaulting every remaining non-merge field to 'same'.

        :param other_fields: indices used as the merge set; a field may not
            appear both there and in the command list.
        """
        final_commands = dict()
        admitted_command_expression = ['+', 'same', 'avg']
        list_commands = commands.replace(', ', ',')
        list_commands = list_commands.replace(' ,', ',')
        list_commands = list_commands.split(',')
        for command in list_commands:
            command = command.replace(' :', ':')
            command = command.replace(': ', ':')
            command = command.split(':')
            if command[0] in self.clean_data_handler.structure:
                # name/type pairs: halve the raw position (// keeps it int)
                index = [i for i in range(len(self.clean_data_handler.structure)) if self.clean_data_handler.structure[i] == command[0]][0] // 2
                if command[1] in admitted_command_expression:
                    final_commands[str(index)] = command[1]
                else:
                    sys.exit("ERROR: Command \'{0}\' not admitted".format(command[1]))
            else:
                sys.exit("ERROR: In the Command list:\n variable \'{0}\' not found in the clean data structure".format(command[0]))
        # Default untouched fields to 'same'; a field in the merge set must
        # not be commanded at all.
        for i in range(len(self.clean_data_handler.structure) // 2):
            if str(i) not in final_commands:
                if i not in other_fields:
                    final_commands[str(i)] = 'same'
            else:
                if i in other_fields:
                    # NOTE(review): the error text reuses the last parsed
                    # command name, which may not be the offending field —
                    # confirm intended message.
                    sys.exit("ERROR: In the Command list:\n variable \'{0}\' given for the merging set and the command set".format(command[0]))
        return final_commands

    def get_clean_data(self):
        """Return the cleaned data rows."""
        consist_data = self.clean_data_handler.data_final
        return consist_data

    def get_merged_data(self):
        # NOTE(review): self.Merger is never assigned anywhere in this
        # class, so this will raise AttributeError — confirm where the
        # Merger is supposed to come from.
        return self.Merger.data_merged
class MyoDriver:
    """
    Responsible for myo connections and messages.

    Fixes applied: in the disconnect handler the reason checks were
    independent `if` statements with the `else` bound only to the last
    one, so reasons 574/534 printed their specific message *and* the
    generic "Disconnected:" line — now an exclusive elif chain.
    """

    def __init__(self, config):
        """Keep the config, build the data handler and bluetooth layer,
        and register the expected event handlers."""
        self.config = config
        print("OSC Address: " + str(self.config.OSC_ADDRESS))
        print("OSC Port: " + str(self.config.OSC_PORT))
        print()
        self.data_handler = DataHandler(self.config)
        self.bluetooth = Bluetooth(self.config.MESSAGE_DELAY)
        self.myos = []              # connected Myo devices
        self.myo_to_connect = None  # candidate found while scanning
        self.scanning = False
        # Add handlers for expected events
        self.set_handlers()

    def run(self):
        """
        Main. Disconnects possible connections and starts as many
        connections as needed.
        """
        self.disconnect_all()
        while len(self.myos) < self.config.MYO_AMOUNT:
            print("*** Connecting myo " + str(len(self.myos) + 1) + " out of " + str(self.config.MYO_AMOUNT) + " ***")
            print()
            self.add_myo_connection()
        self.receive()

    def receive(self):
        """Poll the bluetooth layer once for incoming events."""
        self.bluetooth.receive()

    ##########################################################################
    # CONNECT                                                                #
    ##########################################################################

    def add_myo_connection(self):
        """
        Procedure for connection with the Myo Armband. Scans, connects,
        disables sleep and starts EMG stream.
        """
        # Discover
        self._print_status("Scanning")
        self.bluetooth.gap_discover()
        # Await myo detection (handle_discover sets myo_to_connect).
        self.scanning = True
        while self.myo_to_connect is None:
            self.bluetooth.receive()
        # End gap
        self.bluetooth.end_gap()
        # Add per-device handlers
        self.bluetooth.add_connection_status_handler(self.create_connection_status_handle(self.myo_to_connect))
        self.bluetooth.add_disconnected_handler(self.create_disconnect_handle(self.myo_to_connect))
        # Direct connection. Reconnect implements the retry procedure.
        self.myos.append(self.myo_to_connect)
        self.connect_and_retry(self.myo_to_connect, self.config.RETRY_CONNECTION_AFTER, self.config.MAX_RETRIES)
        self.myo_to_connect = None

    def connect_and_retry(self, myo, timeout=None, max_retries=None):
        """
        Procedure for a reconnection.
        :param myo: Myo object to connect. Should have its address set
        :param timeout: Time to wait for response
        :param max_retries: Max retries before exiting the program
        :return: True if connection was successful, false otherwise.
        """
        retries = 0
        # The subroutine will await the response until timeout is met
        while not self.direct_connect(myo, timeout) and not myo.connected:
            retries += 1
            if max_retries is not None and retries > max_retries:
                print("Max retries reached. Exiting")
                sys.exit(1)
            print()
            print("Reconnection failed for connection " + str(myo.connection_id) + ". Retry " + str(retries) + "...")
        myo.set_connected(True)
        return True

    def direct_connect(self, myo_to_connect, timeout=None):
        """
        Procedure for a direct connection with the device.
        :param myo_to_connect: Myo object to connect. Should have its
            address set
        :param timeout: Time to wait for response
        :return: True if connection was successful, false otherwise.
        """
        t0 = time.time()
        # Direct connection
        self._print_status("Connecting to", myo_to_connect.address)
        self.bluetooth.direct_connect(myo_to_connect.address)
        # Await response
        while myo_to_connect.connection_id is None or not myo_to_connect.connected:
            if timeout is not None and timeout + t0 < time.time():
                return False
            self.receive()
        # Notify successful connection with print status and vibration
        self._print_status("Connection successful. Setting up...")
        self._print_status()
        self.bluetooth.send_vibration_medium(myo_to_connect.connection_id)
        # Disable sleep
        self.bluetooth.disable_sleep(myo_to_connect.connection_id)
        # Enable data and subscribe
        self.bluetooth.enable_data(myo_to_connect.connection_id, self.config)
        print("Myo ready", myo_to_connect.connection_id, myo_to_connect.address)
        print()
        return True

    ##########################################################################
    # HANDLERS                                                               #
    ##########################################################################

    def handle_discover(self, _, payload):
        """
        Handler for ble_evt_gap_scan_response event.
        """
        if self.scanning and not self.myo_to_connect:
            self._print_status("Device found", payload['sender'])
            if payload['data'].endswith(bytes(Final.myo_id)):
                if not self._has_paired_with(payload['sender']):
                    self.myo_to_connect = Myo(payload['sender'])
                    self._print_status("Myo found", self.myo_to_connect.address)
                    self._print_status()
                    self.scanning = False

    def _has_paired_with(self, address):
        """
        Checks if given address has already been recorded in a Myo
        initialization.
        :param address: address to check
        :return: True if already paired, False otherwise.
        """
        for m in self.myos:
            if m.address == address:
                return True
        return False

    def handle_connect(self, _, payload):
        """
        Handler for ble_rsp_gap_connect_direct event.
        """
        if payload['result'] != 0:  # non-zero result codes are errors
            if payload['result'] == 385:
                print("ERROR: Device in Wrong State")
            else:
                print(payload)
        else:
            self._print_status("Connection successful")

    def create_disconnect_handle(self, myo):
        def handle_disconnect(_, payload):
            """
            Handler for ble_evt_connection_status event.
            """
            if myo.connection_id == payload['connection']:
                print("Connection " + str(payload['connection']) + " lost.")
                myo.set_connected(False)
                # Exclusive chain: previously independent ifs let 574/534
                # also fall into the final else and print a duplicate line.
                if payload['reason'] == 574:
                    print("Disconnected. Reason: Connection Failed to be Established.")
                elif payload['reason'] == 534:
                    print("Disconnected. Reason: Connection Terminated by Local Host.")
                elif payload['reason'] == 520:
                    print("Disconnected. Reason: Connection Timeout.")
                else:
                    print("Disconnected:", payload)
                # Won't return until the connection is established successfully
                print("Reconnecting...")
                self.connect_and_retry(myo, self.config.RETRY_CONNECTION_AFTER, self.config.MAX_RETRIES)
        return handle_disconnect

    def create_connection_status_handle(self, myo):
        def handle_connection_status(_, payload):
            """
            Handler for ble_evt_connection_status event.
            """
            # flags == 5 observed on a completed connection — payload
            # semantics come from the BLE stack; confirm against its docs.
            if payload['address'] == myo.address and payload['flags'] == 5:
                self._print_status("Connection status: ", payload)
                myo.set_connected(True)
                myo.set_id(payload['connection'])
                self._print_status("Connected with id", myo.connection_id)
        return handle_connection_status

    def handle_attribute_value(self, e, payload):
        """
        Handler for EMG events, expected as a
        ble_evt_attclient_attribute_value event with handle 43, 46, 49
        or 52.
        """
        emg_handles = [
            ServiceHandles.EmgData0Characteristic,
            ServiceHandles.EmgData1Characteristic,
            ServiceHandles.EmgData2Characteristic,
            ServiceHandles.EmgData3Characteristic
        ]
        imu_handles = [
            ServiceHandles.IMUDataCharacteristic
        ]
        myo_info_handles = [
            ServiceHandles.DeviceName,
            ServiceHandles.FirmwareVersionCharacteristic,
            ServiceHandles.BatteryCharacteristic
        ]
        # Delegate EMG
        if payload['atthandle'] in emg_handles:
            self.data_handler.handle_emg(payload)
        # Delegate IMU
        elif payload['atthandle'] in imu_handles:
            self.data_handler.handle_imu(payload)
        # TODO: Delegate classifier
        # Delegate myo info
        elif payload['atthandle'] in myo_info_handles:
            for myo in self.myos:
                myo.handle_attribute_value(payload)
        # Print otherwise
        else:
            self._print_status(e, payload)

    def set_handlers(self):
        """
        Set handlers for relevant events.
        """
        self.bluetooth.add_scan_response_handler(self.handle_discover)
        self.bluetooth.add_connect_response_handler(self.handle_connect)
        self.bluetooth.add_attribute_value_handler(self.handle_attribute_value)

    ##########################################################################
    # MYO                                                                    #
    ##########################################################################

    def get_info(self):
        """
        Send read attribute messages and await answer.
        """
        if len(self.myos):
            self._print_status("Getting myo info")
            self._print_status()
            for myo in self.myos:
                self.bluetooth.read_device_name(myo.connection_id)
                self.bluetooth.read_firmware_version(myo.connection_id)
                self.bluetooth.read_battery_level(myo.connection_id)
            while not self._myos_ready():
                self.receive()
            print("Myo list:")
            for myo in self.myos:
                print(" - " + str(myo))
            print()

    def disconnect_all(self):
        """
        Stop possible scanning and close all connections.
        """
        self.bluetooth.disconnect_all()

    def deep_sleep_all(self):
        """
        Send deep sleep (turn off) signal to every connected myo.
        """
        print("Turning off devices...")
        for m in self.myos:
            self.bluetooth.deep_sleep(m.connection_id)
        print("Disconnected.")

    ##########################################################################
    # UTILS                                                                  #
    ##########################################################################

    def _myos_ready(self):
        """
        :return: True if every myo has its data set, False otherwise.
        """
        for m in self.myos:
            if not m.ready():
                return False
        return True

    def _print_status(self, *args):
        """
        Printer function for VERBOSE support.
        """
        if self.config.VERBOSE:
            print(*args)