def on_simulation_connected(self, simulation):
    """Store the simulation handle; warn the user when the connection failed."""
    self.simulation = simulation
    if simulation is not None:
        return
    # A None handle means the v-rep connection attempt did not succeed.
    QtWidgets.QMessageBox().warning(
        self, "Warning", "Couldn't connect to v-rep.",
        QtWidgets.QMessageBox.Ok)
def main():
    """Parse command-line options and run the pygesture GUI application.

    Options:
        -c/--config : path to the config file (default ``config.py``).
        -t/--test   : enable test mode (stored on the config object).
    """
    parser = argparse.ArgumentParser(
        description="EMG gesture recognition with real-time feedback.")
    parser.add_argument(
        '-c', '--config', dest='config', default='config.py',
        help="Config file. Default is `config.py` (current directory).")
    parser.add_argument(
        '-t', '--test', dest='test', action='store_true', default=False)
    args = parser.parse_args()

    cfg = config.Config(args.config)
    cfg.test = args.test

    app = QtWidgets.QApplication([])
    mw = PygestureMainWindow(cfg)
    mw.show()

    # Capture the Qt event-loop result instead of always exiting with 0,
    # so callers (shells, CI) can observe abnormal termination.
    ret = app.exec_()
    app.deleteLater()
    sys.exit(ret)
def on_participant_selection(self, text):
    """React to a participant pick: reload their sessions and notify listeners."""
    pid = str(text)
    if not pid:
        # Ignore the empty selection (e.g. when the list is cleared).
        return

    self.pid = pid
    self.sid_list = filestruct.get_session_list(
        self.data_path, self.pid, search=self.session_filter)

    # Rebuild the session list widget from scratch.
    self.ui.sessionList.clear()
    for sid in self.sid_list:
        QtWidgets.QListWidgetItem(sid, self.ui.sessionList)

    self.participant_selected.emit(pid)
def __init__(self, config, parent=None):
    """Build the main window: UI, recorder thread, paths, tabs, status bar.

    Parameters
    ----------
    config : Config-like object providing at least ``daq`` (DAQ settings
        passed to the record thread). Other attributes are used by the
        init helpers — TODO confirm full set against the Config class.
    parent : optional parent widget, forwarded to QMainWindow.
    """
    super(PygestureMainWindow, self).__init__(parent)
    self.cfg = config
    # No recording session is active until the user creates one.
    self.session = None

    # Generated UI class; setupUi must run before any self.ui.* access.
    self.ui = Ui_PygestureMainWindow()
    self.ui.setupUi(self)

    # Background thread that drives data acquisition from the DAQ.
    self.record_thread = recorder.RecordThread(self.cfg.daq)

    self.init_paths()
    self.init_tabs()

    # Permanent status-bar label showing the signed-in session state.
    self.statusbar_label = QtWidgets.QLabel("not signed in")
    self.ui.statusbar.addPermanentWidget(self.statusbar_label)

    self.ui.actionNew.triggered.connect(self.show_new_session_dialog)
def init_base_session(self):
    """Populate the training-session list for the base session's participant.

    Sessions lacking a feature file are listed but disabled, since they
    cannot contribute training data. Also lazily connects the simulation
    (only once the window is visible) and records handedness.
    """
    # Connect to the simulator on first use, but not before the window
    # is actually shown.
    if self.simulation is None and self.isVisible():
        self.init_simulation()

    self.pid = self.base_session.pid

    self.ui.trainingList.clear()
    self.sid_list = filestruct.get_session_list(
        self.cfg.data_path, self.pid, search="train")
    for sid in self.sid_list:
        item = QtWidgets.QListWidgetItem(sid, self.ui.trainingList)
        item.setFlags(item.flags() | QtCore.Qt.ItemIsUserCheckable)
        item.setCheckState(QtCore.Qt.Unchecked)
        try:
            filestruct.find_feature_file(self.cfg.data_path, self.pid, sid)
        except Exception:
            # No feature file found -> this session can't be trained on.
            # (Narrowed from a bare `except:`, which also swallowed
            # SystemExit and KeyboardInterrupt.)
            item.setFlags(item.flags() & ~QtCore.Qt.ItemIsEnabled)

    # Direct boolean expression instead of `True if ... else False`.
    self.lefty = (self.base_session.hand == 'left')
def new_session(self, data):
    """Create a recording session from dialog data and open its task tab."""
    if not data['pid'] or not data['sid']:
        QtWidgets.QMessageBox.critical(
            self, "Error", "Session info incomplete.")
        return

    self.session = Session(
        self.cfg.data_path, data['pid'], data['sid'], data['task'],
        data['configuration'], data['hand'])

    # if session exists, make sure the user wants to overwrite it
    try:
        self.session.init_file_structure()
    except IOError:
        answer = QtWidgets.QMessageBox().warning(
            self,
            "Warning",
            "Session directory already exists.\nOverwrite?",
            QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
        if answer == QtWidgets.QMessageBox.No:
            self.session = None
            return
        self.session.init_file_structure(force=True)

    self.remove_session_tab()

    # Instantiate the task-specific tab widget and make it current.
    task = self.session.task
    widgetcls = self.cfg.ui_tabs[task]
    widget = widgetcls(self.cfg, self.record_thread, self.session,
                       parent=self)
    tabs = self.ui.tabWidget
    tabs.addTab(widget, task)
    tabs.setCurrentIndex(tabs.count() - 1)

    self.statusbar_label.setText("Session " + str(self.session))
def build_pipeline(self):
    """
    Builds the processing pipeline.

    Most of the pipeline is specified by the config, but we need to
    gather training data, build a classifier with that data, and insert
    the classifier into the pipeline.
    """
    # Collect the checked training sessions.
    train_list = []
    for i in range(self.ui.trainingList.count()):
        item = self.ui.trainingList.item(i)
        if item.checkState():
            train_list.append(str(item.text()))
    self.training_sessions = train_list

    if not train_list:
        QtWidgets.QMessageBox().critical(
            self, "Error",
            "No sessions selected for training.",
            QtWidgets.QMessageBox.Ok)
        return

    # get only the labels for the selected TAC session
    # need to loop over available gestures to catch those with no dof
    labels = []
    mapping = {}
    for gesture in self.cfg.gestures:
        if gesture.dof is None or gesture in self.tac_session.gestures:
            labels.append(gesture.label)
            mapping[gesture.label] = gesture.action

    file_list = filestruct.get_feature_file_list(
        self.cfg.data_path, self.pid, train_list)
    training_data = processing.read_feature_file_list(
        file_list, labels=labels)

    # get average MAV for each gesture label to auto-set boosts
    # warning: super hacky
    n_channels = len(self.cfg.channels)
    # Locate the column offset of the MAV feature block. If no MAV
    # feature is configured, `start` stays 0 (same as the original code).
    start = 0
    offset = 0
    for feature in self.cfg.feature_extractor.features:
        if 'MAV' in str(feature):
            start = offset
            break
        offset += feature.dim_per_channel * n_channels

    X, y = training_data
    # BUG FIX: the MAV block occupies columns [start, start + n_channels);
    # the old slice `start:n_channels` was only correct when MAV happened
    # to be the first feature (start == 0) — confirm MAV dim_per_channel
    # is 1, as the offset arithmetic assumes.
    X = X[:, start:start + n_channels]

    boosts = dict()
    for label in labels:
        mav_avg = np.mean(X[y == label, :], axis=1)
        # -np.partition(-data, N) gets N largest elements of data.
        # Guard: np.partition raises when kth >= size, so cap at the
        # number of samples available for this label.
        k = min(10, mav_avg.shape[0])
        boosts[label] = 1 / np.mean(-np.partition(-mav_avg, k - 1)[:k])
    self.boosts = boosts

    # re-create the controller to make sure it has the correct mapping
    self.controller = control.DBVRController(
        mapping=mapping,
        ramp_length=self.cfg.controller.ramp_length,
        boosts=1 if self.test else boosts)

    self.cfg.learner.fit(*training_data)

    self.pipeline = pipeline.Pipeline([
        self.cfg.conditioner,
        self.cfg.windower,
        (
            features.FeatureExtractor([features.MAV()], n_channels),
            [self.cfg.feature_extractor, self.cfg.learner],
        )
    ])

    self.record_thread.set_pipeline(self.pipeline)
def on_record_error(self):
    """Pause recording and alert the user that the DAQ failed."""
    self.on_pause_clicked()
    QtWidgets.QMessageBox().critical(
        self, "Error", "DAQ failure.",
        QtWidgets.QMessageBox.Ok)
def __init__(self, parent=None):
    """Create the image label used to display gesture pictures."""
    super(GestureView, self).__init__(parent)
    label = QtWidgets.QLabel(self)
    label.setScaledContents(True)
    # Zero size collapses the label until it is resized — presumably by
    # the view when a pixmap is set; verify against callers.
    label.setFixedSize(0, 0)
    self.label = label