def update_algo_dropdown(self, dataset):
    """Rebuild the per-signal algorithm dropdown (grid row 9) for `dataset`.

    When `dataset` is None a placeholder label is shown instead of the
    dropdown; when no datasets exist at all, a disabled dummy dropdown
    plus a hint label are shown.
    """
    if dataset is None:
        # Nothing selected yet: show a hint in place of the dropdown.
        tk.Label(self, text="Please import a dataset first").grid(row=9, column=1, sticky="w")
        return

    file_paths = get_dataset_raw_file_paths(dataset)
    choices = [_helper.makeIdFromFilename(name) for name in file_paths]
    if len(file_paths) > 1:
        # Offer an aggregate option when the dataset has several signals.
        choices.append("ALL")

    self.algorithmCSVSelected = tk.StringVar(self)
    self.algorithmCSVSelected.set(choices[0])

    if len(_helper.getDatasetList()) > 0:
        dropdown = tk.OptionMenu(self, self.algorithmCSVSelected, *choices)
        dropdown.config(width=DROPDOWN_DATASET_WIDTH)
        dropdown.grid(row=9, column=1, sticky="w")
    else:
        # No datasets at all: disabled placeholder dropdown plus hint label.
        placeholder = tk.OptionMenu(self, "None", 0, command=self.handle_select_dataset)
        placeholder.config(width=DROPDOWN_DATASET_WIDTH)
        placeholder.grid(row=9, column=1, sticky="w")
        placeholder.config(state="disabled")
        hint = tk.Label(self, text="Please import a dataset first", anchor='w')
        hint.grid(row=9, column=1, sticky="w")
        hint.config(width=NO_DATASET_WIDTH)
def handle_delete_dataset(self):
    """Delete the currently selected dataset after confirming with the user.

    Asks whether related files should also be removed, performs the
    deletion, then refreshes all dataset-dependent widgets.
    """
    # Nothing to delete if no datasets have been imported yet.
    if len(_helper.getDatasetList()) == 0:
        # Fixed typo in user-facing message ("remmoved" -> "removed").
        tk.messagebox.showerror("Delete Dataset", "There is nothing to be removed")
        return

    answer = tk.messagebox.askyesnocancel(
        "Delete Dataset",
        "Would you also like to delete all files related to the dataset?"
    )
    if answer is None:
        # User cancelled the dialog.
        return

    dataset = self.datasetSelected.get()
    import delete_dataset  # local import: only pay the cost when deleting
    delete_dataset.main(dataset, allfiles=answer)

    datasets = _helper.getDatasetList()
    self.update_select_dataset_widget(datasets)
    self.update_copy_labels_widget(datasets)
    # The dataset was just deleted; only pass it along if it somehow still
    # exists, otherwise reset the algorithm dropdown to its empty state
    # (passing a removed dataset would make update_algo_dropdown look up
    # raw files that no longer exist).
    self.update_algo_dropdown(dataset if dataset in datasets else None)
def handle_select_dataset(self, event):
    """Callback fired when a dataset is chosen; refreshes dependent widgets."""
    chosen = event
    # Rebuild the "Copy Labels From" dropdown with the current dataset list.
    self.copyLabelsFromDataset.set(chosen)
    dropdown = tk.OptionMenu(self, self.copyLabelsFromDataset, *_helper.getDatasetList())
    dropdown.config(width=DROPDOWN_DATASET_WIDTH)
    dropdown.grid(row=10, column=1, sticky="w")
    # Refresh the algorithm signal dropdown for the newly selected dataset.
    self.update_algo_dropdown(chosen)
def _process_request(self, path, vars):
    """Route a single HTTP request by `path` and send the response.

    `vars` holds the parsed request parameters. Dataset/session values are
    only accepted when they match ALNUMUN_RE; invalid or missing values
    fall back to a default or produce a 404.
    """
    global _debug_delay
    # Optional artificial latency for debugging client-side behavior.
    if _debug_delay:
        time.sleep(_debug_delay)
    if path == '/signaligner.html':
        # Sanitize dataset/session query parameters.
        if 'dataset' in vars and ALNUMUN_RE.match(vars['dataset']):
            # NOTE(review): `dataset` is assigned here but not used below —
            # confirm whether it was meant to be passed to replace_vars.
            dataset = vars['dataset']
        else:
            dataset = 'null'
        if 'session' in vars and ALNUMUN_RE.match(vars['session']):
            session = vars['session']
        else:
            session = SESSION_ERROR

        def replace_data(data):
            # Substitute template variables (session, etc.) into the page.
            data = replace_vars(data, session, False)
            return data

        self._send_header_and_file_data(
            _folder.file_abspath('signaligner/signaligner.html'), False,
            CTYPE_HTML, replace_data)
    elif path == '/signaligner.js':

        def replace_data(data):
            # NOTE(review): this wrapper is never used — replace_mode_config
            # is passed directly below (same effect); confirm and clean up.
            data = replace_mode_config(data)
            return data

        self._send_header_and_file_data(
            _folder.file_abspath('signaligner/signaligner.js'), False,
            CTYPE_JS, replace_mode_config)
    elif path == '/fetchdatasetlist':
        # Return the full dataset list as JSON.
        datasets = _helper.getDatasetList()
        self._send_header(200, CTYPE_PLAIN)
        self._send_data(json.dumps(datasets), False)
    elif path == '/fetchdataset':
        # Serve either a dataset's config file or one of its tile files.
        if 'dataset' in vars and ALNUMUN_RE.match(vars['dataset']):
            dataset_name = vars['dataset']
            if 'type' in vars and vars['type'] == 'config':
                file_path = _helper.datasetConfigFilename(dataset_name)
            elif 'type' in vars and vars[
                    'type'] == 'tile' and 'id' in vars and ALNUMUN_RE.match(
                        vars['id']):
                file_path = os.path.join(
                    _helper.datasetTileDir(dataset_name),
                    vars['id'] + '.json')
            else:
                # Unknown or malformed `type`/`id` parameters.
                self._send_header(404, CTYPE_PLAIN)
                return
            if not os.path.exists(file_path):
                self._send_header(404, CTYPE_PLAIN)
                return
            self._send_header_and_file_data(file_path, False, CTYPE_PLAIN)
        else:
            self._send_header(404, CTYPE_PLAIN)
    elif path == '/fetchlabels':
        # Return the latest labels for a dataset; empty body when none exist.
        if 'dataset' in vars and ALNUMUN_RE.match(vars['dataset']):
            dataset = vars['dataset']
            self._send_header(200, CTYPE_PLAIN)
            labels = _helper.getLabelsLatest(dataset)
            if labels:
                self._send_data(json.dumps(labels), False)
        else:
            self._send_header(404, CTYPE_PLAIN)
    elif path == '/reportlabels':
        # Persist a label report: append to the log file, overwrite the
        # "latest" file, then echo the stored latest labels back.
        if 'data' in vars:
            data = json.loads(vars['data'])
            if 'dataset' in data and ALNUMUN_RE.match(
                    data['dataset']
            ) and 'session' in data and ALNUMUN_RE.match(data['session']):
                dataset = data['dataset']
                session = data['session']
                # Append-mode log keeps the full history of reports.
                with open(
                        _helper.ensureDirExists(
                            _helper.logLabelsFilename(dataset, session),
                            True), 'at') as dfile:
                    dfile.write(json.dumps(data) + '\n')
                # Write-mode latest file keeps only the newest report.
                with open(
                        _helper.ensureDirExists(
                            _helper.latestLabelsFilename(dataset, session),
                            True), 'wt') as dfile:
                    dfile.write(json.dumps(data) + '\n')
                # Read back what was just written and return it to the client.
                with open(
                        _helper.ensureDirExists(
                            _helper.latestLabelsFilename(dataset, session),
                            True), 'rt') as dfile:
                    response = json.loads(dfile.read())
                self._send_header(200, CTYPE_PLAIN)
                self._send_data(json.dumps(response), False)
            else:
                self._send_header(404, CTYPE_PLAIN)
        else:
            self._send_header(404, CTYPE_PLAIN)
    elif path == '/mturksubmit' or path == '/mturksubmissions':
        # Record an MTurk submission (submit only) and compute the payout
        # amount plus a confirmation code for the session.
        if 'data' in vars:
            data = json.loads(vars['data'])
            if 'dataset' in data and ALNUMUN_RE.match(
                    data['dataset']
            ) and 'session' in data and ALNUMUN_RE.match(data['session']):
                dataset = data['dataset']
                session = data['session']
                if path == '/mturksubmit':
                    mturk_submit = _helper.mturkSubmitLabelsFilename(
                        dataset, session)
                    # Only the first submission per dataset/session is kept.
                    if not os.path.exists(mturk_submit):
                        with open(
                                _helper.ensureDirExists(
                                    mturk_submit, True), 'wt') as dfile:
                            dfile.write(json.dumps(data) + '\n')
                submissions = _helper.mturkGetSubmissions(session)
                total = 0
                datasets = []
                for submission in submissions:
                    # Payout: (score/100)^2 weighted by days of data.
                    score = submission['score'] / 100.0
                    score = score**2
                    score *= submission['daysofdata']
                    # minimum of 1 cent for tutorial levels, 20 cents for challenge
                    score = max(score, 0.20)
                    if submission['istutorial']:
                        # Tutorial levels pay 5% of the normal amount.
                        score *= 0.05
                    total += score
                    datasets.append(submission['dataset'])
                # Convert dollars to integer cents.
                total = int(total * 100)
                # Confirmation code: per-session random prefix + amount +
                # a short md5 checksum so the code can be validated later.
                if session not in _mturk_session_codes:
                    _mturk_session_codes[session] = _helper.makeId()[:3]
                code = _mturk_session_codes[session]
                code = code + ('%03d' % total).upper()
                code = code + hashlib.md5(
                    code.encode('utf-8')).hexdigest()[:3].upper()
                response = {
                    'amount': '$%d.%02d' % (total // 100, total % 100),
                    'code': code,
                    'datasets': datasets
                }
                self._send_header(200, CTYPE_PLAIN)
                self._send_data(json.dumps(response), False)
            else:
                self._send_header(404, CTYPE_PLAIN)
        else:
            self._send_header(404, CTYPE_PLAIN)
    elif path == '/log':
        # Best-effort play-log append; always responds 200.
        if 'data' in vars:
            with open(
                    _helper.ensureDirExists(
                        _folder.data_abspath('playlog'), True),
                    'at') as dfile:
                dfile.write(vars['data'] + '\n')
        self._send_header(200, CTYPE_PLAIN)
    elif HTML_RE.match(path):
        # Static HTML pages, with template-variable substitution.
        if path == '/mturk_start.html':
            global _mode
            # The MTurk start page is only served in MTURK mode.
            if _mode != 'MTURK':
                self._send_header(200, CTYPE_PLAIN)
                self._send_data(
                    'mode must be MTURK to request mturk_start.html',
                    False)
                return
        if 'session' in vars and ALNUMUN_RE.match(vars['session']):
            session = vars['session']
        else:
            session = SESSION_ERROR

        def replace_data(data):
            return replace_vars(data, session, True)

        self._send_header_and_file_data(
            _folder.file_abspath('static' + path), False, CTYPE_HTML,
            replace_data)
    elif PNG_RE.match(path):
        # Static binary assets.
        self._send_header_and_file_data(
            _folder.file_abspath('static' + path), True, CTYPE_PNG)
    elif JS_RE.match(path):
        self._send_header_and_file_data(
            _folder.file_abspath('static' + path), False, CTYPE_JS)
    elif CSS_RE.match(path):
        self._send_header_and_file_data(
            _folder.file_abspath('static' + path), False, CTYPE_CSS)
    else:
        # Anything unrecognized is a 404.
        self._send_header(404, CTYPE_PLAIN)
def create_widgets(self):
    """Build and lay out all launcher controls on the grid."""

    def put_label(text, row, column, **grid_kwargs):
        # Create a label and place it at (row, column), left-aligned.
        tk.Label(self, text=text).grid(row=row, column=column, sticky="w", **grid_kwargs)

    def put_button(text, command, row, column):
        # Create a fixed-width button bound to `command` and place it.
        tk.Button(self, text=text, command=command,
                  width=BUTTON_WIDTH).grid(row=row, column=column, sticky="w")

    self.server_button = tk.Button(self)

    # Dataset import/open controls.
    put_label('Open Test Dataset:', 0, 0)
    put_button('Open', self.handle_opentest, 0, 1)
    put_label('Import Single Dataset:', 1, 0)
    put_button('Select File/Folder', self.handle_import_dataset, 1, 1)
    put_label('Import Multiple Datasets:', 2, 0)
    put_button('Select Folder', self.handle_import_all_dataset, 2, 1)
    tk.Label(self, text="").grid(row=3, sticky="w")  # spacer row

    # Dataset selection and management.
    put_label('Select Dataset:', 4, 0)
    datasets = _helper.getDatasetList()
    self.update_select_dataset_widget(datasets)
    put_label('Open Selected Dataset:', 5, 0)
    put_button("Open", self.handle_load_dataset, 5, 1)
    put_label("Delete Selected Dataset:", 6, 0)
    put_button("Delete", self.handle_delete_dataset, 6, 1)
    put_label('Import Dataset Labels:', 7, 0)
    put_button('Select File', self.handle_import_labels, 7, 1)
    put_label('Export Dataset Labels:', 8, 0)
    put_button("Export", self.handle_export_labels, 8, 1)

    # Algorithm chooser, per-signal dropdown, and Run button.
    algorithms = ['MUSS', 'SWaN', 'QC']
    self.algorithmSelected = tk.StringVar(self)
    self.algorithmSelected.set(algorithms[0])
    put_label("Run Algorithm on Signal:", 9, 0)
    chooser = tk.OptionMenu(self, self.algorithmSelected, *algorithms)
    chooser.config(width=DROPDOWN_ALGO_WIDTH)
    chooser.grid(row=9, column=2, sticky="w")
    self.update_algo_dropdown(
        None if self.datasetSelected is None else self.datasetSelected.get())
    put_button("Run", self.handle_run_algo, 9, 3)

    put_label("Copy Labels From:", 10, 0)
    self.update_copy_labels_widget(datasets)
    tk.Label(self, text="").grid(row=11, sticky="w")  # spacer row

    put_label("Quit Signalauncher:", 12, 0)
    put_button("Quit", self.handle_quit, 12, 1)

    # Informational rows: data folder and log file locations.
    put_label("Data In:", 13, 0)
    put_label(_folder.data_folder, 13, 1, columnspan=3)
    put_label("Log File:", 14, 0)
    put_label(logfilename, 14, 1, columnspan=3)