def closeEvent(self, event):
    """Intercept the window-close event and ask the user to confirm.

    A sterner prompt is shown when ``self.multiple_data_sets`` still holds
    unsaved analysed data. The close proceeds only on a "Yes" reply.
    """
    # Pick the prompt first, then show a single confirmation dialog.
    if self.multiple_data_sets.empty:
        prompt = "Are you sure to quit?"
    else:
        prompt = "You have unsaved analysed data! \n Are you sure to quit?"
    reply = MessageBox.question(self, 'Warning!', prompt,
                                QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
    # Accept lets the window close; ignore cancels the close request.
    if reply == QtGui.QMessageBox.Yes:
        event.accept()
    else:
        event.ignore()
def closeEvent(self, event):
    """Intercept the window-close event and ask the user to confirm.

    A sterner prompt is shown when ``self.multiple_data_sets`` still holds
    unsaved analysed data. The close proceeds only on a "Yes" reply.
    """
    # Pick the prompt first, then show a single confirmation dialog.
    if self.multiple_data_sets.empty:
        prompt = "Are you sure to quit?"
    else:
        prompt = "You have unsaved analysed data! \n Are you sure to quit?"
    reply = MessageBox.question(self, 'Warning!', prompt,
                                QMessageBox.Yes, QMessageBox.No)
    # Accept lets the window close; ignore cancels the close request.
    if reply == QMessageBox.Yes:
        event.accept()
    else:
        event.ignore()
def coordinates_analysis(self):
    """Analyse the detected peaks and append the results to the dataset.

    Builds a per-peak table (peak time, amplitude, borders, area) from the
    coordinates collected on the plot, derives secondary statistics
    (periods, frequencies, half-decay, baseline-normalised amplitudes and
    peak-size class frequencies), filters the columns according to the
    check-boxes on the settings tab and appends the result to
    ``self.multiple_data_sets``. Finally offers to load the next file.

    Reads instance state: ``coordinates``, ``left_peak_border``,
    ``right_peak_border``, ``amplitudes``, ``area``, ``x``, ``y``,
    ``data_after_filter``, ``graph_name`` and several GUI widgets.
    """
    coord_x, coord_y = zip(*self.coordinates)
    leftpb_x, leftpb_y = zip(*self.left_peak_border)
    rightpb_x, rightpb_y = zip(*self.right_peak_border)

    # Absolute amplitude relative to the maximal one (F/Fmax).
    ampl_max = max(self.amplitudes)
    relative_amplitude = [i / ampl_max for i in self.amplitudes]

    # Temporary DataFrame used only for sorting and per-peak calculations.
    temp_dataset = list(
        zip(coord_x, self.amplitudes, relative_amplitude, leftpb_x, leftpb_y,
            rightpb_x, rightpb_y, self.area))
    df = pd.DataFrame(data=temp_dataset,
                      columns=['Peak Time', 'Amplitude',
                               'Relative Amplitude \n (F/Fmax)',
                               'Peak Start Time', 'Peak Start Ordinate',
                               'Peak Stop Time', 'Peak Stop Ordinate',
                               'Area'])

    # Sort peaks chronologically and reset the index so that .at[i, ...]
    # walks the peaks in order of appearance.
    df_sorted = df.sort_values(['Peak Time'], ascending=True)
    df_sorted.index = range(0, len(df_sorted))
    n_peaks = len(df_sorted)

    # Inter-peak periods; a NaN placeholder keeps the column length equal
    # to n_peaks (there are only n_peaks - 1 intervals).
    periods = [df_sorted.at[i, 'Peak Time'] - df_sorted.at[i - 1, 'Peak Time']
               for i in range(1, n_peaks)]
    periods.insert(0, np.nan)

    # Frequencies derived from the periods (NaN propagates for the first peak).
    frequencies = [1 / i for i in periods]

    # Peak timing: full width, rise time and decay time (border to border).
    peak_full_time = [df_sorted.at[i, 'Peak Stop Time'] -
                      df_sorted.at[i, 'Peak Start Time'] for i in range(n_peaks)]
    peak_up_time = [df_sorted.at[i, 'Peak Time'] -
                    df_sorted.at[i, 'Peak Start Time'] for i in range(n_peaks)]
    peak_down_time = [df_sorted.at[i, 'Peak Stop Time'] -
                      df_sorted.at[i, 'Peak Time'] for i in range(n_peaks)]

    # Area under each peak using the composite trapezoidal rule.
    peak_area = [np.trapz(df_sorted.at[i, 'Area']) for i in range(n_peaks)]

    # Half-decay analysis: within the decay region (peak -> right border)
    # find the sample closest to half of the peak amplitude.
    half_decay_time = []
    half_decay_amplitude = []
    for i in range(n_peaks):
        half_decay_ampl = df_sorted.at[i, 'Amplitude'] / 2
        peak_index = self.x.index(df_sorted.at[i, 'Peak Time'])
        stop_idx = self.x.index(df_sorted.at[i, 'Peak Stop Time'])
        data_decay_region = self.data_after_filter[peak_index:stop_idx]
        time_decay_region = self.x[peak_index:stop_idx]
        half_decay_idx = (np.abs(data_decay_region - half_decay_ampl)).argmin()
        half_decay_amplitude.append(half_decay_ampl)
        half_decay_time.append(time_decay_region[half_decay_idx] -
                               df_sorted.at[i, 'Peak Time'])

    # Amplitude normalised to the baseline F0: the Savitzky-Golay-filtered
    # raw trace sampled at each peak's start time.
    sg_window_frame = self.BoxSGwindowFrame.value()
    sg_polynom_degree = self.BoxSGpolynomDegree.value()
    orig_data_filtered = sig.savgol_filter(self.y, sg_window_frame,
                                           sg_polynom_degree)
    normalised_amplitude = []
    for i in range(n_peaks):
        start_idx = self.x.index(df_sorted.at[i, 'Peak Start Time'])
        F0 = orig_data_filtered[start_idx]
        normalised_amplitude.append(df_sorted.at[i, 'Amplitude'] / F0)

    # Normalised amplitude relative to its own maximum.
    maxATB = max(normalised_amplitude)
    relative_normalised_amplitude = [i / maxATB for i in normalised_amplitude]

    # Single-value columns: the value sits in the first row, the rest of the
    # column is padded with NaN so every column has n_peaks entries.
    normalised_amplitude_max = [np.nan] * (n_peaks - 1)
    normalised_amplitude_max.insert(0, maxATB)
    file_name = [np.nan] * (n_peaks - 1)
    file_name.insert(0, self.graph_name)
    absolute_amplitude_max = [np.nan] * (n_peaks - 1)
    absolute_amplitude_max.insert(0, max(df_sorted['Amplitude']))

    # Classify peaks by amplitude (thirds of the maximum) and express each
    # class as a frequency over the whole recording time.
    big_peaks_number = [p for p in self.amplitudes if (p > ampl_max * 0.66)]
    medium_peaks_number = [p for p in self.amplitudes
                           if (p > ampl_max * 0.33 and p <= ampl_max * 0.66)]
    small_peaks_number = [p for p in self.amplitudes
                          if (p > 0 and p <= ampl_max * 0.33)]
    total_time = self.x[-1] - self.x[0]  # hoisted loop-invariant time span
    big_peaks_frequency = [np.nan] * (n_peaks - 1)
    big_peaks_frequency.insert(0, len(big_peaks_number) / total_time)
    medium_peaks_frequency = [np.nan] * (n_peaks - 1)
    medium_peaks_frequency.insert(0, len(medium_peaks_number) / total_time)
    small_peaks_frequency = [np.nan] * (n_peaks - 1)
    small_peaks_frequency.insert(0, len(small_peaks_number) / total_time)

    final_dataset = list(zip(file_name, df_sorted['Peak Time'],
                             df_sorted['Amplitude'],
                             df_sorted['Relative Amplitude \n (F/Fmax)'],
                             absolute_amplitude_max, normalised_amplitude,
                             relative_normalised_amplitude,
                             normalised_amplitude_max, periods, frequencies,
                             half_decay_time, half_decay_amplitude,
                             df_sorted['Peak Start Time'],
                             df_sorted['Peak Start Ordinate'],
                             df_sorted['Peak Stop Time'],
                             df_sorted['Peak Stop Ordinate'], peak_up_time,
                             peak_down_time, peak_full_time, peak_area,
                             big_peaks_frequency, medium_peaks_frequency,
                             small_peaks_frequency))
    final_dataframe = pd.DataFrame(
        data=final_dataset,
        columns=['File name', 'Peak time', 'Absolute amplitude',
                 'Absolute amplitude (%)', 'Absolute amplitude MAX',
                 'Normalised amplitude', 'Normalised amplitude (%)',
                 'Normalised amplitude MAX', 'Period', 'Frequency',
                 'Half-decay time', 'Half-decay amplitude', 'Start time',
                 'Start ordinate', 'Stop time', 'Stop ordinate',
                 'Ascending time', 'Decay time', 'Full peak time', 'AUC',
                 'Big peaks, Hz', 'Mid peaks, Hz', 'Small peaks, Hz'])

    # Specify data for export according to the settings tab in the GUI
    # and append the current analysed dataset to existing ones.
    try:
        # Checkbox -> column mapping replaces the long if-chain.
        # BUGFIX: the original wired chbxSmallPeaks to 'Big peaks, Hz' and
        # chbxBigPeaks to 'Small peaks, Hz'; the pairing is now consistent.
        checkbox_to_column = (
            (self.chbxFileName, 'File name'),
            (self.chbxPeakTime, 'Peak time'),
            (self.chbxAmplAbs, 'Absolute amplitude'),
            (self.chbxAmplAbsRel, 'Absolute amplitude (%)'),
            (self.chbxAmplAbsMax, 'Absolute amplitude MAX'),
            (self.chbxAmplNorm, 'Normalised amplitude'),
            (self.chbxAmplNormRel, 'Normalised amplitude (%)'),
            (self.chbxAmplNormMax, 'Normalised amplitude MAX'),
            (self.chbxPeriod, 'Period'),
            (self.chbxFreq, 'Frequency'),
            (self.chbxHalfDecayTime, 'Half-decay time'),
            (self.chbxHalfDecayAmpl, 'Half-decay amplitude'),
            (self.chbxLeftBorderTime, 'Start time'),
            (self.chbxLeftBorder, 'Start ordinate'),
            (self.chbxRightBorderTime, 'Stop time'),
            (self.chbxRightBorder, 'Stop ordinate'),
            (self.chbxTimeToPeak, 'Ascending time'),
            (self.chbxDecayTime, 'Decay time'),
            (self.chbxFullPeakTime, 'Full peak time'),
            (self.chbxAUC, 'AUC'),
            (self.chbxBigPeaks, 'Big peaks, Hz'),
            (self.chbxMidPeaks, 'Mid peaks, Hz'),
            (self.chbxSmallPeaks, 'Small peaks, Hz'),
        )
        columns_to_delete_for_export = [
            column for checkbox, column in checkbox_to_column
            if not checkbox.isChecked()]
        final_dataframe.drop(columns_to_delete_for_export, axis=1,
                             inplace=True)
        # pd.concat replaces DataFrame.append (deprecated, removed in
        # pandas 2.0) with identical result for default arguments.
        self.multiple_data_sets = pd.concat([self.multiple_data_sets,
                                             final_dataframe])
        if self.chbxSaveFig.isChecked():
            os.makedirs('_Figures', exist_ok=True)
            dpi = self.BoxDPI.value()
            plt.savefig(os.path.join(
                '_Figures',
                'Fig_{figName}.png'.format(figName=self.graph_name)), dpi=dpi)
        # Release the intermediate tables before opening the dialog.
        del df
        del df_sorted
        del final_dataframe
        dialog = MessageBox.question(
            self, '',
            "Current dataset was analysed \n and added to previous ones (if exist). \n Would you like to load next file? ",
            QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
        if dialog == QtGui.QMessageBox.Yes:
            self.load_file()
        else:
            self.rmmpl()
            self.BtnSaveFullDataset.setStyleSheet("background-color: #7CF2BD")
            self.BtnLoadFile.setStyleSheet("background-color: #7CF2BD")
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any processing error still only warns.
        message = MessageBox()
        message.about(self, 'Warning!', "Data were not added to existing dataset. \n Plese be sure that you did not change the output settings.")
class GCWii(Ui_MainWindow):
    """Main GUI controller for browsing and exporting GameCube/Wii games.

    Wires the Qt widgets generated by ``Ui_MainWindow`` to a
    ``GCWiiManager`` backend: lists games found in a source and a
    destination directory, shows box/disc artwork, and copies games on a
    worker thread with progress reporting.
    """

    def __init__(self, source='', destination='', clear_destination=False):
        """Build the Qt application, show the window and enter the event loop.

        NOTE: this constructor blocks — it calls ``sys.exit(app.exec_())``,
        so it never returns to the caller.

        source -- optional directory to pre-load into the source list
        destination -- optional directory to pre-load into the destination list
        clear_destination -- when True, ``quit`` wipes the destination directory
        """
        super(GCWii, self).__init__()
        self.export_in_progress = False
        app = QtWidgets.QApplication(sys.argv)
        self.MainWindow = QtWidgets.QMainWindow()
        # NOTE(review): "treads" looks like a typo for "threads"; the
        # attribute name is kept as-is because other code may reference it.
        self.max_treads = QThread.idealThreadCount()
        self.setupUi(self.MainWindow)
        self.msg = MessageBox()
        self.manager = GCWiiManager()
        # Placeholder artwork shown until real artwork is fetched.
        self.default_box_artwork = str(os.path.join('images', 'blanc-case.png'))
        self.default_disc_artwork = str(
            os.path.join('images', 'blanc-disc.png'))
        self.source_directory = source
        self.source_game_collection = {}
        self.destination_directory = destination
        self.clear_destination = clear_destination
        self.destination_game_collection = {}
        self.box_artwork_path = os.path.join(os.getcwd(), 'images', 'cover3D')
        self.disc_artwork_path = os.path.join(os.getcwd(), 'images', 'disc')
        self.current_selection = {}
        self.games_to_export = {}
        self.setup_widgets()
        self.setup_actions()
        self.MainWindow.show()
        # Pre-populate the lists when directories were passed in.
        if self.source_directory:
            self.update_source_list()
        if self.destination_directory:
            self.update_destination_list()
        sys.exit(app.exec_())

    def setup_widgets(self):
        """Connect widget signals, attach context-menu actions and set defaults."""
        self.source_btn.clicked.connect(lambda: self.update_source_list(True))
        self.destination_btn.clicked.connect(
            lambda: self.update_destination_list(True))
        self.export_btn.clicked.connect(self.export_all)
        self.listView_source.clicked.connect(
            lambda: self.update_art_work('source'))
        self.listView_destination.clicked.connect(
            lambda: self.update_art_work('destination'))
        # Context-menu actions for the destination list.
        self.listView_destination.addAction(self.action_reload_destination)
        self.listView_destination.addAction(
            self.action_select_folder_destination)
        self.listView_destination.addAction(
            self.action_delete_selected_in_destination)
        self.listView_destination.addAction(
            self.action_delete_all_items_in_destination)
        # Context-menu actions for the source list.
        self.listView_source.addAction(self.action_reload_source)
        self.listView_source.addAction(self.action_select_folder_source)
        self.listView_source.addAction(self.action_export_selected)
        self.exit_btn.clicked.connect(self.quit)
        self.exportSelected_btn.clicked.connect(self.export_selection)
        # Cancel is only meaningful while a copy is running.
        self.cancel_btn.setEnabled(False)
        self.cancel_btn.clicked.connect(self.cancel_copy)
        self.label_box.setPixmap(QtGui.QPixmap(self.default_box_artwork))
        self.label_disc.setPixmap(QtGui.QPixmap(self.default_disc_artwork))
        # Progress bars stay hidden until an export starts.
        self.progressBar_fileProgress.setVisible(False)
        self.progressBar_destination.setVisible(False)

    def setup_actions(self):
        """Connect the context-menu actions to their handlers."""
        # Source
        self.action_reload_source.triggered.connect(self.update_source_list)
        self.action_select_folder_source.triggered.connect(
            lambda: self.update_source_list(True))
        self.action_export_selected.triggered.connect(self.export_selection)
        # Destination
        self.action_reload_destination.triggered.connect(
            self.update_destination_list)
        self.action_select_folder_destination.triggered.connect(
            lambda: self.update_destination_list(True))
        self.action_delete_selected_in_destination.triggered.connect(
            self.delete_selected_in_destination)
        self.action_delete_all_items_in_destination.triggered.connect(
            self.delete_all_in_destination)

    def delete_all_in_destination(self):
        """Delete every file in the destination directory and refresh the list."""
        self.manager.delete_all_files_in_directory(self.destination_directory)
        self.update_destination_list()

    def delete_selected_in_destination(self):
        """Delete the games currently selected in the destination list."""
        for item in self.listView_destination.selectedIndexes():
            title = item.data()
            game = self.manager.get_game_from_collection_by_title(
                title, self.destination_game_collection)
            if game:
                # Each game lives in its own directory; remove its files.
                self.manager.delete_all_files_in_directory(game["path"])
        self.update_destination_list()

    def export_selection(self):
        """ Export games marked on the source list_name """
        results = dict()
        # Map each selected title back to its entry in the source collection.
        for index in self.listView_source.selectedIndexes():
            title = QModelIndex.data(index)
            for key in self.source_game_collection.keys():
                if self.source_game_collection[key]["title"] == title:
                    results[key] = self.source_game_collection[key]
        self.games_to_export = results
        self.export()

    def export_all(self):
        """Export every source game that is not already in the destination."""
        self.games_to_export = self.manager.get_collection_diff(
            self.source_game_collection, self.destination_game_collection)
        if not self.games_to_export:
            return self.msg.info("Nothing to export")
        self.export()

    def update_status_info(self, text=None):
        """Show ``text`` in the status bar; clear and hide it when text is falsy."""
        if not text:
            self.MainWindow.statusBar().clearMessage()
            # setVisible returns None; the return just exits early.
            return self.MainWindow.statusBar().setVisible(False)
        if not self.MainWindow.statusBar().isVisible():
            self.MainWindow.statusBar().setVisible(True)
        self.MainWindow.statusBar().showMessage(text)

    def update_global_progress_bar(self, value):
        """Slot: update the overall (destination) progress bar."""
        self.progressBar_destination.setValue(value)

    def update_file_progress_bar(self, value):
        """Slot: update the per-file progress bar."""
        self.progressBar_fileProgress.setValue(value)

    def reset_progress_bars(self):
        """Reset both progress bars to zero."""
        self.progressBar_fileProgress.setValue(0)
        self.progressBar_destination.setValue(0)

    def hide_progress_bars(self):
        """Hide both progress bars."""
        self.progressBar_fileProgress.setVisible(False)
        self.progressBar_destination.setVisible(False)

    def show_progress_bars(self):
        """Show both progress bars."""
        self.progressBar_fileProgress.setVisible(True)
        self.progressBar_destination.setVisible(True)

    def update_art_work(self, list_name):
        """Fetch and display box/disc artwork for the selected game.

        list_name -- 'source' or 'destination'; falls back to the blank
        placeholder images when nothing is selected or fetching fails.
        """
        identifier = self.get_selection(list_name)
        box = self.default_box_artwork
        disc = self.default_disc_artwork
        if not identifier:
            return
        region = self.manager.get_game_region(identifier)
        try:
            # Third/fourth args select box vs disc artwork — see GameTDBclient.
            if GameTDBclient.get_art_work(region, identifier, True, None):
                box = str(
                    os.path.join(self.box_artwork_path, region,
                                 identifier + ".png"))
            if GameTDBclient.get_art_work(region, identifier, None, True):
                disc = str(
                    os.path.join(self.disc_artwork_path, region,
                                 identifier + ".png"))
        except GameTDBclient.ErrorFetchingData:
            # Best-effort: keep the placeholders and just log the failure.
            print("Unable to fetch artwork for game id: '{}' region: '{}'".
                  format(identifier, region))
        self.label_box.setPixmap(QtGui.QPixmap(box))
        self.label_disc.setPixmap(QtGui.QPixmap(disc))

    def get_selection(self, list_name):
        """Return the game identifier selected in the given list, or None.

        list_name -- 'source' or 'destination'.
        """
        games_collection = dict()
        model = None
        if list_name == 'source':
            model = self.listView_source.currentIndex()
            games_collection = self.source_game_collection
        elif list_name == 'destination':
            model = self.listView_destination.currentIndex()
            games_collection = self.destination_game_collection
        title = QModelIndex.data(model)
        # Reverse lookup: collection is keyed by identifier, UI shows titles.
        for identifier in games_collection:
            if games_collection[identifier]["title"] == title:
                return identifier

    def select_directory(self):
        """Open a directory-chooser dialog and return the selected path."""
        return QtWidgets.QFileDialog.getExistingDirectory(self.MainWindow)

    def update_source_list(self, select=False):
        """Scan the source directory and populate the source list view.

        select -- when True, ask the user to pick the directory first.
        """
        try:
            if select:
                directory = self.select_directory()
                if not directory:
                    return
                self.source_directory = directory
            if not self.source_directory:
                return
            list_of_found_files = self.manager.find_supported_files(
                self.source_directory)
            self.source_game_collection = self.manager.generate_game_collection(
                list_of_found_files)
            self.label_source.setText('Source: ' + self.source_directory)
            list_of_titles = self.manager.get_sorted_game_titles(
                self.source_game_collection)
            # No games: show a placeholder entry and stop.
            if not list_of_titles:
                return self.listView_source.setModel(
                    QStringListModel(['No Wii or GameCube game found']))
            self.listView_source.setModel(QStringListModel(list_of_titles))
            self.listView_source.selectionModel().selectionChanged.connect(
                lambda: self.update_art_work('source'))
            self.listView_source.setEditTriggers(
                QtWidgets.QAbstractItemView.NoEditTriggers)
        except PermissionError as err:
            details = f"Directory '{self.source_directory}' can not be read"
            self.msg.warning('Directory not readable', details, str(err))

    def update_destination_list(self, select=False):
        """Scan the destination directory and populate the destination list view.

        select -- when True, loop asking the user for a writeable directory.
        """
        try:
            # Keep prompting until a writeable directory is chosen or the
            # user gives up.
            while select:
                directory = self.select_directory()
                if not directory:
                    return
                if not self.manager.test_directory_writeable(directory):
                    response = self.msg.question(
                        "Directory is not writeable.\nDo you want to select a different directory?"
                    )
                    if not response:
                        return
                    continue
                select = False
                self.destination_directory = directory
            if not self.destination_directory:
                return
            list_of_found_files = self.manager.find_supported_files(
                self.destination_directory)
            self.destination_game_collection = self.manager.generate_game_collection(
                list_of_found_files)
            self.label_destination.setText('Destination: ' +
                                           self.destination_directory)
            # NOTE(review): self-assignment — looks like leftover code.
            self.destination_directory = self.destination_directory
            list_of_titles = self.manager.get_sorted_game_titles(
                self.destination_game_collection)
            if not list_of_titles:
                return self.listView_destination.setModel(
                    QStringListModel(['No Wii or GameCube game found']))
            self.listView_destination.setModel(
                QStringListModel(list_of_titles))
            self.listView_destination.selectionModel(
            ).selectionChanged.connect(
                lambda: self.update_art_work('destination'))
            # NOTE(review): this touches listView_source, not
            # listView_destination — possibly a copy-paste slip; confirm.
            self.listView_source.setEditTriggers(
                QtWidgets.QAbstractItemView.NoEditTriggers)
        except PermissionError as err:
            details = f"Directory '{self.destination_directory}' can not be read"
            self.msg.warning('Directory not readable', details, str(err))

    def export(self):
        """Validate the export preconditions and start the copy worker thread."""
        # Guard clauses: each failed precondition informs the user and aborts.
        if not self.source_game_collection:
            return self.msg.info("Source list is empty")
        if not self.source_directory:
            return self.msg.info("Please select source folder")
        if not self.destination_directory:
            return self.msg.info("Please select destination folder")
        if self.destination_directory == self.source_directory:
            return self.msg.warning(
                "Source and destination should not be the same directory.")
        if not self.games_to_export:
            return self.msg.info(
                "Please select games from source or click \"Export All\"")
        print("\nProcessing")
        # Lock the UI while the copy runs; only Cancel stays enabled.
        self.export_btn.setDisabled(True)
        self.exportSelected_btn.setDisabled(True)
        self.source_btn.setDisabled(True)
        self.destination_btn.setDisabled(True)
        self.cancel_btn.setEnabled(True)
        self.show_progress_bars()
        # Create a QThread object
        self.thread = QThread()
        # Create a worker object
        self.worker = CopyWorker(self.games_to_export,
                                 self.destination_directory)
        # Move worker to the thread
        self.worker.moveToThread(self.thread)
        # Connect signals and slots
        self.thread.started.connect(self.worker.run)
        self.worker.thread_file_progress.progress.connect(
            self.update_file_progress_bar)
        self.worker.progress.connect(self.update_global_progress_bar)
        self.worker.processing.connect(self.handle_worker_processing_update)
        self.worker.finished.connect(self.handle_worker_finished)
        self.worker.error.connect(self.msg.critical)
        self.thread.start()

    def handle_worker_processing_update(self, info=None):
        """Slot: show the worker's status text and refresh the destination list."""
        self.update_status_info(info)
        self.update_destination_list()

    def handle_worker_finished(self):
        """Slot: tear down the worker/thread and restore the UI after a copy."""
        print("\nFinished")
        self.update_destination_list()
        self.reset_progress_bars()
        self.hide_progress_bars()
        self.worker.quit()
        self.thread.quit()
        self.export_btn.setDisabled(False)
        self.exportSelected_btn.setDisabled(False)
        self.source_btn.setDisabled(False)
        self.destination_btn.setDisabled(False)
        self.cancel_btn.setEnabled(False)
        self.cancel_btn.setText("Cancel")

    def quit(self):
        """Exit the application, optionally wiping the destination directory."""
        if self.clear_destination:
            self.manager.delete_all_files_in_directory(
                self.destination_directory)
        sys.exit(0)

    def cancel_copy(self):
        """Ask the running copy worker to stop; no-op message when idle."""
        try:
            self.worker.stop()
            self.thread.quit()
            self.cancel_btn.setText("Cancelling...")
            self.cancel_btn.setEnabled(False)
            print("\nCanceling")
        except AttributeError:
            # self.worker does not exist before the first export starts.
            self.msg.info("There is nothing to cancel. ")
def coordinates_analysis(self):
    """Analyse the detected peaks and append the results to the dataset.

    Builds a per-peak table (peak time, amplitude, borders, area) from the
    coordinates collected on the plot, derives secondary statistics
    (periods, frequencies, half-decay, baseline-normalised amplitudes and
    peak-size class frequencies), filters the columns according to the
    check-boxes on the settings tab and appends the result to
    ``self.multiple_data_sets``. Finally offers to load the next file.

    Reads instance state: ``coordinates``, ``left_peak_border``,
    ``right_peak_border``, ``amplitudes``, ``area``, ``x``, ``y``,
    ``data_after_filter``, ``graph_name`` and several GUI widgets.
    """
    coord_x, coord_y = zip(*self.coordinates)
    leftpb_x, leftpb_y = zip(*self.left_peak_border)
    rightpb_x, rightpb_y = zip(*self.right_peak_border)

    # Absolute amplitude relative to the maximal one (F/Fmax).
    ampl_max = max(self.amplitudes)
    relative_amplitude = [i / ampl_max for i in self.amplitudes]

    # Temporary DataFrame used only for sorting and per-peak calculations.
    temp_dataset = list(
        zip(coord_x, self.amplitudes, relative_amplitude, leftpb_x, leftpb_y,
            rightpb_x, rightpb_y, self.area))
    df = pd.DataFrame(data=temp_dataset,
                      columns=['Peak Time', 'Amplitude',
                               'Relative Amplitude \n (F/Fmax)',
                               'Peak Start Time', 'Peak Start Ordinate',
                               'Peak Stop Time', 'Peak Stop Ordinate',
                               'Area'])

    # Sort peaks chronologically and reset the index so that .at[i, ...]
    # walks the peaks in order of appearance.
    df_sorted = df.sort_values(['Peak Time'], ascending=True)
    df_sorted.index = range(0, len(df_sorted))
    n_peaks = len(df_sorted)

    # Inter-peak periods; a NaN placeholder keeps the column length equal
    # to n_peaks (there are only n_peaks - 1 intervals).
    periods = [df_sorted.at[i, 'Peak Time'] - df_sorted.at[i - 1, 'Peak Time']
               for i in range(1, n_peaks)]
    periods.insert(0, np.nan)

    # Frequencies derived from the periods (NaN propagates for the first peak).
    frequencies = [1 / i for i in periods]

    # Peak timing: full width, rise time and decay time (border to border).
    peak_full_time = [df_sorted.at[i, 'Peak Stop Time'] -
                      df_sorted.at[i, 'Peak Start Time'] for i in range(n_peaks)]
    peak_up_time = [df_sorted.at[i, 'Peak Time'] -
                    df_sorted.at[i, 'Peak Start Time'] for i in range(n_peaks)]
    peak_down_time = [df_sorted.at[i, 'Peak Stop Time'] -
                      df_sorted.at[i, 'Peak Time'] for i in range(n_peaks)]

    # Area under each peak using the composite trapezoidal rule.
    peak_area = [np.trapz(df_sorted.at[i, 'Area']) for i in range(n_peaks)]

    # Half-decay analysis: within the decay region (peak -> right border)
    # find the sample closest to half of the peak amplitude.
    half_decay_time = []
    half_decay_amplitude = []
    for i in range(n_peaks):
        half_decay_ampl = df_sorted.at[i, 'Amplitude'] / 2
        peak_index = self.x.index(df_sorted.at[i, 'Peak Time'])
        stop_idx = self.x.index(df_sorted.at[i, 'Peak Stop Time'])
        data_decay_region = self.data_after_filter[peak_index:stop_idx]
        time_decay_region = self.x[peak_index:stop_idx]
        half_decay_idx = (np.abs(data_decay_region - half_decay_ampl)).argmin()
        half_decay_amplitude.append(half_decay_ampl)
        half_decay_time.append(time_decay_region[half_decay_idx] -
                               df_sorted.at[i, 'Peak Time'])

    # Amplitude normalised to the baseline F0: the Savitzky-Golay-filtered
    # raw trace sampled at each peak's start time.
    sg_window_frame = self.BoxSGwindowFrame.value()
    sg_polynom_degree = self.BoxSGpolynomDegree.value()
    orig_data_filtered = sig.savgol_filter(self.y, sg_window_frame,
                                           sg_polynom_degree)
    normalised_amplitude = []
    for i in range(n_peaks):
        start_idx = self.x.index(df_sorted.at[i, 'Peak Start Time'])
        F0 = orig_data_filtered[start_idx]
        normalised_amplitude.append(df_sorted.at[i, 'Amplitude'] / F0)

    # Normalised amplitude relative to its own maximum.
    maxATB = max(normalised_amplitude)
    relative_normalised_amplitude = [i / maxATB for i in normalised_amplitude]

    # Single-value columns: the value sits in the first row, the rest of the
    # column is padded with NaN so every column has n_peaks entries.
    normalised_amplitude_max = [np.nan] * (n_peaks - 1)
    normalised_amplitude_max.insert(0, maxATB)
    file_name = [np.nan] * (n_peaks - 1)
    file_name.insert(0, self.graph_name)
    absolute_amplitude_max = [np.nan] * (n_peaks - 1)
    absolute_amplitude_max.insert(0, max(df_sorted['Amplitude']))

    # Classify peaks by amplitude (thirds of the maximum) and express each
    # class as a frequency over the whole recording time.
    big_peaks_number = [p for p in self.amplitudes if (p > ampl_max * 0.66)]
    medium_peaks_number = [p for p in self.amplitudes
                           if (p > ampl_max * 0.33 and p <= ampl_max * 0.66)]
    small_peaks_number = [p for p in self.amplitudes
                          if (p > 0 and p <= ampl_max * 0.33)]
    total_time = self.x[-1] - self.x[0]  # hoisted loop-invariant time span
    big_peaks_frequency = [np.nan] * (n_peaks - 1)
    big_peaks_frequency.insert(0, len(big_peaks_number) / total_time)
    medium_peaks_frequency = [np.nan] * (n_peaks - 1)
    medium_peaks_frequency.insert(0, len(medium_peaks_number) / total_time)
    small_peaks_frequency = [np.nan] * (n_peaks - 1)
    small_peaks_frequency.insert(0, len(small_peaks_number) / total_time)

    final_dataset = list(zip(file_name, df_sorted['Peak Time'],
                             df_sorted['Amplitude'],
                             df_sorted['Relative Amplitude \n (F/Fmax)'],
                             absolute_amplitude_max, normalised_amplitude,
                             relative_normalised_amplitude,
                             normalised_amplitude_max, periods, frequencies,
                             half_decay_time, half_decay_amplitude,
                             df_sorted['Peak Start Time'],
                             df_sorted['Peak Start Ordinate'],
                             df_sorted['Peak Stop Time'],
                             df_sorted['Peak Stop Ordinate'], peak_up_time,
                             peak_down_time, peak_full_time, peak_area,
                             big_peaks_frequency, medium_peaks_frequency,
                             small_peaks_frequency))
    final_dataframe = pd.DataFrame(
        data=final_dataset,
        columns=['File name', 'Peak time', 'Absolute amplitude',
                 'Absolute amplitude (%)', 'Absolute amplitude MAX',
                 'Normalised amplitude', 'Normalised amplitude (%)',
                 'Normalised amplitude MAX', 'Period', 'Frequency',
                 'Half-decay time', 'Half-decay amplitude', 'Start time',
                 'Start ordinate', 'Stop time', 'Stop ordinate',
                 'Ascending time', 'Decay time', 'Full peak time', 'AUC',
                 'Big peaks, Hz', 'Mid peaks, Hz', 'Small peaks, Hz'])

    # Specify data for export according to the settings tab in the GUI
    # and append the current analysed dataset to existing ones.
    try:
        # Checkbox -> column mapping replaces the long if-chain.
        # BUGFIX: the original wired chbxSmallPeaks to 'Big peaks, Hz' and
        # chbxBigPeaks to 'Small peaks, Hz'; the pairing is now consistent.
        checkbox_to_column = (
            (self.chbxFileName, 'File name'),
            (self.chbxPeakTime, 'Peak time'),
            (self.chbxAmplAbs, 'Absolute amplitude'),
            (self.chbxAmplAbsRel, 'Absolute amplitude (%)'),
            (self.chbxAmplAbsMax, 'Absolute amplitude MAX'),
            (self.chbxAmplNorm, 'Normalised amplitude'),
            (self.chbxAmplNormRel, 'Normalised amplitude (%)'),
            (self.chbxAmplNormMax, 'Normalised amplitude MAX'),
            (self.chbxPeriod, 'Period'),
            (self.chbxFreq, 'Frequency'),
            (self.chbxHalfDecayTime, 'Half-decay time'),
            (self.chbxHalfDecayAmpl, 'Half-decay amplitude'),
            (self.chbxLeftBorderTime, 'Start time'),
            (self.chbxLeftBorder, 'Start ordinate'),
            (self.chbxRightBorderTime, 'Stop time'),
            (self.chbxRightBorder, 'Stop ordinate'),
            (self.chbxTimeToPeak, 'Ascending time'),
            (self.chbxDecayTime, 'Decay time'),
            (self.chbxFullPeakTime, 'Full peak time'),
            (self.chbxAUC, 'AUC'),
            (self.chbxBigPeaks, 'Big peaks, Hz'),
            (self.chbxMidPeaks, 'Mid peaks, Hz'),
            (self.chbxSmallPeaks, 'Small peaks, Hz'),
        )
        columns_to_delete_for_export = [
            column for checkbox, column in checkbox_to_column
            if not checkbox.isChecked()]
        final_dataframe.drop(columns_to_delete_for_export, axis=1,
                             inplace=True)
        # pd.concat replaces DataFrame.append (deprecated, removed in
        # pandas 2.0) with identical result for default arguments.
        self.multiple_data_sets = pd.concat([self.multiple_data_sets,
                                             final_dataframe])
        if self.chbxSaveFig.isChecked():
            os.makedirs('_Figures', exist_ok=True)
            dpi = self.BoxDPI.value()
            plt.savefig(os.path.join(
                '_Figures',
                'Fig_{figName}.png'.format(figName=self.graph_name)), dpi=dpi)
        # Release the intermediate tables before opening the dialog.
        del df
        del df_sorted
        del final_dataframe
        dialog = MessageBox.question(
            self, '',
            "Current dataset was analysed \n and added to previous ones (if exist). \n Would you like to load next file? ",
            QMessageBox.Yes, QMessageBox.No)
        if dialog == QMessageBox.Yes:
            self.load_file()
        else:
            self.rmmpl()
            self.BtnSaveFullDataset.setStyleSheet("background-color: #7CF2BD")
            self.BtnLoadFile.setStyleSheet("background-color: #7CF2BD")
    except Exception:
        # Narrowed from a bare ``except`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any processing error still only warns.
        message = MessageBox()
        message.about(self, 'Warning!', "Data were not added to existing dataset. \n Plese be sure that you did not change the output settings.")