def export(self):
    """Export the database content to CSV files in the directory chosen in the UI.

    Runs the export as a background WorkerTask while a modal progress
    dialog is shown, then accepts (closes) this dialog.
    """
    directory = self.UI.lineDir.text()
    print('Should export in : ', directory)

    class CSVExporter(WorkerTask):
        """Background task delegating the CSV export to the db manager."""

        def __init__(self, dbmanager, directory):
            super().__init__('Exportation CSV', 0)
            self.dbMan = dbmanager
            self.directory = directory

        def process(self):
            print('Exporting in :', self.directory)
            # Bug fix: use the directory stored on this task instead of the
            # closure variable from the enclosing method.
            self.dbMan.export_csv(self.directory)
            self.update_progress.emit(100)
            print('Exporting done!')

    exporter = CSVExporter(self.dbMan, directory)
    process = BackgroundProcess([exporter])

    # Create progress dialog
    dialog = ProgressDialog(process, 'CSV Export', self)
    process.finished.connect(dialog.accept)
    process.start()

    # Show dialog (blocks until the background process finishes)
    dialog.exec()

    # Done
    self.accept()
def load_sensors_blocks(self):
    """Load the recorded time blocks of every sensor at every tracked location.

    Fills ``self.sensors_blocks`` with, for each sensor id, a list of
    ``{"start_time": ..., "end_time": ...}`` dicts gathered from the
    database through background tasks.
    """
    # One request task per (sensor, recordset) pair, for every location
    tasks = [
        DBSensorTimesTask(title="Chargement des données temporelles",
                          db_manager=self.dbMan,
                          sensor_id=sensor_id,
                          recordset=record)
        for location in self.sensors_location
        for sensor_id in self.get_sensors_for_location(location)
        for record in self.recordsets
    ]

    # Run all tasks in the background behind a busy cursor + progress dialog
    QGuiApplication.setOverrideCursor(Qt.BusyCursor)
    process = BackgroundProcess(tasks)
    dialog = ProgressDialog(process, "Chargement")
    process.start()
    dialog.exec()
    QGuiApplication.restoreOverrideCursor()

    # Combine task results: group (start, end) pairs by sensor id
    self.sensors_blocks = {}
    for task in tasks:
        for result in task.results:
            block = {"start_time": result['start_time'],
                     "end_time": result['end_time']}
            self.sensors_blocks.setdefault(result['sensor_id'], []).append(block)
def export(self):
    """Export the database content to the file format selected in the UI.

    The export runs as a background WorkerTask with a modal progress
    dialog; this dialog is accepted (closed) once done.
    """
    directory = self.UI.lineDir.text()
    file_format = self.UI.comboFormat.currentText()
    print('Should export in : ', directory)

    class FileExporter(WorkerTask):
        """Background task delegating the export to the db manager."""

        def __init__(self, _dbman, _format, _directory):
            super().__init__('Exportation :' + _format, 0)
            self.dbMan = _dbman
            self.directory = _directory
            self.format = _format

        def process(self):
            print('Exporting in :', self.directory)
            self.dbMan.export_file(self.format, self.directory)
            self.update_progress.emit(100)
            print('Exporting done!')

    background_task = FileExporter(self.dbMan, file_format, directory)
    process = BackgroundProcess([background_task])

    # Progress dialog, dismissed automatically when the process finishes
    dialog = ProgressDialog(process, 'File Export to format: ' + file_format, self)
    process.finished.connect(dialog.accept)

    process.start()
    dialog.exec()

    # All done — close this dialog
    self.accept()
def get_sensor_data(self, sensor, start_time=None, end_time=None):
    """Fetch all data for *sensor*, optionally bounded by a time window.

    Runs a DBSensorAllDataTask in the background behind a busy cursor
    and a progress dialog, then returns the task's
    ``(timeseries, channel_data)`` results.
    """
    QGuiApplication.setOverrideCursor(Qt.BusyCursor)

    task = DBSensorAllDataTask("Chargement des données...", self.dbMan, sensor,
                               start_time, end_time, self.recordsets)
    process = BackgroundProcess([task])
    dialog = ProgressDialog(process, "Traitement")
    process.start()
    dialog.exec()

    QGuiApplication.restoreOverrideCursor()

    results = task.results
    return results['timeseries'], results['channel_data']
def db_compact_requested(self):
    """Ask for confirmation, then compact the database file in the background."""
    confirm = QMessageBox(self)
    confirm.setIcon(QMessageBox.Question)
    confirm.setStyleSheet("QPushButton{min-width: 100px; min-height: 40px;}")
    confirm.setText(
        "Le fichier de données sera nettoyé. Ceci peut prendre un certain temps. \n"
        "Désirez-vous poursuivre?")
    confirm.setWindowTitle("Compactage des données")
    confirm.setStandardButtons(QMessageBox.Yes | QMessageBox.No)

    # Guard clause: nothing to do unless the user confirmed
    if confirm.exec() != QMessageBox.Yes:
        return

    compact_task = SimpleTask("Compactage des données", self.dbMan.compact)
    process = BackgroundProcess([compact_task])
    dialog = ProgressDialog(process, 'Nettoyage', self)
    process.start()
    dialog.exec()
def export(self):
    """Export the database content to CSV (legacy progress-dialog API).

    Runs the export through a BackgroundProcess built from a bound
    ``process`` method, then accepts (closes) this dialog.
    """
    directory = self.UI.lineDir.text()
    print('Should export in : ', directory)

    # Create progress dialog
    dialog = ProgressDialog(1, self)
    dialog.setWindowTitle('Exportation CSV...')

    class CSVExporter:
        """Holds the export parameters for the background process."""

        def __init__(self, dbmanager, directory):
            self.dbMan = dbmanager
            self.directory = directory

        def process(self):
            print('Exporting in :', self.directory)
            # Bug fix: use the directory stored on this object instead of the
            # closure variable from the enclosing method.
            self.dbMan.export_csv(self.directory)
            print('Exporting done!')

    exporter = CSVExporter(self.dbMan, directory)
    process = BackgroundProcess([exporter.process])
    process.finished.connect(dialog.accept)
    process.trigger.connect(dialog.trigger)
    process.start()

    # Show dialog (blocks until the export finishes)
    dialog.exec()

    # Done
    self.accept()
def ok_clicked(self):
    """Import every file listed in the table, showing a progress dialog.

    Each table row holds a participant, a file type and a file name; an
    importer matching the type is created and all importers run
    sequentially in a background process. Rejects the dialog and aborts
    if a row has an unsupported file type.
    """
    # Do the importation
    table = self.UI.tableFiles

    # Create progress dialog
    dialog = ProgressDialog(table.rowCount(), self)
    dialog.setWindowTitle('Importation...')

    class Importer:
        """Pairs a file name with the importer that will process it."""

        def __init__(self, filename, importer):
            self.filename = filename
            self.importer = importer

        def process(self):
            print('Importer loading', self.filename)
            results = self.importer.load(self.filename)
            print('Importer saving to db')
            self.importer.import_to_database(results)
            if results is not None:
                # Needed to clear the dict cache and let the garbage collector delete it!
                results.clear()
            print('Importer done!')

    # Map each supported file type to its importer class
    importer_classes = {
        ImporterTypes.ACTIGRAPH: ActigraphImporter,
        ImporterTypes.WIMU: WIMUImporter,
        ImporterTypes.OPENIMU: OpenIMUImporter,
        ImporterTypes.APPLEWATCH: AppleWatchImporter,
    }

    importers = []
    for i in range(table.rowCount()):
        part = table.item(i, 1).data(Qt.UserRole)
        file_type = table.item(i, 2).data(Qt.UserRole)
        file_name = table.item(i, 3).text()

        importer_class = importer_classes.get(file_type)
        if importer_class is None:
            # TODO: Error message
            # Bug fix: abort the whole operation instead of rejecting the
            # dialog and then continuing to import anyway.
            self.reject()
            return
        data_importer = importer_class(manager=self.dbMan, participant=part)
        importers.append(Importer(file_name, data_importer))

    # Run in background all importers (in sequence)
    all_functions = [importer.process for importer in importers]

    process = BackgroundProcess(all_functions)
    process.finished.connect(dialog.accept)
    process.trigger.connect(dialog.trigger)
    process.start()

    # Show progress dialog
    dialog.exec()
    # Importers can hold large cached dicts — reclaim them now
    gc.collect()
    self.accept()
def delete_requested(self):
    """Delete the currently selected dataset-tree item and everything tied to it.

    Asks for confirmation first, then — depending on the item type
    (group, participant, recordset, result, date, recordsets) — removes
    the item(s) from the tree view and queues SimpleTask deletions that
    run in a background process behind a progress dialog.
    """
    item_id = self.UI.treeDataSet.get_item_id(
        self.UI.treeDataSet.currentItem())
    item_type = self.UI.treeDataSet.get_item_type(
        self.UI.treeDataSet.currentItem())

    # if item_type == "recordsets" or item_type == "results":
    #     return

    # Confirmation dialog before any destructive action
    msg = QMessageBox(self)
    msg.setIcon(QMessageBox.Question)
    msg.setStyleSheet("QPushButton{min-width: 100px; min-height: 40px;}")
    msg.setText("Désirez-vous vraiment supprimer \"" +
                self.UI.treeDataSet.currentItem().text(0) +
                "\" et tous les éléments associés?")
    msg.setWindowTitle("Confirmation de suppression")
    msg.setStandardButtons(QMessageBox.Yes | QMessageBox.No)

    rval = msg.exec()
    if rval == QMessageBox.Yes:
        item_name = self.UI.treeDataSet.currentItem().text(0)
        # Deletion tasks accumulated per branch, executed together below
        tasks = []

        if item_type == "group":
            group = self.UI.treeDataSet.groups[item_id]
            self.UI.treeDataSet.remove_group(group)
            task = SimpleTask("Suppression de '" + group.name + "'",
                              self.dbMan.delete_group, group)
            tasks.append(task)

        if item_type == "participant":
            part = self.UI.treeDataSet.participants[item_id]
            self.UI.treeDataSet.remove_participant(part)
            task = SimpleTask("Suppression de '" + part.name + "'",
                              self.dbMan.delete_participant, part)
            tasks.append(task)

        if item_type == "recordset":
            # Find and remove all related results (processed data that
            # references the recordset being deleted)
            for result in self.UI.treeDataSet.results.values():
                if result is not None:
                    for ref in result.processed_data_ref:
                        if ref.recordset.id_recordset == item_id:
                            self.UI.treeDataSet.remove_result(result)
                            task = SimpleTask(
                                "Suppression de '" + result.name + "'",
                                self.dbMan.delete_processed_data, result)
                            tasks.append(task)
                            # self.dbMan.delete_processed_data(result)
                            break
            recordset = self.UI.treeDataSet.recordsets[item_id]
            task = SimpleTask("Suppression de '" + recordset.name + "'",
                              self.dbMan.delete_recordset, recordset)
            tasks.append(task)
            # self.dbMan.delete_recordset(recordset)
            self.UI.treeDataSet.remove_recordset(recordset)

        if item_type == "result":
            result = self.UI.treeDataSet.results[item_id]
            task = SimpleTask("Suppression de '" +
                              result.name + "'",
                              self.dbMan.delete_processed_data, result)
            tasks.append(task)
            self.UI.treeDataSet.remove_result(result)
            # self.dbMan.delete_processed_data(result)

        if item_type == "date":
            # Delete all recordsets related to that date.
            # The participant id is two levels up in the tree hierarchy.
            id_participant = self.UI.treeDataSet.get_item_id(
                self.UI.treeDataSet.currentItem().parent().parent())
            search_date = self.UI.treeDataSet.dates[
                Treedatawidget.get_date_id(
                    self.UI.treeDataSet.currentItem().text(0),
                    id_participant)]
            recordsets = self.dbMan.get_all_recordsets(
                start_date=search_date)
            part_id = None
            for recordset in recordsets:
                # Remember the participant id from the first recordset so
                # the date node can be removed from the view afterwards
                if part_id is None:
                    part_id = recordset.id_participant
                task = SimpleTask(
                    "Suppression de '" + recordset.name + "'",
                    self.dbMan.delete_recordset, recordset)
                tasks.append(task)
                self.UI.treeDataSet.remove_recordset(recordset)
            self.UI.treeDataSet.remove_date(
                self.UI.treeDataSet.currentItem().text(0), part_id)

        if item_type == "recordsets":
            # Delete all recordsets for that participant (parent tree item)
            participant = self.UI.treeDataSet.participants[
                self.UI.treeDataSet.get_item_id(
                    self.UI.treeDataSet.currentItem().parent())]
            recordsets = self.dbMan.get_all_recordsets(
                participant=participant)
            for recordset in recordsets:
                task = SimpleTask(
                    "Suppression de '" + recordset.name + "'",
                    self.dbMan.delete_recordset, recordset)
                tasks.append(task)
                self.UI.treeDataSet.remove_recordset(recordset)
            # Remove all dates from the view
            self.UI.treeDataSet.remove_dates_for_participant(
                participant.id_participant)

        if item_type == "results":
            # NOTE(review): intentionally unhandled — nothing is deleted
            # for the aggregated "results" node.
            pass

        if tasks:
            process = BackgroundProcess(tasks)
            # Create progress dialog
            dialog = ProgressDialog(process, 'Suppression', self)
            # Start tasks
            process.start()
            dialog.exec()
            # self.dbMan.clean_db()

        self.add_to_log(item_name + " a été supprimé.",
                        LogTypes.LOGTYPE_DONE)
        self.clear_main_widgets()
def on_process_button_clicked(self):
    """Run the selected algorithm over the recordsets and save the results.

    Filters out recordsets lacking the sensors required by the algorithm,
    runs the computation as a background WorkerTask behind a progress
    dialog, stores the processed data in the database and accepts the
    dialog.
    """
    if self.factory is None:
        return

    class Processor(WorkerTask):
        """Background task running one algorithm over a set of recordsets."""

        def __init__(self, title, algor, dbmanager, recordsets, parent=None):
            super(Processor, self).__init__(title, 0, parent)
            self.algo = algor
            self.dbMan = dbmanager
            self.recordsets = recordsets
            self.results = {}

        def process(self):
            print('Processor starting')
            # Bug fix: use the algorithm stored on this task, not the
            # closure variable from the enclosing method.
            self.results = self.algo.calculate(self.dbMan, self.recordsets)
            print('results:', self.results)
            print('Processor done!')

        def get_results(self):
            print('getting results')
            return self.results

    # Initialize processor
    params = self.factory.params()
    algo = self.factory.create(params)

    # Remove recordsets that don't have the required sensors.
    # Bug fix: filter into a new list instead of calling remove() while
    # iterating the same list, which skips the element after each removal.
    required_sensors = self.factory.required_sensors()
    kept_recordsets = []
    for recordset in self.recordsets:
        sensors_types = [sensor.id_sensor_type
                         for sensor in self.dbMan.get_sensors(recordset)]
        if all(elem in sensors_types for elem in required_sensors):
            kept_recordsets.append(recordset)
    # Slice-assign to keep mutating the same list object, as remove() did
    self.recordsets[:] = kept_recordsets

    # Create background process
    processor = Processor(title=self.UI.lblNameValue.text(), algor=algo,
                          dbmanager=self.dbMan, recordsets=self.recordsets)
    process = BackgroundProcess([processor])

    # Create progress dialog (blocks until processing completes)
    dialog = ProgressDialog(process, 'Analyse des données', self)
    process.start()
    dialog.exec()

    results = processor.get_results()
    print('Algo results', results)

    # Save to database, naming the result after the recordset range
    name = self.factory.info()['name'] + " - " + self.recordsets[0].name
    if len(self.recordsets) > 1:
        name += " @ " + self.recordsets[len(self.recordsets) - 1].name

    self.processed_data = self.dbMan.add_processed_data(
        self.factory.info()['unique_id'], name, results, self.recordsets,
        params)
    self.accept()
def ok_clicked(self):
    """Import every file listed in the table into the database.

    Builds one Importer WorkerTask per table row (participant, file type,
    file name), wires their log signals to this dialog and runs them
    sequentially in a background process behind a progress dialog.
    Files whose md5 already has a datasource for the participant are
    skipped.
    """
    # Do the importation
    table = self.UI.tableFiles

    class Importer(WorkerTask):
        """Background task that loads one file and imports it to the database."""

        def __init__(self, filename, task_size, file_importer):
            super().__init__(filename, task_size)
            self.filename = filename
            self.importer = file_importer
            # Forward the file importer's progress to this task's signal
            self.importer.update_progress.connect(self.update_progress)
            self.short_filename = DataSource.build_short_filename(self.filename)
            self.title = self.short_filename
            self.results = []

        # For testing only — load step split out of process()
        @timing
        def load_data(self):
            """Load the file contents unless already imported; return True if loaded."""
            file_md5 = DataSource.compute_md5(filename=self.filename).hexdigest()
            # Skip files whose content (md5) is already registered for
            # this participant
            if not DataSource.datasource_exists_for_participant(
                    filename=self.short_filename,
                    participant=self.importer.participant,
                    md5=file_md5,
                    db_session=self.importer.db.session):
                self.results = self.importer.load(self.filename)
                return True
            return False

        # For testing only — import step split out of process()
        @timing
        def import_data(self):
            """Import previously loaded results and register datasources."""
            file_md5 = DataSource.compute_md5(filename=self.filename).hexdigest()
            if not DataSource.datasource_exists_for_participant(
                    filename=self.short_filename,
                    participant=self.importer.participant,
                    md5=file_md5,
                    db_session=self.importer.db.session):
                if self.results is not None:
                    self.log_request.emit('Importation des données...',
                                          LogTypes.LOGTYPE_INFO)
                    self.importer.import_to_database(self.results)
                    # Needed to clear the dict cache and let the garbage
                    # collector delete it!
                    self.results.clear()
                    # Add datasources for that file (one per recordset
                    # created by the import, unless already present)
                    for recordset in self.importer.recordsets:
                        if not DataSource.datasource_exists_for_recordset(
                                filename=self.short_filename,
                                recordset=recordset,
                                md5=file_md5,
                                db_session=self.importer.db.session):
                            ds = DataSource()
                            ds.recordset = recordset
                            ds.file_md5 = file_md5
                            ds.file_name = self.short_filename
                            ds.update_datasource(
                                db_session=self.importer.db.session)
                    self.importer.clear_recordsets()
                    self.log_request.emit('Importation du fichier complétée!',
                                          LogTypes.LOGTYPE_DONE)
                else:
                    self.log_request.emit(
                        'Erreur lors du chargement du fichier: ' +
                        self.importer.last_error,
                        LogTypes.LOGTYPE_ERROR)
            else:
                self.log_request.emit(
                    "Données du fichier '" + self.filename +
                    "' déjà présentes pour '" +
                    self.importer.participant.name + "' - ignorées.",
                    LogTypes.LOGTYPE_WARNING)

        @timing
        def process(self):
            """Full pipeline: md5 dedup check, load, import, datasource registration.

            NOTE(review): duplicates the logic of load_data()/import_data()
            above in a single pass — keep the three in sync.
            """
            file_md5 = DataSource.compute_md5(filename=self.filename).hexdigest()
            if not DataSource.datasource_exists_for_participant(
                    filename=self.short_filename,
                    participant=self.importer.participant,
                    md5=file_md5,
                    db_session=self.importer.db.session):
                self.log_request.emit(
                    "Chargement du fichier: '" + self.short_filename + "'",
                    LogTypes.LOGTYPE_INFO)
                self.results = self.importer.load(self.filename)
                if self.results is not None:
                    self.log_request.emit('Importation des données...',
                                          LogTypes.LOGTYPE_INFO)
                    self.importer.import_to_database(self.results)
                    # Needed to clear the dict cache and let the garbage
                    # collector delete it!
                    self.results.clear()
                    # Add datasources for that file
                    for recordset in self.importer.recordsets:
                        if not DataSource.datasource_exists_for_recordset(
                                filename=self.short_filename,
                                recordset=recordset,
                                md5=file_md5,
                                db_session=self.importer.db.session):
                            ds = DataSource()
                            ds.recordset = recordset
                            ds.file_md5 = file_md5
                            ds.file_name = self.short_filename
                            ds.update_datasource(
                                db_session=self.importer.db.session)
                    self.importer.clear_recordsets()
                    self.log_request.emit('Importation du fichier complétée!',
                                          LogTypes.LOGTYPE_DONE)
                else:
                    self.log_request.emit(
                        'Erreur lors du chargement du fichier: ' +
                        self.importer.last_error,
                        LogTypes.LOGTYPE_ERROR)
            else:
                self.log_request.emit(
                    "Données du fichier '" + self.filename +
                    "' déjà présentes pour '" +
                    self.importer.participant.name + "' - ignorées.",
                    LogTypes.LOGTYPE_WARNING)

    importers = []
    for i in range(0, table.rowCount()):
        # Table columns: 1 = participant, 2 = file type, 3 = file name
        part = table.item(i, 1).data(Qt.UserRole)
        file_type = table.item(i, 2).data(Qt.UserRole)
        file_name = table.item(i, 3).text()

        data_importer = None
        if file_type == ImporterTypes.ACTIGRAPH:
            data_importer = ActigraphImporter(manager=self.dbMan,
                                              participant=part)
        if file_type == ImporterTypes.WIMU:
            data_importer = WIMUImporter(manager=self.dbMan,
                                         participant=part)
        if file_type == ImporterTypes.OPENIMU:
            data_importer = OpenIMUImporter(manager=self.dbMan,
                                            participant=part)
        if file_type == ImporterTypes.APPLEWATCH:
            data_importer = AppleWatchImporter(manager=self.dbMan,
                                               participant=part)

        if data_importer is not None:
            # importers.append(Importer(file_name, os.stat(file_name).st_size, data_importer))
            # Fixed task size of 100 (progress emitted as a percentage)
            importers.append(Importer(file_name, 100, data_importer))
            # results = data_importer.load(file_name)
            # data_importer.import_to_database(results)
        else:
            # TODO: Error message
            # NOTE(review): reject() does not return, so remaining rows are
            # still processed and accept() below still runs — confirm intent.
            self.reject()

    # Run in background all importers (in sequence)
    all_tasks = []
    for importer in importers:
        importer.log_request.connect(self.log_request)
        all_tasks.append(importer)

    # Try loading in parallel (RAM INTENSIVE!)
    # process = BackgroundProcessForImporters(all_tasks)

    # For now process in series...
    process = BackgroundProcess(all_tasks)

    # Create progress dialog
    dialog = ProgressDialog(process, 'Importation des données', self)

    # Start tasks
    process.start()

    # Show progress dialog
    # self.showMinimized()
    dialog.exec()
    # gc.collect()
    self.accept()
def on_process_button_clicked(self):
    """Run the selected algorithm over the recordsets and save the results.

    Legacy variant: builds the algorithm with empty params, runs it in a
    background process behind a progress dialog, stores the processed
    data and accepts the dialog.
    """
    print('on_process_button_clicked')
    if self.factory is None:
        return

    class Processor:
        """Wraps an algorithm run so it can execute in a background process."""

        def __init__(self, algo, dbmanager, recordsets):
            self.algo = algo
            self.dbMan = dbmanager
            self.recordsets = recordsets
            self.results = {}

        def process(self):
            print('Processor starting')
            # Bug fix: use the algorithm stored on this object, not the
            # closure variable from the enclosing method.
            self.results = self.algo.calculate(self.dbMan, self.recordsets)
            print('results:', self.results)
            print('Processor done!')

        def get_results(self):
            print('getting results')
            return self.results

    # For testing, should display a configuration GUI first
    params = {}
    algo = self.factory.create(params)

    # Create background process
    processor = Processor(algo, self.dbMan, self.recordsets)
    process = BackgroundProcess([processor.process])

    # Create progress dialog (blocks until processing completes)
    dialog = ProgressDialog(1, self)
    dialog.setWindowTitle('Traitement...')
    process.finished.connect(dialog.accept)
    process.trigger.connect(dialog.trigger)
    process.start()
    dialog.exec()

    results = processor.get_results()
    print('Algo results', results)

    # Save to database, naming the result after the recordset range
    name = self.factory.info()['name'] + " - " + self.recordsets[0].name
    if len(self.recordsets) > 1:
        name += " @ " + self.recordsets[len(self.recordsets) - 1].name

    self.processed_data = self.dbMan.add_processed_data(
        self.factory.info()['unique_id'], name, results, self.recordsets)
    self.accept()