def test_load_zip_file():
    """Smoke test: import an Apple Watch .zip archive into a fresh database.

    NOTE(review): uses a hard-coded absolute path to a developer machine —
    this only runs there; consider a repository-relative sample path.
    """
    db = DBManager('applewatch.oi', overwrite=True, newfile=True)

    subject = Participant(name='My Participant', description='Participant Description')
    db.update_participant(subject)

    watch_importer = AppleWatchImporter(db, subject)
    loaded = watch_importer.load('/Users/dominic/WA/OpenIMU.git/python/applewatch_data.zip')
    watch_importer.import_to_database(loaded)
def test_load_data_file():
    """Smoke test: import a single Apple Watch .data file into a fresh database.

    NOTE(review): hard-coded absolute path — runs only on the original
    developer's machine; consider a repository-relative sample path.
    """
    db = DBManager('applewatch.oi', overwrite=True)

    subject = Participant(name='My Participant', description='Participant Description')
    db.update_participant(subject)

    watch_importer = AppleWatchImporter(db, subject)
    loaded = watch_importer.load(
        '/Users/dominic/Documents/working_area/OpenIMU.git/python/watch_ProcessedMotion.data')
    watch_importer.import_to_database(loaded)
def test_load_zip_file(self):
    """Smoke test: import the bundled AppleWatch.zip sample into a fresh database."""
    db = DBManager('applewatch.db', overwrite=True)

    subject = Participant(name='My Participant', description='Participant Description')
    db.update_participant(subject)

    watch_importer = AppleWatchImporter(db, subject)
    loaded = watch_importer.load('../../../resources/samples/AppleWatch.zip')
    watch_importer.import_to_database(loaded)
def ok_clicked(self):
    """Import every file listed in the UI table into the database.

    For each table row, builds the importer matching the row's file type,
    then runs all importers sequentially in a background process while a
    modal progress dialog is shown.
    """
    table = self.UI.tableFiles

    # Create progress dialog
    dialog = ProgressDialog(table.rowCount(), self)
    dialog.setWindowTitle('Importation...')

    class Importer:
        """Pairs a filename with its data importer; process() loads then saves."""

        def __init__(self, filename, importer):
            self.filename = filename
            self.importer = importer

        def process(self):
            print('Importer loading', self.filename)
            results = self.importer.load(self.filename)
            print('Importer saving to db')
            # Fix: only import when loading succeeded. The newer Importer
            # implementation in this file guards on `results is not None`
            # before importing; without the guard, import_to_database(None)
            # is called whenever load() fails.
            if results is not None:
                self.importer.import_to_database(results)
                # Needed to clear the dict cache and let the garbage collector delete it!
                results.clear()
            print('Importer done!')

    importers = []
    for i in range(0, table.rowCount()):
        # Row layout: col 1 = participant, col 2 = file type, col 3 = file name
        part = table.item(i, 1).data(Qt.UserRole)
        file_type = table.item(i, 2).data(Qt.UserRole)
        file_name = table.item(i, 3).text()

        data_importer = None
        if file_type == ImporterTypes.ACTIGRAPH:
            data_importer = ActigraphImporter(manager=self.dbMan, participant=part)
        if file_type == ImporterTypes.WIMU:
            data_importer = WIMUImporter(manager=self.dbMan, participant=part)
        if file_type == ImporterTypes.OPENIMU:
            data_importer = OpenIMUImporter(manager=self.dbMan, participant=part)
        if file_type == ImporterTypes.APPLEWATCH:
            data_importer = AppleWatchImporter(manager=self.dbMan, participant=part)

        if data_importer is not None:
            importers.append(Importer(file_name, data_importer))
        else:
            # TODO: Error message
            self.reject()

    # Run all importers in the background (in sequence)
    all_functions = [importer.process for importer in importers]

    process = BackgroundProcess(all_functions)
    process.finished.connect(dialog.accept)
    process.trigger.connect(dialog.trigger)
    process.start()

    # Show progress dialog (modal) until the background process finishes
    dialog.exec()

    gc.collect()
    self.accept()
def ok_clicked(self):
    """Import every file listed in the UI table into the database.

    Each table row yields a WorkerTask-based Importer; tasks are run in
    sequence by a background process while a progress dialog is displayed.
    Files whose MD5 already exists for the participant are skipped.
    """
    table = self.UI.tableFiles

    class Importer(WorkerTask):
        """Background task that loads one file and stores it in the database."""

        def __init__(self, filename, task_size, file_importer):
            super().__init__(filename, task_size)
            self.filename = filename
            self.importer = file_importer
            self.importer.update_progress.connect(self.update_progress)
            self.short_filename = DataSource.build_short_filename(self.filename)
            self.title = self.short_filename
            self.results = []

        def _already_imported(self, file_md5):
            # True when a datasource with this name/md5 already exists for
            # the participant — the file was imported before.
            return DataSource.datasource_exists_for_participant(
                filename=self.short_filename,
                participant=self.importer.participant,
                md5=file_md5,
                db_session=self.importer.db.session)

        def _add_datasources(self, file_md5):
            # Register a DataSource entry for every recordset this file produced.
            for recordset in self.importer.recordsets:
                if DataSource.datasource_exists_for_recordset(
                        filename=self.short_filename,
                        recordset=recordset,
                        md5=file_md5,
                        db_session=self.importer.db.session):
                    continue
                ds = DataSource()
                ds.recordset = recordset
                ds.file_md5 = file_md5
                ds.file_name = self.short_filename
                ds.update_datasource(db_session=self.importer.db.session)

        # For testing only
        @timing
        def load_data(self):
            file_md5 = DataSource.compute_md5(filename=self.filename).hexdigest()
            if self._already_imported(file_md5):
                return False
            self.results = self.importer.load(self.filename)
            return True

        # For testing only
        @timing
        def import_data(self):
            file_md5 = DataSource.compute_md5(filename=self.filename).hexdigest()
            if self._already_imported(file_md5):
                self.log_request.emit("Données du fichier '" + self.filename +
                                      "' déjà présentes pour '" +
                                      self.importer.participant.name +
                                      "' - ignorées.", LogTypes.LOGTYPE_WARNING)
                return
            if self.results is None:
                self.log_request.emit('Erreur lors du chargement du fichier: ' +
                                      self.importer.last_error, LogTypes.LOGTYPE_ERROR)
                return
            self.log_request.emit('Importation des données...', LogTypes.LOGTYPE_INFO)
            self.importer.import_to_database(self.results)
            # Needed to clear the dict cache and let the garbage collector delete it!
            self.results.clear()
            self._add_datasources(file_md5)
            self.importer.clear_recordsets()
            self.log_request.emit('Importation du fichier complétée!', LogTypes.LOGTYPE_DONE)

        @timing
        def process(self):
            file_md5 = DataSource.compute_md5(filename=self.filename).hexdigest()
            if self._already_imported(file_md5):
                self.log_request.emit("Données du fichier '" + self.filename +
                                      "' déjà présentes pour '" +
                                      self.importer.participant.name +
                                      "' - ignorées.", LogTypes.LOGTYPE_WARNING)
                return
            self.log_request.emit("Chargement du fichier: '" + self.short_filename + "'",
                                  LogTypes.LOGTYPE_INFO)
            self.results = self.importer.load(self.filename)
            if self.results is None:
                self.log_request.emit('Erreur lors du chargement du fichier: ' +
                                      self.importer.last_error, LogTypes.LOGTYPE_ERROR)
                return
            self.log_request.emit('Importation des données...', LogTypes.LOGTYPE_INFO)
            self.importer.import_to_database(self.results)
            # Needed to clear the dict cache and let the garbage collector delete it!
            self.results.clear()
            self._add_datasources(file_md5)
            self.importer.clear_recordsets()
            self.log_request.emit('Importation du fichier complétée!', LogTypes.LOGTYPE_DONE)

    # Map file types to their importer classes (last-match semantics of the
    # original if-chain are equivalent since keys are distinct).
    importer_classes = {
        ImporterTypes.ACTIGRAPH: ActigraphImporter,
        ImporterTypes.WIMU: WIMUImporter,
        ImporterTypes.OPENIMU: OpenIMUImporter,
        ImporterTypes.APPLEWATCH: AppleWatchImporter,
    }

    importers = []
    for row in range(table.rowCount()):
        # Row layout: col 1 = participant, col 2 = file type, col 3 = file name
        part = table.item(row, 1).data(Qt.UserRole)
        file_type = table.item(row, 2).data(Qt.UserRole)
        file_name = table.item(row, 3).text()

        importer_class = importer_classes.get(file_type)
        if importer_class is None:
            # TODO: Error message
            self.reject()
        else:
            file_importer = importer_class(manager=self.dbMan, participant=part)
            # importers.append(Importer(file_name, os.stat(file_name).st_size, file_importer))
            importers.append(Importer(file_name, 100, file_importer))

    # Run all importers in the background (in sequence)
    all_tasks = []
    for task in importers:
        task.log_request.connect(self.log_request)
        all_tasks.append(task)

    # Loading in parallel is RAM intensive — kept serial for now:
    # process = BackgroundProcessForImporters(all_tasks)
    process = BackgroundProcess(all_tasks)

    # Create progress dialog
    dialog = ProgressDialog(process, 'Importation des données', self)

    # Start tasks
    process.start()

    # Show progress dialog (modal) until the background process finishes
    dialog.exec()

    self.accept()