def export_2_splite(self, target_db, dest_srid):
    """Exports a database to a new spatialite database file

    :param target_db: The name of the new database file
    :param dest_srid: srid kept in the destination db; other srids are removed
                      via db_utils.delete_srids — TODO confirm exact semantics
    :return: None
    """
    # Open a connection to the current midv db (source) and to the new file (destination).
    self.source_dbconnection = db_utils.DbConnectionManager()
    self.source_dbconnection.connect2db()  #establish connection to the current midv db
    self.dest_dbconnection = db_utils.DbConnectionManager(target_db)
    self.dest_dbconnection.connect2db()
    self.midv_data_importer = midv_data_importer()
    # Copy tables category by category, committing after each batch.
    # data_domains is written with replace=True; presumably the new db is
    # pre-populated with default domain rows — confirm.
    self.write_data(self.to_sql, None, defs.get_subset_of_tables_fr_db(category='data_domains'), replace=True)
    self.dest_dbconnection.commit()
    self.write_data(self.to_sql, self.ID_obs_points, defs.get_subset_of_tables_fr_db(category='obs_points'))
    self.dest_dbconnection.commit()
    self.write_data(self.to_sql, self.ID_obs_lines, defs.get_subset_of_tables_fr_db(category='obs_lines'))
    self.dest_dbconnection.commit()
    db_utils.delete_srids(self.dest_dbconnection.cursor, dest_srid)
    self.dest_dbconnection.commit()
    #Statistics
    statistics = self.get_table_rows_with_differences()
    self.dest_dbconnection.cursor.execute('vacuum')
    utils.MessagebarAndLog.info(bar_msg=ru(QCoreApplication.translate('ExportData', "Export done, see differences in log message panel")), log_msg=ru(QCoreApplication.translate('ExportData', "Tables with different number of rows:\n%s"))%statistics)
    self.dest_dbconnection.commit_and_closedb()
    self.source_dbconnection.closedb()
def start_import(self, all_lab_results, lablitteras_to_import):
    """Let the user map lab reports (lablittera) to obsids, then import to w_qual_lab.

    :param all_lab_results: dict of lablittera -> lab result; each value holds a
                            u'metadata' dict — assumed shape, confirm against caller
    :param lablitteras_to_import: the lablitteras the user selected for import
    :return: Cancel() on user cancel or when no observations remain;
             otherwise falls through (None) after the import
    """
    all_lab_results = copy.deepcopy(all_lab_results)
    # Keep only the lab reports the user selected.
    all_lab_results = dict([(lablittera, v) for lablittera, v in all_lab_results.iteritems() if lablittera in lablitteras_to_import])
    #Allow the user to connect the metadata rows to obsids.
    meta_headers = get_metadata_headers(all_lab_results)
    ask_obsid_table = [meta_headers]
    for lablittera, v in sorted(all_lab_results.iteritems()):
        metarow = [v[u'metadata'].get(meta_header, u'') for meta_header in meta_headers]
        ask_obsid_table.append(metarow)
    existing_obsids = utils.get_all_obsids()
    answer = utils.filter_nonexisting_values_and_ask(ask_obsid_table, u'obsid', existing_values=existing_obsids, try_capitalize=False, always_ask_user=True)
    if answer == u'cancel':
        self.status = True
        return Cancel()
    elif not answer:
        self.status = False
        utils.MessagebarAndLog.critical(bar_msg=u'Error, no observations remain. No import done.')
        return Cancel()
    else:
        # Last column of each remaining row holds the user-chosen obsid.
        remaining_lablitteras_obsids = dict([(x[0], x[-1]) for x in answer[1:]])
    #Filter the remaining lablitteras and add an obsid field
    _all_lab_results = {}
    for lablittera, v in all_lab_results.iteritems():
        if lablittera in remaining_lablitteras_obsids:
            v[u'metadata'][u'obsid'] = remaining_lablitteras_obsids[lablittera]
            _all_lab_results[lablittera] = v
    all_lab_results = _all_lab_results
    self.wquallab_data_table = self.to_table(all_lab_results)
    importer = import_data_to_db.midv_data_importer()
    answer = importer.general_import(goal_table=u'w_qual_lab', file_data=self.wquallab_data_table)
    importer.SanityCheckVacuumDB()
    if self.close_after_import.isChecked():
        self.close()
    PyQt4.QtGui.QApplication.restoreOverrideCursor()
def setUp(self):
    """Build a fresh midvatten test database and strip the obs_points triggers.

    A possibly leftover database file from an earlier run is removed first so
    new_db() always starts from a clean slate.
    """
    self.iface = DummyInterface()
    self.midvatten = midvatten.midvatten(self.iface)
    try:
        os.remove(TestObsPointsTriggers.temp_db_path)
    except OSError:
        pass
    self.midvatten.new_db()
    self.importinstance = midv_data_importer()
    # Drop the coordinate/geometry sync triggers so the tests can write
    # coordinates and geometry independently of each other.
    for trigger_name in (u'after_insert_obs_points_geom_fr_coords',
                         u'after_update_obs_points_geom_fr_coords',
                         u'after_insert_obs_points_coords_fr_geom',
                         u'after_update_obs_points_coords_fr_geom'):
        utils.sql_alter_db(u'DROP TRIGGER IF EXISTS %s' % trigger_name)
def setUp(self, mock_savefilename, mock_crsquestion, mock_qgsproject_instance):
    """Prepare a brand-new test database behind a fully mocked QGIS interface.

    :param mock_savefilename: patched save-file dialog; returns the temp db path
    :param mock_crsquestion: patched CRS question; returns EPSG 3006
    :param mock_qgsproject_instance: patched QgsProject; readEntry yields en_US locale
    """
    # Configure every mock before midvatten is created and used.
    mock_savefilename.return_value = TEMP_DB_PATH
    mock_crsquestion.return_value = [3006]
    mock_qgsproject_instance.return_value.instance.readEntry.return_value = [u'en_US']
    self.dummy_iface = DummyInterface2()
    self.iface = self.dummy_iface.mock
    self.midvatten = midvatten.midvatten(self.iface)
    # A stale database from a previous run must not survive into this test.
    try:
        os.remove(TEMP_DB_PATH)
    except OSError:
        pass
    self.midvatten.new_db()
    self.importinstance = midv_data_importer()
def setUp(self, mock_locale):
    """Create a fresh test database (sv_SE locale mocked) and drop the triggers.

    :param mock_locale: patched locale question; answers 'ok' with value 'sv_SE'
    """
    self.iface = DummyInterface()
    self.midvatten = midvatten(self.iface)
    # Remove any database left behind by an earlier run; absence is fine.
    try:
        os.remove(TestObsPointsTriggers.temp_db_path)
    except OSError:
        pass
    mock_locale.return_value.answer = u'ok'
    mock_locale.return_value.value = u'sv_SE'
    self.midvatten.new_db()
    self.importinstance = midv_data_importer()
    # The sync triggers would otherwise overwrite coordinates/geometry
    # during the tests, so drop all four of them.
    for trigger_name in (u'after_insert_obs_points_geom_fr_coords',
                         u'after_update_obs_points_geom_fr_coords',
                         u'after_insert_obs_points_coords_fr_geom',
                         u'after_update_obs_points_coords_fr_geom'):
        utils.sql_alter_db(u'DROP TRIGGER IF EXISTS %s' % trigger_name)
def setUp(self, mock_savefilename, mock_crsquestion, mock_qgsproject_instance, mock_locale):
    """Spin up a new test database with QGIS, file dialogs and locale all mocked.

    :param mock_savefilename: patched save-file dialog; returns the temp db path
    :param mock_crsquestion: patched CRS question; returns EPSG 3006
    :param mock_qgsproject_instance: patched QgsProject; readEntry replaced by MIDV_DICT
    :param mock_locale: patched locale question; answers 'ok' with value 'sv_SE'
    """
    # Group all mock configuration up front, before midvatten is exercised.
    mock_savefilename.return_value = TEMP_DB_PATH
    mock_crsquestion.return_value = [3006]
    mock_qgsproject_instance.return_value.readEntry = MIDV_DICT
    mock_locale.return_value.answer = u'ok'
    mock_locale.return_value.value = u'sv_SE'
    self.dummy_iface = DummyInterface2()
    self.iface = self.dummy_iface.mock
    self.midvatten = midvatten(self.iface)
    # Start from a clean slate; the file may not exist on the first run.
    try:
        os.remove(TEMP_DB_PATH)
    except OSError:
        pass
    self.midvatten.new_db()
    self.importinstance = midv_data_importer()
def start_import(self, observations):
    """Filter the parsed observations and import them table by table.

    :param observations: list of observation dicts; each must carry a
                         u'parametername' key — assumed schema, confirm upstream
    :return: Cancel() when no import method is chosen, None when filtering
             cancels or after the import finishes
    """
    observations = copy.deepcopy(observations)
    #Start by saving the parameter settings
    self.update_stored_settings(self.stored_settings, self.parameter_imports)
    self.save_stored_settings(self.ms, self.stored_settings, self.stored_settingskey)
    chosen_methods = [import_method_chooser.import_method for import_method_chooser in self.parameter_imports.values() if import_method_chooser.import_method]
    if not chosen_methods:
        utils.pop_up_info("Must choose at least one parameter import method")
        utils.MessagebarAndLog.critical(bar_msg="No parameter import method chosen")
        return Cancel()
    #Update the observations using the general settings, filters and parameter settings
    observations = self.filter_and_alter_data(observations, self.settings, self.settings_with_own_loop, self.parameter_imports)
    if isinstance(observations, Cancel):
        utils.MessagebarAndLog.warning(bar_msg=u"No observations left to import after filtering")
        # NOTE(review): the other early exits return Cancel(); this one returns
        # None — confirm callers treat both the same way.
        return None
    # Bucket observations per chosen import method (destination table).
    observations_importmethods = {}
    for observation in observations:
        if self.parameter_imports[observation[u'parametername']].import_method:
            observations_importmethods.setdefault(self.parameter_imports[observation[u'parametername']].import_method, []).append(observation)
    importer = import_data_to_db.midv_data_importer()
    data_preparers = {u'w_levels': self.prepare_w_level_data,
                      u'w_flow': self.prepare_w_flow_data,
                      u'w_qual_field': self.prepare_w_qual_field_data,
                      u'comments': self.prepare_comments_data}
    # NOTE: loop variable 'observations' shadows the parameter from here on.
    for import_method, observations in observations_importmethods.iteritems():
        if import_method:
            file_data = data_preparers[import_method](observations)
            importer.send_file_data_to_importer(file_data, partial(importer.general_csv_import, goal_table=import_method))
    importer.SanityCheckVacuumDB()
    PyQt4.QtGui.QApplication.restoreOverrideCursor()
def setUp(self):
    """Run the postgis base-class setup, then create the importer under test."""
    super(MidvattenTestPostgisDbSvImportInstance, self).setUp()
    self.importinstance = midv_data_importer()
def setUp(self):
    """Run the spatialite base-class setup, then create the importer under test."""
    super(MidvattenTestSpatialiteDbSvImportInstance, self).setUp()
    self.importinstance = midv_data_importer()
def start_import(self, files, skip_rows_without_water_level, confirm_names, import_all_data, from_date=None, to_date=None, export_csv=False, import_to_db=True):
    """Parse logger files, map each file to an obsid, then import and/or export.

    :param files: list of file paths to parse
    :param skip_rows_without_water_level: forwarded to the parser
    :param confirm_names: if True the user is always asked to confirm the obsid mapping
    :param import_all_data: if False, rows not newer than the last logger date are dropped
    :param from_date: optional begin date forwarded to the parser
    :param to_date: optional end date forwarded to the parser
    :param export_csv: if True, offer to save the merged file data as csv
    :param import_to_db: if True, import the merged data into w_levels_logger
    :return: 'cancel' on user cancel, None when no files parsed, False if every
             file was skipped, True when no new rows existed; falls through
             (None) after a successful import/export.
    """
    utils.start_waiting_cursor()  #show the user this may take a long time...
    parsed_files = []
    for selected_file in files:
        try:
            res = self.parse_func(path=selected_file, charset=self.charsetchoosen, skip_rows_without_water_level=skip_rows_without_water_level, begindate=from_date, enddate=to_date)
        except:
            # Bare except only logs which file failed; the error is re-raised.
            # NOTE(review): translation context 'LeveloggerImport' here vs
            # 'DiverofficeImport' below — confirm intended.
            utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate('LeveloggerImport', '''Error on file %s.''')) % selected_file)
            raise
        if res == 'cancel':
            self.status = True
            utils.stop_waiting_cursor()
            return res
        elif res in ('skip', 'ignore'):
            continue
        try:
            file_data, filename, location = res
        except Exception as e:
            utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', 'Import error, see log message panel'), log_msg=ru(QCoreApplication.translate('DiverofficeImport', 'File %s could not be parsed. Msg:\n%s')) % (selected_file, str(e)))
            continue
        parsed_files.append((file_data, filename, location))
    if len(parsed_files) == 0:
        utils.MessagebarAndLog.critical(bar_msg=QCoreApplication.translate('DiverofficeImport', "Import Failure: No files imported" ""))
        utils.stop_waiting_cursor()
        return
    #Add obsid to all parsed filedatas by asking the user for it.
    filename_location_obsid = [['filename', 'location', 'obsid']]
    filename_location_obsid.extend([[parsed_file[1], parsed_file[2], parsed_file[2]] for parsed_file in parsed_files])
    if confirm_names:
        try_capitalize = False
    else:
        try_capitalize = True
    existing_obsids = utils.get_all_obsids()
    utils.stop_waiting_cursor()  # the question dialog needs a normal cursor
    filename_location_obsid = utils.filter_nonexisting_values_and_ask(file_data=filename_location_obsid, header_value='obsid', existing_values=existing_obsids, try_capitalize=try_capitalize, always_ask_user=confirm_names)
    utils.start_waiting_cursor()
    if len(filename_location_obsid) < 2:
        utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', 'Warning. All files were skipped, nothing imported!'))
        utils.stop_waiting_cursor()
        return False
    filenames_obsid = dict([(x[0], x[2]) for x in filename_location_obsid[1:]])
    # Append the chosen obsid as a new column on every remaining file's rows.
    parsed_files_with_obsid = []
    for file_data, filename, location in parsed_files:
        if filename in filenames_obsid:
            file_data = list(file_data)
            obsid = filenames_obsid[filename]
            file_data[0].append('obsid')
            [row.append(obsid) for row in file_data[1:]]
            parsed_files_with_obsid.append([file_data, filename, location])
    #Header
    file_to_import_to_db = [parsed_files_with_obsid[0][0][0]]
    file_to_import_to_db.extend([row for parsed_file in parsed_files_with_obsid for row in parsed_file[0][1:]])
    # Add comment to import:
    #file_to_import_to_db[0].append('comment')
    #comment = ''
    #[row.append(comment) for row in file_to_import_to_db[1:]]
    if not import_all_data:
        file_to_import_to_db = self.filter_dates_from_filedata(file_to_import_to_db, utils.get_last_logger_dates())
    if len(file_to_import_to_db) < 2:
        utils.MessagebarAndLog.info(bar_msg=QCoreApplication.translate('DiverofficeImport', 'No new data existed in the files. Nothing imported.'))
        self.status = 'True'  # NOTE(review): string 'True' while other exits use booleans — confirm intended
        utils.stop_waiting_cursor()
        return True
    if import_to_db:
        importer = import_data_to_db.midv_data_importer()
        answer = importer.general_import('w_levels_logger', file_to_import_to_db)
    if export_csv:
        path = qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName(self, 'Save File', '', 'CSV(*.csv)')
        if path:
            path = ru(path[0])
            utils.write_printlist_to_file(path, file_to_import_to_db)
    utils.stop_waiting_cursor()
    if self.close_after_import.isChecked():
        self.close()
def start_import(self, files, skip_rows_without_water_level, confirm_names, import_all_data, from_date=None, to_date=None):
    """Parse logger files, map each file to an obsid, then import to w_levels_logger.

    :param files: list of file paths to parse
    :param skip_rows_without_water_level: forwarded to the parser
    :param confirm_names: if True the user is always asked to confirm the obsid mapping
    :param import_all_data: if False, rows not newer than the last logger date are dropped
    :param from_date: optional begin date forwarded to the parser
    :param to_date: optional end date forwarded to the parser
    :return: 'cancel' on user cancel, None when no files parsed, False if every
             file was skipped, True when no new rows existed; falls through
             (None) after a successful import.
    """
    utils.start_waiting_cursor()  #show the user this may take a long time...
    parsed_files = []
    for selected_file in files:
        res = self.parse_func(path=selected_file, charset=self.charsetchoosen, skip_rows_without_water_level=skip_rows_without_water_level, begindate=from_date, enddate=to_date)
        if res == 'cancel':
            self.status = True
            utils.stop_waiting_cursor()
            return res
        elif res in ('skip', 'ignore'):
            continue
        try:
            file_data, filename, location = res
        except Exception as e:
            utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', 'Import error, see log message panel'), log_msg=ru(QCoreApplication.translate('DiverofficeImport', 'File %s could not be parsed. Msg:\n%s')) % (selected_file, str(e)))
            continue
        parsed_files.append((file_data, filename, location))
    if len(parsed_files) == 0:
        utils.MessagebarAndLog.critical(bar_msg=QCoreApplication.translate('DiverofficeImport', "Import Failure: No files imported" ""))
        utils.stop_waiting_cursor()
        return
    #Add obsid to all parsed filedatas by asking the user for it.
    filename_location_obsid = [['filename', 'location', 'obsid']]
    filename_location_obsid.extend([[parsed_file[1], parsed_file[2], parsed_file[2]] for parsed_file in parsed_files])
    if confirm_names:
        try_capitalize = False
    else:
        try_capitalize = True
    existing_obsids = utils.get_all_obsids()
    utils.stop_waiting_cursor()  # the question dialog needs a normal cursor
    filename_location_obsid = utils.filter_nonexisting_values_and_ask(file_data=filename_location_obsid, header_value='obsid', existing_values=existing_obsids, try_capitalize=try_capitalize, always_ask_user=confirm_names)
    utils.start_waiting_cursor()
    if len(filename_location_obsid) < 2:
        utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', 'Warning. All files were skipped, nothing imported!'))
        utils.stop_waiting_cursor()
        return False
    filenames_obsid = dict([(x[0], x[2]) for x in filename_location_obsid[1:]])
    # Append the chosen obsid as a new column on every remaining file's rows.
    parsed_files_with_obsid = []
    for file_data, filename, location in parsed_files:
        if filename in filenames_obsid:
            file_data = list(file_data)
            obsid = filenames_obsid[filename]
            file_data[0].append('obsid')
            [row.append(obsid) for row in file_data[1:]]
            parsed_files_with_obsid.append([file_data, filename, location])
    #Header
    file_to_import_to_db = [parsed_files_with_obsid[0][0][0]]
    file_to_import_to_db.extend([row for parsed_file in parsed_files_with_obsid for row in parsed_file[0][1:]])
    if not import_all_data:
        file_to_import_to_db = self.filter_dates_from_filedata(file_to_import_to_db, utils.get_last_logger_dates())
    if len(file_to_import_to_db) < 2:
        utils.MessagebarAndLog.info(bar_msg=QCoreApplication.translate('DiverofficeImport', 'No new data existed in the files. Nothing imported.'))
        self.status = 'True'  # NOTE(review): string 'True' while other exits use booleans — confirm intended
        utils.stop_waiting_cursor()
        return True
    importer = import_data_to_db.midv_data_importer()
    answer = importer.general_import('w_levels_logger', file_to_import_to_db)
    utils.stop_waiting_cursor()
    if self.close_after_import.isChecked():
        self.close()
def start_import(self):
    """Import the previously loaded csv file data into the chosen table.

    TODO: I have NO IDEA where the dummy parameter is coming from. It gets the value False for some reason!
    :param dummy:
    :return: 'cancel' when an obsid-from-selection requires exactly one selected
             feature and that fails; otherwise falls through (None) after import
    :raises utils.UsageError: if no file has been selected yet
    """
    if self.file_data is None:
        raise utils.UsageError(ru(QCoreApplication.translate('GeneralCsvImportGui', 'Error, must select a file first!')))
    translation_dict = self.table_chooser.get_translation_dict()
    file_data = copy.deepcopy(self.file_data)
    goal_table = self.table_chooser.import_method
    # Find the table whose foreign key provides valid obsids; fall back to the
    # goal table itself when the relation is ambiguous or absent.
    foreign_keys = db_utils.get_foreign_keys(goal_table)
    foreign_key_obsid_tables = [tname for tname, colnames in foreign_keys.items() for colname in colnames if colname[0] == 'obsid']
    if len(foreign_key_obsid_tables) == 1:
        foreign_key_obsid_table = foreign_key_obsid_tables[0]
    else:
        foreign_key_obsid_table = goal_table
    # Resolve special "column" objects (obsid-from-selection, static values)
    # into real columns with constant values on every data row.
    for file_column in list(translation_dict.keys()):
        alter_colnames = []
        new_value = None
        # Check if obsid should be set from selection and add an obsid-column if so.
        if isinstance(file_column, Obsids_from_selection):
            selected = utils.get_selected_features_as_tuple()
            if len(selected) != 1:
                utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate('GeneralCsvImportGui', 'Import error, must select 1 obsid')), duration=60)
                return 'cancel'
            alter_colnames = ['obsid']
            new_value = selected[0]
        elif isinstance(file_column, StaticValue):
            if translation_dict[file_column]:
                alter_colnames = translation_dict[file_column]
                new_value = file_column.value
        for alter_colname in alter_colnames:
            # NOTE(review): this guard re-tests alter_colnames (the list) instead
            # of alter_colname — presumably a slip, confirm intended.
            if alter_colnames is not None and new_value is not None:
                try:
                    colindex = file_data[0].index(alter_colname)
                except ValueError:
                    # Column missing: create it at the end of the header.
                    colindex = len(file_data[0])
                    file_data[0].append(alter_colname)
                for row in file_data[1:]:
                    if colindex + 1 < len(file_data[0]):
                        row[colindex] = new_value
                    else:
                        row.append(new_value)
                #[row.insert(obsidindex, selected[0]) if obsidindex + 1 < len(file_data[0]) else row.append(selected[0]) for row in file_data[1:]]
                # NOTE(review): if alter_colnames has more than one entry this del
                # raises KeyError on the second pass — confirm it is always length 1.
                del translation_dict[file_column]
                translation_dict[alter_colname] = [alter_colname]
    columns_factors = self.table_chooser.get_columns_factors_dict()
    #Translate column names and add columns that appear more than once
    file_data = self.translate_and_reorder_file_data(file_data, translation_dict)
    file_data = self.convert_comma_to_points_for_double_columns(file_data, self.tables_columns_info[goal_table])
    if columns_factors:
        file_data = self.multiply_by_factor(file_data, columns_factors)
    file_data = self.remove_preceding_trailing_spaces_tabs(file_data)
    if foreign_key_obsid_table and foreign_key_obsid_table != goal_table and 'obsid' in file_data[0]:
        file_data = utils.filter_nonexisting_values_and_ask(file_data, 'obsid', utils.get_all_obsids(foreign_key_obsid_table), try_capitalize=False)
    file_data = self.reformat_date_time(file_data)
    importer = import_data_to_db.midv_data_importer()
    answer = importer.general_import(goal_table=goal_table, file_data=file_data)
    utils.stop_waiting_cursor()
    if self.close_after_import.isChecked():
        self.close()
def start_import(self):
    """Import the previously loaded csv file data into the chosen table.

    TODO: I have NO IDEA where the dummy parameter is coming from. It gets the value False for some reason!
    :param dummy:
    :return: 'cancel' when an obsid-from-selection requires exactly one selected
             feature and that fails; otherwise falls through (None) after import
    :raises utils.UsageError: if no file has been selected yet
    """
    if self.file_data is None:
        raise utils.UsageError(ru(QCoreApplication.translate('GeneralCsvImportGui', 'Error, must select a file first!')))
    translation_dict = self.table_chooser.get_translation_dict()
    file_data = copy.deepcopy(self.file_data)
    dest_table = self.table_chooser.import_method
    # Find the table whose foreign key provides valid obsids; fall back to the
    # destination table itself when the relation is ambiguous or absent.
    foreign_keys = db_utils.get_foreign_keys(dest_table)
    foreign_key_obsid_tables = [tname for tname, colnames in foreign_keys.items() for colname in colnames if colname[0] == 'obsid']
    if len(foreign_key_obsid_tables) == 1:
        foreign_key_obsid_table = foreign_key_obsid_tables[0]
    else:
        foreign_key_obsid_table = dest_table
    # Resolve special "column" objects (obsid-from-selection, static values)
    # into real columns with constant values on every data row.
    for file_column in list(translation_dict.keys()):
        alter_colnames = []
        new_value = None
        # Check if obsid should be set from selection and add an obsid-column if so.
        if isinstance(file_column, Obsids_from_selection):
            selected = utils.get_selected_features_as_tuple()
            if len(selected) != 1:
                utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate('GeneralCsvImportGui', 'Import error, must select 1 obsid')), duration=60)
                return 'cancel'
            alter_colnames = ['obsid']
            new_value = selected[0]
        elif isinstance(file_column, StaticValue):
            if translation_dict[file_column]:
                alter_colnames = translation_dict[file_column]
                new_value = file_column.value
        for alter_colname in alter_colnames:
            # NOTE(review): this guard re-tests alter_colnames (the list) instead
            # of alter_colname — presumably a slip, confirm intended.
            if alter_colnames is not None and new_value is not None:
                try:
                    colindex = file_data[0].index(alter_colname)
                except ValueError:
                    # Column missing: create it at the end of the header.
                    colindex = len(file_data[0])
                    file_data[0].append(alter_colname)
                for row in file_data[1:]:
                    if colindex + 1 < len(file_data[0]):
                        row[colindex] = new_value
                    else:
                        row.append(new_value)
                #[row.insert(obsidindex, selected[0]) if obsidindex + 1 < len(file_data[0]) else row.append(selected[0]) for row in file_data[1:]]
                # NOTE(review): if alter_colnames has more than one entry this del
                # raises KeyError on the second pass — confirm it is always length 1.
                del translation_dict[file_column]
                translation_dict[alter_colname] = [alter_colname]
    columns_factors = self.table_chooser.get_columns_factors_dict()
    #Translate column names and add columns that appear more than once
    file_data = self.translate_and_reorder_file_data(file_data, translation_dict)
    file_data = self.convert_comma_to_points_for_double_columns(file_data, self.tables_columns_info[dest_table])
    if columns_factors:
        file_data = self.multiply_by_factor(file_data, columns_factors)
    file_data = self.remove_preceding_trailing_spaces_tabs(file_data)
    if foreign_key_obsid_table and foreign_key_obsid_table != dest_table and 'obsid' in file_data[0]:
        file_data = utils.filter_nonexisting_values_and_ask(file_data, 'obsid', utils.get_all_obsids(foreign_key_obsid_table), try_capitalize=False)
    file_data = self.reformat_date_time(file_data)
    importer = import_data_to_db.midv_data_importer()
    answer = importer.general_import(dest_table=dest_table, file_data=file_data)
    utils.stop_waiting_cursor()
    if self.close_after_import.isChecked():
        self.close()
def start_import(self, files, skip_rows_without_water_level, confirm_names, import_all_data, from_date=None, to_date=None):
    """Parse logger files, map each file to an obsid, then import to w_levels_logger.

    (PyQt4/Python-2 variant of the diveroffice import.)

    :param files: list of file paths to parse
    :param skip_rows_without_water_level: forwarded to the parser
    :param confirm_names: if True the user is always asked to confirm the obsid mapping
    :param import_all_data: if False, rows not newer than the last logger date are dropped
    :param from_date: optional begin date forwarded to the parser
    :param to_date: optional end date forwarded to the parser
    :return: u'cancel' on user cancel, None when no files parsed, False if every
             file was skipped, True when no new rows existed; falls through
             (None) after a successful import.
    """
    PyQt4.QtGui.QApplication.setOverrideCursor(PyQt4.QtGui.QCursor(PyQt4.QtCore.Qt.WaitCursor))  #show the user this may take a long time...
    parsed_files = []
    for selected_file in files:
        res = self.parse_func(path=selected_file, charset=self.charsetchoosen, skip_rows_without_water_level=skip_rows_without_water_level, begindate=from_date, enddate=to_date)
        if res == u'cancel':
            self.status = True
            PyQt4.QtGui.QApplication.restoreOverrideCursor()
            return res
        elif res in (u'skip', u'ignore'):
            continue
        try:
            file_data, filename, location = res
        except Exception as e:
            utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', u'Import error, see log message panel'), log_msg=ru(QCoreApplication.translate('DiverofficeImport', u'File %s could not be parsed. Msg:\n%s')) % (selected_file, str(e)))
            continue
        parsed_files.append((file_data, filename, location))
    if len(parsed_files) == 0:
        utils.MessagebarAndLog.critical(bar_msg=QCoreApplication.translate('DiverofficeImport', u"Import Failure: No files imported" ""))
        PyQt4.QtGui.QApplication.restoreOverrideCursor()
        return
    #Add obsid to all parsed filedatas by asking the user for it.
    filename_location_obsid = [[u'filename', u'location', u'obsid']]
    filename_location_obsid.extend([[parsed_file[1], parsed_file[2], parsed_file[2]] for parsed_file in parsed_files])
    if confirm_names:
        try_capitalize = False
    else:
        try_capitalize = True
    existing_obsids = utils.get_all_obsids()
    # NOTE(review): the wait cursor is not restored before this question dialog
    # (the py3 variant does restore it) — confirm intended.
    filename_location_obsid = utils.filter_nonexisting_values_and_ask(file_data=filename_location_obsid, header_value=u'obsid', existing_values=existing_obsids, try_capitalize=try_capitalize, always_ask_user=confirm_names)
    if len(filename_location_obsid) < 2:
        utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', u'Warning. All files were skipped, nothing imported!'))
        PyQt4.QtGui.QApplication.restoreOverrideCursor()
        return False
    filenames_obsid = dict([(x[0], x[2]) for x in filename_location_obsid[1:]])
    # Append the chosen obsid as a new column on every remaining file's rows.
    parsed_files_with_obsid = []
    for file_data, filename, location in parsed_files:
        if filename in filenames_obsid:
            file_data = list(file_data)
            obsid = filenames_obsid[filename]
            file_data[0].append(u'obsid')
            [row.append(obsid) for row in file_data[1:]]
            parsed_files_with_obsid.append([file_data, filename, location])
    #Header
    file_to_import_to_db = [parsed_files_with_obsid[0][0][0]]
    file_to_import_to_db.extend([row for parsed_file in parsed_files_with_obsid for row in parsed_file[0][1:]])
    if not import_all_data:
        file_to_import_to_db = self.filter_dates_from_filedata(file_to_import_to_db, utils.get_last_logger_dates())
    if len(file_to_import_to_db) < 2:
        utils.MessagebarAndLog.info(bar_msg=QCoreApplication.translate('DiverofficeImport', u'No new data existed in the files. Nothing imported.'))
        self.status = 'True'  # NOTE(review): string 'True' while other exits use booleans — confirm intended
        PyQt4.QtGui.QApplication.restoreOverrideCursor()
        return True
    importer = import_data_to_db.midv_data_importer()
    answer = importer.general_import(u'w_levels_logger', file_to_import_to_db)
    PyQt4.QtGui.QApplication.restoreOverrideCursor()
    importer.SanityCheckVacuumDB()
    # NOTE(review): restoreOverrideCursor is called twice around the vacuum —
    # presumably harmless (no matching setOverrideCursor in between); confirm.
    PyQt4.QtGui.QApplication.restoreOverrideCursor()
    if self.close_after_import.isChecked():
        self.close()
def setUp(self):
    """Run the base-class setup, then create the importer instance under test."""
    super().setUp()
    self.importinstance = midv_data_importer()