def adjust_trend_func(self):

        obsid = self.load_obsid_and_init()
        if obsid is None:
            return None

        data = {'obsid': obsid,
                'adjust_start_date': long_dateformat(self.FromDateTime.dateTime().toPyDateTime()),
                'adjust_end_date': long_dateformat(self.ToDateTime.dateTime().toPyDateTime()),
                'L1_date': db_utils.cast_date_time_as_epoch(date_time=long_dateformat(self.L1_date.dateTime().toPyDateTime())),
                'L2_date': db_utils.cast_date_time_as_epoch(date_time=long_dateformat(self.L2_date.dateTime().toPyDateTime())),
                'M1_date': db_utils.cast_date_time_as_epoch(date_time=long_dateformat(self.M1_date.dateTime().toPyDateTime())),
                'M2_date': db_utils.cast_date_time_as_epoch(date_time=long_dateformat(self.M2_date.dateTime().toPyDateTime())),
                'L1_level': str(float(self.L1_level.text())),
                'L2_level': str(float(self.L2_level.text())),
                'M1_level': str(float(self.M1_level.text())),
                'M2_level': str(float(self.M2_level.text())),
                'date_as_numeric': db_utils.cast_date_time_as_epoch()}

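        # Correct a linear drift: the difference between the L1->L2 slope and the
        # M1->M2 slope, multiplied by the time elapsed since L1_date, is subtracted
        # from level_masl for every row inside the chosen period.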
        sql = """
                UPDATE w_levels_logger SET level_masl = level_masl -
                (
                 ((({L1_level} - {L2_level}) / ({L1_date} - {L2_date}))
                 - (({M1_level} - {M2_level}) / ({M1_date} - {M2_date})))
                  * ({date_as_numeric} - {L1_date})
                )
                WHERE obsid = '{obsid}' AND date_time > '{adjust_start_date}' AND date_time < '{adjust_end_date}'
            """.format(**data)
        utils.start_waiting_cursor()
        db_utils.sql_alter_db(sql)
        utils.stop_waiting_cursor()
        self.update_plot()
    def wqualreport(self):
        utils.start_waiting_cursor()
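        # Collect the report settings from the dialog widgets.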
        num_data_cols = int(self.num_data_cols.text())
        rowheader_colwidth_percent = int(
            self.rowheader_colwidth_percent.text())
        empty_row_between_tables = self.empty_row_between_tables.isChecked()
        page_break_between_tables = self.page_break_between_tables.isChecked()
        from_active_layer = self.from_active_layer.isChecked()
        from_sql_table = self.from_sql_table.isChecked()
        sql_table = self.sql_table.currentText()
        sort_alphabetically = self.sort_alphabetically.isChecked()
        sort_by_obsid = self.sort_by_obsid.isChecked()
        date_time_as_columns = self.date_time_as_columns.isChecked()
        date_time_format = self.date_time_formats[
            self.date_time_format.currentText()]
        method = self.methods[self.method.currentText()]
        data_column = self.data_column.currentText()

        self.save_stored_settings(self.save_attrnames)

        wqual = Wqualreport(
            self.ms.settingsdict, num_data_cols, rowheader_colwidth_percent,
            empty_row_between_tables, page_break_between_tables,
            from_active_layer, sql_table, sort_alphabetically, sort_by_obsid,
            date_time_as_columns, date_time_format, method, data_column)
        utils.stop_waiting_cursor()
    def calculate_offset(self):
        """ Part of adjustment method 3. adjust level_masl by clicking in plot.
        this method extracts the head from head_ts with the same date as the line node.
            4. Calculating y-position - head (or level_masl) and setting self.LoggerPos.
            5. Run calibration.
        """            
        if self.log_pos is not None and self.y_pos is not None:
            utils.start_waiting_cursor()

            logger_ts = self.level_masl_ts
            
            y_pos = self.y_pos
            log_pos = self.log_pos
            self.y_pos = None
            self.log_pos = None
            log_pos_date = datestring_to_date(log_pos).replace(tzinfo=None)
            logger_value = None

            # Get the logger value for the selected node
            for raw_date, value in logger_ts:
                date = datestring_to_date(raw_date).replace(tzinfo=None)
                if date == log_pos_date:
                    logger_value = value
                    break

            if logger_value is None:
                utils.stop_waiting_cursor()
                utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "No connection between level_masl dates and logger date could be made!\nTry again or choose a new logger line node!")))
            else:
                self.Add2Levelmasl.setText(str(float(y_pos) - float(logger_value)))
                utils.stop_waiting_cursor()

        self.pushButtonMpos.setEnabled(False)
    def load_files(self):
        charset = utils.ask_for_charset()
        if not charset:
            raise utils.UserInterruptError()
        filename = utils.select_files(only_one_file=True, extension=ru(QCoreApplication.translate('GeneralCsvImportGui', "Comma or semicolon separated csv file %s;;Comma or semicolon separated csv text file %s;;Comma or semicolon separated file %s"))%('(*.csv)', '(*.txt)', '(*.*)'))
        if isinstance(filename, (list, tuple)):
            filename = filename[0]

        filename = ru(filename)

        delimiter = utils.get_delimiter(filename=filename, charset=charset, delimiters=[',', ';'])
        self.file_data = self.file_to_list(filename, charset, delimiter)

        header_question = utils.Askuser(question="YesNo", msg=ru(QCoreApplication.translate('GeneralCsvImportGui', """Does the file contain a header?""")))

        utils.start_waiting_cursor()
        if header_question.result:
            # Remove duplicate header entries
            header = self.file_data[0]
            seen = set()
            seen_add = seen.add
            remove_cols = [idx for idx, x in enumerate(header) if x and (x in seen or seen_add(x))]
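            # seen_add() returns None, so the first occurrence of each header is kept
            # and only the column indexes of later duplicates end up in remove_cols.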
            self.file_data = [[col for idx, col in enumerate(row) if idx not in remove_cols] for row in self.file_data]

            self.table_chooser.file_header = self.file_data[0]
        else:
            header = ['Column ' + str(colnr) for colnr in range(len(self.file_data[0]))]
            self.table_chooser.file_header = header
            self.file_data.insert(0, header)
        utils.stop_waiting_cursor()
    def delete_selected_range(self, table_name):
        """ Deletes the current selected range from the database from w_levels_logger
        :return: De
        """
        current_loaded_obsid = self.obsid
        selected_obsid = self.load_obsid_and_init()
        if current_loaded_obsid != selected_obsid:
            utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "Error!\n The obsid selection has been changed but the plot has not been updated. No deletion done.\nUpdating plot.")))
            self.update_plot()
            return
        elif selected_obsid is None:
            utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "Error!\n No obsid was selected. No deletion done.\nUpdating plot.")))
            self.update_plot()
            return

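        # Convert the chosen period to seconds since 1970-01-01 so it can be
        # compared with CAST(strftime('%s', date_time) AS NUMERIC) below.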
        fr_d_t = str((self.FromDateTime.dateTime().toPyDateTime() - datetime.datetime(1970,1,1)).total_seconds())
        to_d_t = str((self.ToDateTime.dateTime().toPyDateTime() - datetime.datetime(1970,1,1)).total_seconds())

        sql_list = []
        sql_list.append(r"""DELETE FROM "%s" """%table_name)
        sql_list.append(r"""WHERE obsid = '%s' """%selected_obsid)
        # This %s is db formatting for seconds. It is not used as python formatting!
        sql_list.append(r"""AND CAST(strftime('%s', date_time) AS NUMERIC) """)
        sql_list.append(r""" > '%s' """%fr_d_t)
        # This %s is db formatting for seconds. It is not used as python formatting!
        sql_list.append(r"""AND CAST(strftime('%s', date_time) AS NUMERIC) """)
        sql_list.append(r""" < '%s' """%to_d_t)
        sql = ''.join(sql_list)

        really_delete = utils.Askuser("YesNo", ru(QCoreApplication.translate('Calibrlogger', "Do you want to delete the period %s to %s for obsid %s from table %s?"))%(str(self.FromDateTime.dateTime().toPyDateTime()), str(self.ToDateTime.dateTime().toPyDateTime()), selected_obsid, table_name)).result
        if really_delete:
            utils.start_waiting_cursor()
            db_utils.sql_alter_db(sql)
            utils.stop_waiting_cursor()
            self.update_plot()
Example #7
 def showSurvey(self):
     #lyr = self.iface.activeLayer() # THIS IS TSPLOT-method, GETS THE SELECTED LAYER
     lyr = self.layer
     ids = lyr.selectedFeatureIds()
     if len(ids) == 0:
         utils.pop_up_info(ru(QCoreApplication.translate(' Stratigraphy', "No selection")), ru(QCoreApplication.translate(' Stratigraphy', "No features are selected")))
         return
     # initiate the datastore if not yet done   
     self.initStore()   
     utils.start_waiting_cursor()  # Sets the mouse cursor to wait symbol
     try:  # store.getData may raise DataSanityError if the data fails the sanity checks
         self.data = self.store.getData(ids, lyr)    # added lyr as an argument!!!
     except DataSanityError as e:  # the stratigraphy data failed a sanity check
         print("DataSanityError %s"%str(e))
         utils.stop_waiting_cursor()
         utils.pop_up_info(ru(QCoreApplication.translate(' Stratigraphy', "Data sanity problem, obsid: %s\n%s")) % (e.sond_id, e.message))
         return
     except Exception as e:  # any other error while loading the data
         print("exception : %s"%str(e))
         utils.stop_waiting_cursor()
         utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate(' Stratigraphy', "The stratigraphy plot failed, check Midvatten plugin settings and your data!")))
         return
     utils.stop_waiting_cursor()  # Restores the mouse cursor to normal symbol
     # show widget
     w = SurveyDialog()
     #w.widget.setData2_nosorting(data)  #THIS IS IF DATA IS NOT TO BE SORTED!!
     w.widget.setData(self.data)  #THIS IS ONLY TO SORT DATA!!
     w.show()
     self.w = w  # save a reference so the dialog isn't deleted immediately; this has to be done both here and in the midvatten instance
    def __init__(self, parent, settingsdict1={}, obsid=''):
        utils.start_waiting_cursor()#show the user this may take a long time...
        self.obsid = obsid
        self.log_pos = None
        self.y_pos = None
        self.meas_ts = None
        self.head_ts = None
        self.head_ts_for_plot = None
        self.level_masl_ts = None
        self.loggerpos_masl_or_offset_state = 1

        self.settingsdict = settingsdict1
        qgis.PyQt.QtWidgets.QDialog.__init__(self, parent)
        self.setAttribute(qgis.PyQt.QtCore.Qt.WA_DeleteOnClose)
        self.setupUi(self) # Required by Qt4 to initialize the UI
        self.setWindowTitle(ru(QCoreApplication.translate('Calibrlogger', "Calculate water level from logger"))) # Set the title for the dialog
        self.INFO.setText(ru(QCoreApplication.translate('Calibrlogger', "Select the observation point with logger data to be adjusted.")))
        self.log_calc_manual.setText("<a href=\"https://github.com/jkall/qgis-midvatten-plugin/wiki/4.-Edit-data\">Midvatten manual</a>")
      
        # Create a plot window with one single subplot
        self.calibrplotfigure = plt.figure()
        self.axes = self.calibrplotfigure.add_subplot( 111 )
        self.canvas = FigureCanvas( self.calibrplotfigure )
        self.mpltoolbar = NavigationToolbar( self.canvas, self.widgetPlot )
        self.layoutplot.addWidget( self.canvas )
        self.layoutplot.addWidget( self.mpltoolbar )

        self.show()

        self.cid =[]
                
        self.pushButtonSet.clicked.connect(lambda x: self.set_logger_pos())
        self.pushButtonAdd.clicked.connect(lambda x: self.add_to_level_masl())
        self.pushButtonFrom.clicked.connect(lambda x: self.set_from_date_from_x())
        self.pushButtonTo.clicked.connect(lambda x: self.set_to_date_from_x())
        self.L1_button.clicked.connect(lambda x: self.set_adjust_data('L1_date', 'L1_level'))
        self.L2_button.clicked.connect(lambda x: self.set_adjust_data('L2_date', 'L2_level'))
        self.M1_button.clicked.connect(lambda x: self.set_adjust_data('M1_date', 'M1_level'))
        self.M2_button.clicked.connect(lambda x: self.set_adjust_data('M2_date', 'M2_level'))
        self.pushButton_from_extent.clicked.connect(lambda: self.FromDateTime.setDateTime(num2date(self.axes.get_xbound()[0])))
        self.pushButton_to_extent.clicked.connect(lambda: self.ToDateTime.setDateTime(num2date(self.axes.get_xbound()[1])))
        self.pushButtonupdateplot.clicked.connect(lambda x: self.update_plot())
        self.pushButtonLpos.clicked.connect(lambda x: self.catch_old_level())
        self.pushButtonMpos.clicked.connect(lambda x: self.catch_new_level())
        self.pushButtonMpos.setEnabled(False)
        self.pushButtonCalcBestFit.clicked.connect(lambda x: self.logger_pos_best_fit())
        self.pushButtonCalcBestFit.setToolTip(ru(QCoreApplication.translate('Calibrlogger', 'This will calibrate all values inside the chosen period\nusing the mean difference between head_cm and w_levels measurements.\n\nThe search radius is the maximum time distance allowed\n between a logger measurement and a w_level measurement.')))
        self.pushButtonCalcBestFit2.clicked.connect(lambda x: self.level_masl_best_fit())
        self.pushButtonCalcBestFit2.setToolTip(ru(QCoreApplication.translate('Calibrlogger', 'This will calibrate all values inside the chosen period\nusing the mean difference between level_masl and w_levels measurements.\n\nThe search radius is the maximum time distance allowed\n between a logger measurement and a w_level measurement.')))
        self.pushButton_delete_logger.clicked.connect(lambda: self.delete_selected_range('w_levels_logger'))
        self.adjust_trend_button.clicked.connect(lambda x: self.adjust_trend_func())

        self.get_search_radius()

        # Populate combobox with obsid from table w_levels_logger
        self.load_obsid_from_db()

        utils.stop_waiting_cursor()#now this long process is done and the cursor is back as normal
Example #9
 def file_to_list(filename, charset, delimiter, quotechar='"'):
     utils.start_waiting_cursor()
     try:
         with open(filename, 'rt', encoding=str(charset)) as f:
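             # Strip leading whitespace and trailing newline characters before handing the rows to csv.reader.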
             rows_unsplit = [row.lstrip().rstrip('\n').rstrip('\r') for row in f]
             csvreader = csv.reader(rows_unsplit, delimiter=delimiter, quotechar=quotechar)
             file_data = [ru(row, keep_containers=True) for row in csvreader]
     except:
         utils.stop_waiting_cursor()
         raise
     else:
         utils.stop_waiting_cursor()
     return file_data
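For reference, a minimal call of the helper above would look like file_data = file_to_list('levels.csv', 'utf-8', ';'), returning the parsed rows as a list of lists; the filename and delimiter here are only placeholders.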
Example #10
 def showSurvey(self):
     #lyr = self.iface.activeLayer() # THIS IS TSPLOT-method, GETS THE SELECTED LAYER
     lyr = self.layer
     ids = lyr.selectedFeatureIds()
     if len(ids) == 0:
         utils.pop_up_info(
             ru(QCoreApplication.translate(' Stratigraphy',
                                           "No selection")),
             ru(
                 QCoreApplication.translate(' Stratigraphy',
                                            "No features are selected")))
         return
     # initiate the datastore if not yet done
     self.initStore()
     utils.start_waiting_cursor()  # Sets the mouse cursor to wait symbol
     try:  # store.getData may raise DataSanityError if the data fails the sanity checks
         self.data = self.store.getData(ids,
                                        lyr)  # added lyr as an argument!!!
     except DataSanityError as e:  # the stratigraphy data failed a sanity check
         print("DataSanityError %s" % str(e))
         utils.stop_waiting_cursor()
         utils.pop_up_info(
             ru(
                 QCoreApplication.translate(
                     ' Stratigraphy', "Data sanity problem, obsid: %s\n%s"))
             % (e.sond_id, e.message))
         return
     except Exception as e:  # any other error while loading the data
         print("exception : %s" % str(e))
         utils.stop_waiting_cursor()
         utils.MessagebarAndLog.critical(bar_msg=ru(
             QCoreApplication.translate(
                 ' Stratigraphy',
                 "The stratigraphy plot failed, check Midvatten plugin settings and your data!"
             )))
         return
     utils.stop_waiting_cursor()  # Restores the mouse cursor to normal symbol
     # show widget
     w = SurveyDialog()
     #w.widget.setData2_nosorting(data)  #THIS IS IF DATA IS NOT TO BE SORTED!!
     w.widget.setData(self.data)  #THIS IS ONLY TO SORT DATA!!
     w.show()
     self.w = w  # save a reference so the dialog isn't deleted immediately; this has to be done both here and in the midvatten instance
 def update_level_masl_from_level_masl(self, obsid, fr_d_t, to_d_t, newzref):
     """ Updates level_masl for the given obsid by adding newzref.
     :param obsid: (str) The obsid
     :param fr_d_t: (datetime) start of calibration
     :param to_d_t: (datetime) end of calibration
     :param newzref: (int/float/str [m]) The correction that is added to level_masl [m]
     :return: None
     """
     utils.start_waiting_cursor()
     sql =r"""UPDATE w_levels_logger SET level_masl = """
     sql += str(newzref)
     sql += """ + level_masl WHERE obsid = '"""
     sql += obsid
     sql += """' AND level_masl IS NOT NULL"""
     # Sqlite seems to have problems with date comparison date_time >= a_date, so they have to be converted into total seconds first.
     date_time_as_epoch = db_utils.cast_date_time_as_epoch()
     sql += """ AND %s > %s"""%(date_time_as_epoch, str((fr_d_t - datetime.datetime(1970,1,1)).total_seconds()))
     sql += """ AND %s < %s""" % (date_time_as_epoch, str((to_d_t - datetime.datetime(1970, 1, 1)).total_seconds()))
     dummy = db_utils.sql_alter_db(sql)
     utils.stop_waiting_cursor()
    def calc_best_fit(self):
        """ Calculates the self.LoggerPos from self.meas_ts and self.head_ts

            First matches measurements from self.meas_ts to logger values from
            self.head_ts. This is done by making a mean of all logger values inside
            self.meas_ts date - search_radius and self.meas_ts date + search_radius.
            (this could probably be change to get only the closest logger value
            inside the search_radius instead)
            (search_radius is gotten from self.get_search_radius())

            Then calculates the mean of all matches and set to self.LoggerPos.
        """
        obsid = self.load_obsid_and_init()
        utils.start_waiting_cursor()
        self.reset_plot_selects_and_calib_help()
        search_radius = self.get_search_radius()
        if self.loggerpos_masl_or_offset_state == 1:  # logger position mode: fit using head_ts and set self.LoggerPos
            logger_ts = self.head_ts
            text_field = self.LoggerPos
            calib_func = self.set_logger_pos
        else:  # offset mode: fit using level_masl_ts and set self.Add2Levelmasl
            logger_ts = self.level_masl_ts
            text_field = self.Add2Levelmasl
            calib_func = self.add_to_level_masl

        coupled_vals = self.match_ts_values(self.meas_ts, logger_ts, search_radius)
        if not coupled_vals:
            utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "There was no match found between measurements and logger values inside the chosen period.\n Try to increase the search radius!")))
        else:
            calculated_diff = str(utils.calc_mean_diff(coupled_vals))
            if not calculated_diff or calculated_diff.lower() == 'nan':
                utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "There was no matched measurements or logger values inside the chosen period.\n Try to increase the search radius!")))
                utils.MessagebarAndLog.info(log_msg=ru(QCoreApplication.translate('Calibrlogger', "Calculated water level from logger: utils.calc_mean_diff(coupled_vals) didn't return a useable value.")))
            else:
                text_field.setText(calculated_diff)
                calib_func(obsid)

        utils.stop_waiting_cursor()
    def __init__(self, settingsdict, num_data_cols, rowheader_colwidth_percent,
                 empty_row_between_tables, page_break_between_tables,
                 from_active_layer, sql_table):
        #show the user this may take a long time...
        utils.start_waiting_cursor()

        self.nr_header_rows = 3

        reportfolder = os.path.join(QDir.tempPath(), 'midvatten_reports')
        if not os.path.exists(reportfolder):
            os.makedirs(reportfolder)
        reportpath = os.path.join(reportfolder, "w_qual_report.html")
        f = codecs.open(reportpath, "wb", "utf-8")

        #write some initiating html
        rpt = r"""<head><title>%s</title></head>""" % ru(
            QCoreApplication.translate(
                'Wqualreport',
                'water quality report from Midvatten plugin for QGIS'))
        rpt += r""" <meta http-equiv="content-type" content="text/html; charset=utf-8" />"""  #NOTE, all report data must be in 'utf-8'
        rpt += "<html><body>"
        f.write(rpt)

        if from_active_layer:
            utils.pop_up_info(
                ru(
                    QCoreApplication.translate(
                        'CompactWqualReport',
                        'Check that exported number of rows are identical to expected number of rows!\nFeatures in layers from sql queries can be invalid and then excluded from the report!'
                    )), 'Warning!')
            w_qual_lab_layer = qgis.utils.iface.activeLayer()
            if w_qual_lab_layer is None:
                raise utils.UsageError(
                    ru(
                        QCoreApplication.translate('CompactWqualReport',
                                                   'Must select a layer!')))
            if not w_qual_lab_layer.selectedFeatureCount():
                w_qual_lab_layer.selectAll()
            data = self.get_data_from_qgislayer(w_qual_lab_layer)
        else:
            data = self.get_data_from_sql(sql_table,
                                          utils.getselectedobjectnames())

        report_data, num_data = self.data_to_printlist(data)
        utils.MessagebarAndLog.info(bar_msg=ru(
            QCoreApplication.translate(
                'CompactWqualReport',
                'Created report from %s number of rows.')) % str(num_data))

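        # Write the report in chunks: each table gets the row-header column plus up to
        # num_data_cols data columns, and rows without any data cells are skipped.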
        for startcol in range(1, len(report_data[0]), num_data_cols):
            printlist = [[row[0]] for row in report_data]
            for rownr, row in enumerate(report_data):
                printlist[rownr].extend(
                    row[startcol:min(startcol + num_data_cols, len(row))])

            filtered = [row for row in printlist if any(row[1:])]

            self.htmlcols = len(filtered[0])
            self.WriteHTMLReport(
                filtered,
                f,
                rowheader_colwidth_percent,
                empty_row_between_tables=empty_row_between_tables,
                page_break_between_tables=page_break_between_tables)

        # write some finishing html and close the file
        f.write("\n</body></html>")
        f.close()

        utils.stop_waiting_cursor()  # now this long process is done and the cursor is back as normal

        if report_data:
            QDesktopServices.openUrl(QUrl.fromLocalFile(reportpath))
    def load_obsid_and_init(self):
        """ Checks the current obsid and reloads all ts.
        :return: obsid

        Info: Previously, some time series were only reloaded when the obsid changed, but that caused problems if the
        data was changed in the background, for example in the Spatialite GUI. Now all time series are always reloaded.
        It's rather fast anyway.
        """
        utils.start_waiting_cursor()
        obsid = self.selected_obsid
        if not obsid:
            try:
                print('error obsid ' + str(obsid))
            except:
                pass
            #utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "ERROR: no obsid is chosen")))
            utils.stop_waiting_cursor()
            return None

        meas_sql = r"""SELECT date_time, level_masl FROM w_levels WHERE obsid = '%s' ORDER BY date_time"""%obsid
        self.meas_ts = self.sql_into_recarray(meas_sql)

        head_level_masl_sql = r"""SELECT date_time, head_cm / 100, level_masl FROM w_levels_logger WHERE obsid = '%s' ORDER BY date_time"""%obsid
        head_level_masl_list = db_utils.sql_load_fr_db(head_level_masl_sql)[1]
        head_list = [(row[0], row[1]) for row in head_level_masl_list]
        level_masl_list = [(row[0], row[2]) for row in head_level_masl_list]

        self.head_ts = self.list_of_list_to_recarray(head_list)

        if self.plot_logger_head.isChecked():
            if self.normalize_head.isChecked():
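                # Shift the head curve by the difference between the mean calibrated
                # level_masl and the mean head so both series plot at the same level.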
                head_vals = [row[1] for row in head_list if row[1] is not None]
                num_head = len(head_vals)
                if num_head > 0:
                    head_mean = sum(head_vals) / float(len(head_vals))

                    level_masl_vals = [row[1] for row in level_masl_list if row[1] is not None]
                    num_level_masl_vals = len(level_masl_vals)
                    if num_level_masl_vals > 0:
                        level_masl_mean = sum(level_masl_vals) / float(num_level_masl_vals)

                        normalized_head = [(row[0], row[1] + (level_masl_mean - head_mean) if row[1] is not None else None) for row in head_list]

                        self.head_ts_for_plot = self.list_of_list_to_recarray(normalized_head)
                    else:
                        utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('Calibrlogger', 'No calibrated level_masl values to normalize against.')))
                        self.head_ts_for_plot = self.head_ts
                else:
                    utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('Calibrlogger', 'No head values to normalize against.')))
                    self.head_ts_for_plot = self.head_ts
            else:
                self.head_ts_for_plot = self.head_ts
        else:
            self.head_ts_for_plot = None

        self.obsid = obsid

        self.level_masl_ts = self.list_of_list_to_recarray(level_masl_list)

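        # Flag the obsid as uncalibrated if any level_masl value is still missing.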
        calibration_status = [obsid] if [row[1] for row in level_masl_list if row[1] is None] else []
        self.update_combobox_with_calibration_info(obsid=obsid, _obsids_with_uncalibrated_data=calibration_status)

        self.setlastcalibration(obsid)
        utils.stop_waiting_cursor()
        return obsid
    def general_import(self, dest_table, file_data, allow_obs_fk_import=False,
                       _dbconnection=None, dump_temptable=False, source_srid=None,
                       skip_confirmation=False, binary_geometry=False):
        """General method for importing a list of list to a table

            self.temptableName must be the name of the table containing the new data to import.

        :param dest_table: The destination table
        :param file_data: a list of list with a header list as first row
        :param allow_obs_fk_import: True to allow creation of obsids in obs_points and obs_lines.
        :param _dbconnection: A db_utils.DbConnectionManager-instance if other than the currently selected in the midvatten
                              settings dialog.
        :param dump_temptable: True to create a csvfile from internal temporary table.
        :param source_srid: The srid of the source geometry column if the geometry is a WKT or WKB
        :param skip_confirmation: True to not ask the user to import foreign keys.
        :param binary_geometry: True if the source geometry column should be parsed as a WKB, else it's parsed as WKT.
        :return:
        """

        self.temptable_name = None

        if skip_confirmation:
            self.foreign_keys_import_question = 1

        try:
            if file_data is None or not file_data:
                return
            utils.MessagebarAndLog.info(log_msg=ru(QCoreApplication.translate('midv_data_importer', '\nImport to %s starting\n--------------------')) % dest_table)

            utils.start_waiting_cursor()

            if not isinstance(_dbconnection, db_utils.DbConnectionManager):
                dbconnection = db_utils.DbConnectionManager()
            else:
                dbconnection = _dbconnection

            db_utils.activate_foreign_keys(activated=True, dbconnection=dbconnection)

            recsinfile = len(file_data[1:])
            table_info = db_utils.db_tables_columns_info(table=dest_table, dbconnection=dbconnection)
            if not table_info:
                raise MidvDataImporterError(ru(QCoreApplication.translate('midv_data_importer', 'The table %s did not exist. Update the database to latest version.')) % dest_table)
            else:
                table_info = table_info[dest_table]
            #POINT and LINESTRING must be cast as BLOB. So change the type to BLOB.
            column_headers_types = db_utils.change_cast_type_for_geometry_columns(dbconnection, table_info, dest_table)
            primary_keys = [row[1] for row in table_info if int(row[5])]
            # Not null columns are only required if they lack a default value.
            not_null_columns = [row[1] for row in table_info if int(row[3]) and row[4] is None]
            # Only use the columns that exist in the destination table.
            existing_columns_in_dest_table = [col for col in file_data[0] if col in column_headers_types]
            existing_columns_in_temptable = file_data[0]
            missing_columns = [column for column in not_null_columns if column not in existing_columns_in_dest_table]

            if missing_columns:
                raise MidvDataImporterError(ru(QCoreApplication.translate('midv_data_importer', 'Required columns %s are missing for table %s')) % (', '.join(missing_columns), dest_table))

            primary_keys_for_concat = [pk for pk in primary_keys if pk in existing_columns_in_temptable]

            self.list_to_table(dbconnection, dest_table, file_data, primary_keys_for_concat)

            #Delete records from self.temptable where yyyy-mm-dd hh:mm or yyyy-mm-dd hh:mm:ss already exist for the same date.
            nr_before = dbconnection.execute_and_fetchall('''select count(*) from %s''' % (self.temptable_name))[0][0]
            if 'date_time' in primary_keys:
                self.delete_existing_date_times_from_temptable(primary_keys, dest_table, dbconnection)
            nr_after = dbconnection.execute_and_fetchall('''select count(*) from %s''' % (self.temptable_name))[0][0]

            nr_same_date = nr_before - nr_after  # rows dropped because their date already existed
            if nr_same_date > 0:
                utils.MessagebarAndLog.info(log_msg=ru(QCoreApplication.translate('midv_data_importer', 'In total "%s" rows with the same date \non format yyyy-mm-dd hh:mm or yyyy-mm-dd hh:mm:ss already existed and will not be imported. %s rows remain.'))%(str(nr_same_date), str(nr_after)))
            if not nr_after > 0:
                utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('midv_data_importer', 'Nothing imported to %s after deleting duplicate date_times')) % dest_table)
                return

            #Special cases for some tables
            if dest_table == 'stratigraphy':
                self.check_and_delete_stratigraphy(existing_columns_in_dest_table, dbconnection)

            # Dump temptable to csv for debugging
            if dump_temptable:
                dbconnection.dump_table_2_csv(self.temptable_name)

            # Import foreign keys in some special cases
            foreign_keys = db_utils.get_foreign_keys(dest_table, dbconnection=dbconnection)
            if foreign_keys:
                if not allow_obs_fk_import:
                    for table in ['obs_points', 'obs_lines']:
                        if table in foreign_keys:
                            del foreign_keys[table]

                if foreign_keys:
                    if self.foreign_keys_import_question is None:
                        msg = ru(QCoreApplication.translate('midv_data_importer', """Please note!\nForeign keys will be imported silently into "%s" if needed. \n\nProceed?""")) % (', '.join(list(foreign_keys.keys())))
                        utils.MessagebarAndLog.info(log_msg=msg)
                        stop_question = utils.Askuser("YesNo", msg, ru(QCoreApplication.translate('midv_data_importer', "Info!")))
                        if stop_question.result == 0:      # if the user wants to abort
                            raise UserInterruptError()
                        else:
                            self.foreign_keys_import_question = 1
                    if self.foreign_keys_import_question == 1:
                        nr_before = nr_after
                        self.import_foreign_keys(dbconnection, dest_table, self.temptable_name, foreign_keys, existing_columns_in_temptable)
                        nr_after = dbconnection.execute_and_fetchall('''select count(*) from %s''' % (self.temptable_name))[0][0]
                        nr_after_foreign_keys = nr_before - nr_after
                        utils.MessagebarAndLog.info(log_msg=ru(QCoreApplication.translate('midv_data_importer', 'In total "%s" rows were deleted due to foreign keys restrictions and "%s" rows remain.'))%(str(nr_after_foreign_keys), str(nr_after)))

            if not nr_after > 0:
                raise MidvDataImporterError(ru(QCoreApplication.translate('midv_data_importer', 'Nothing imported, see log message panel')))

            #Finally import data:
            nr_failed_import = recsinfile - nr_after
            if nr_failed_import > 0:
                msg = ru(QCoreApplication.translate('midv_data_importer', """Please note!\nThere are %s rows in your data that cannot be imported!\nDo you really want to import the rest?\nAnswering yes will start from the top of the imported file and only import the first of the duplicates.\n\nProceed?""")) % (str(nr_failed_import))
                utils.MessagebarAndLog.info(log_msg=msg)
                stop_question = utils.Askuser("YesNo", msg, ru(QCoreApplication.translate('midv_data_importer', "Warning!")))
                if stop_question.result == 0:      # if the user wants to abort
                    raise UserInterruptError()

            # Check if current table has geometry:
            geom_columns = db_utils.get_geometry_types(dbconnection, dest_table)
            sourcecols = []
            for colname in sorted(existing_columns_in_dest_table):
                null_replacement = db_utils.cast_null(column_headers_types[colname], dbconnection)
                if colname in list(geom_columns.keys()) and colname in existing_columns_in_temptable:
                    sourcecols.append(self.create_geometry_sql(colname, dest_table, dbconnection, source_srid,
                                                               null_replacement, binary_geometry))
                else:
                    sourcecols.append(
                        """(CASE WHEN ({colname} !='' AND {colname} !=' ' AND {colname} IS NOT NULL)\n    THEN CAST({colname} AS {type}) ELSE {null} END)""".format(
                            colname=colname,
                            type=column_headers_types[colname],
                            null=null_replacement))

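            # Build an INSERT ... SELECT from the temporary table: each column is cast to the
            # destination type with empty strings turned into NULL, while geometry columns use
            # the expression from create_geometry_sql instead.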
            sql = """INSERT INTO {dest_table} ({dest_columns})\nSELECT {source_columns}\nFROM {source_table}\n"""
            kwargs = {'dest_table': dest_table,
                      'dest_columns': ', '.join(sorted(existing_columns_in_dest_table)),
                      'source_table': self.temptable_name,
                      'source_columns': u',\n    '.join(sourcecols)
                      }
            if not_null_columns:
                sql += """WHERE {notnullcheck}"""
                kwargs['notnullcheck'] = ' AND '.join(['%s IS NOT NULL'%notnullcol
                                                       for notnullcol in sorted(not_null_columns)])
            sql = sql.format(**kwargs)
            recsbefore = dbconnection.execute_and_fetchall('select count(*) from %s' % (dest_table))[0][0]
            try:
                dbconnection.execute(sql)
            except Exception as e:
                utils.MessagebarAndLog.info(log_msg=ru(QCoreApplication.translate('midv_data_importer', 'INSERT failed while importing to %s. Using INSERT OR IGNORE instead. Msg:\n')) % dest_table + ru(str(e)))
                sql = db_utils.add_insert_or_ignore_to_sql(sql, dbconnection)
                try:
                    dbconnection.execute(sql)
                except Exception as e:
                    try:
                        str(e)
                    except UnicodeDecodeError:
                        utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate('midv_data_importer', 'Import failed, see log message panel')),
                                                        log_msg=ru(QCoreApplication.translate('midv_data_importer', 'Sql\n%s  failed.')) % (sql), duration=999)
                    else:
                        utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate('midv_data_importer', 'Import failed, see log message panel')),
                                                        log_msg=ru(QCoreApplication.translate('midv_data_importer', 'Sql\n%s  failed.\nMsg:\n%s')) % (sql, ru(str(e))), duration=999)

            recsafter = dbconnection.execute_and_fetchall('select count(*) from %s' % (dest_table))[0][0]
            nr_imported = recsafter - recsbefore
            nr_excluded = recsinfile - nr_imported

            utils.MessagebarAndLog.info(bar_msg=ru(QCoreApplication.translate('midv_data_importer', '%s rows imported and %s excluded for table %s. See log message panel for details'))%(nr_imported, nr_excluded, dest_table),
                                        log_msg='--------------------')

        except:
            # If an external dbconnection is supplied, do not close it.
            if _dbconnection is None:
                try:
                    dbconnection.closedb()
                except:
                    pass
            else:
                if self.temptable_name is not None:
                    #try:
                    dbconnection.drop_temporary_table(self.temptable_name)
                    #except:
                    #    pass
            utils.stop_waiting_cursor()
            raise
        else:
            dbconnection.commit()
            # If an external dbconnection is supplied, do not close it.
            if _dbconnection is None:
                try:
                    dbconnection.closedb()
                except:
                    pass
            else:
                if self.temptable_name is not None:
                    #try:
                    dbconnection.drop_temporary_table(self.temptable_name)
                    #except:
                    #    pass
            utils.stop_waiting_cursor()
    def start_import(self,
                     files,
                     skip_rows_without_water_level,
                     confirm_names,
                     import_all_data,
                     from_date=None,
                     to_date=None,
                     export_csv=False,
                     import_to_db=True):
        """
        """
        utils.start_waiting_cursor()  # show the user this may take a long time...
        parsed_files = []
        for selected_file in files:
            try:
                res = self.parse_func(
                    path=selected_file,
                    charset=self.charsetchoosen,
                    skip_rows_without_water_level=skip_rows_without_water_level,
                    begindate=from_date,
                    enddate=to_date)
            except:
                utils.MessagebarAndLog.critical(bar_msg=ru(
                    QCoreApplication.translate('LeveloggerImport',
                                               '''Error on file %s.''')) %
                                                selected_file)
                raise

            if res == 'cancel':
                self.status = True
                utils.stop_waiting_cursor()
                return res
            elif res in ('skip', 'ignore'):
                continue

            try:
                file_data, filename, location = res
            except Exception as e:
                utils.MessagebarAndLog.warning(
                    bar_msg=QCoreApplication.translate(
                        'DiverofficeImport',
                        'Import error, see log message panel'),
                    log_msg=ru(
                        QCoreApplication.translate(
                            'DiverofficeImport',
                            'File %s could not be parsed. Msg:\n%s')) %
                    (selected_file, str(e)))
                continue
            parsed_files.append((file_data, filename, location))

        if len(parsed_files) == 0:
            utils.MessagebarAndLog.critical(bar_msg=QCoreApplication.translate(
                'DiverofficeImport', "Import Failure: No files imported"))
            utils.stop_waiting_cursor()
            return

        #Add obsid to all parsed filedatas by asking the user for it.
        filename_location_obsid = [['filename', 'location', 'obsid']]
        filename_location_obsid.extend(
            [[parsed_file[1], parsed_file[2], parsed_file[2]]
             for parsed_file in parsed_files])
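        # The location parsed from each file is used as the suggested obsid;
        # filter_nonexisting_values_and_ask() below lets the user correct values
        # that do not exist in the database.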

        if confirm_names:
            try_capitalize = False
        else:
            try_capitalize = True

        existing_obsids = utils.get_all_obsids()
        utils.stop_waiting_cursor()
        filename_location_obsid = utils.filter_nonexisting_values_and_ask(
            file_data=filename_location_obsid,
            header_value='obsid',
            existing_values=existing_obsids,
            try_capitalize=try_capitalize,
            always_ask_user=confirm_names)
        utils.start_waiting_cursor()

        if len(filename_location_obsid) < 2:
            utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate(
                'DiverofficeImport',
                'Warning. All files were skipped, nothing imported!'))
            utils.stop_waiting_cursor()
            return False

        filenames_obsid = dict([(x[0], x[2])
                                for x in filename_location_obsid[1:]])

        parsed_files_with_obsid = []
        for file_data, filename, location in parsed_files:
            if filename in filenames_obsid:
                file_data = list(file_data)
                obsid = filenames_obsid[filename]
                file_data[0].append('obsid')
                [row.append(obsid) for row in file_data[1:]]
                parsed_files_with_obsid.append([file_data, filename, location])
        #Header
        file_to_import_to_db = [parsed_files_with_obsid[0][0][0]]
        file_to_import_to_db.extend([
            row for parsed_file in parsed_files_with_obsid
            for row in parsed_file[0][1:]
        ])

        # Add comment to import:
        #file_to_import_to_db[0].append('comment')
        #comment = ''
        #[row.append(comment) for row in file_to_import_to_db[1:]]

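        # If not importing all data, filter the rows against the last logger dates
        # already stored in the database.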
        if not import_all_data:
            file_to_import_to_db = self.filter_dates_from_filedata(
                file_to_import_to_db, utils.get_last_logger_dates())
        if len(file_to_import_to_db) < 2:
            utils.MessagebarAndLog.info(bar_msg=QCoreApplication.translate(
                'DiverofficeImport',
                'No new data existed in the files. Nothing imported.'))
            self.status = 'True'
            utils.stop_waiting_cursor()
            return True

        if import_to_db:
            importer = import_data_to_db.midv_data_importer()
            answer = importer.general_import('w_levels_logger',
                                             file_to_import_to_db)

        if export_csv:
            path = qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName(
                self, 'Save File', '', 'CSV(*.csv)')
            if path:
                path = ru(path[0])
                utils.write_printlist_to_file(path, file_to_import_to_db)

        utils.stop_waiting_cursor()

        if self.close_after_import.isChecked():
            self.close()
Example #17
    def __init__(self, layer, settingsdict={}):
        #show the user this may take a long time...
        utils.start_waiting_cursor()

        self.settingsdict = settingsdict
        provider = layer.dataProvider()  # OGR provider
        kolumnindex = provider.fieldNameIndex(
            'obsid')  # To find the column named 'obsid'
        observations = layer.getSelectedFeatures()

        reportfolder = os.path.join(QDir.tempPath(), 'midvatten_reports')
        if not os.path.exists(reportfolder):
            os.makedirs(reportfolder)
        reportpath = os.path.join(reportfolder, "w_qual_report.html")
        #f = open(reportpath, "wb" )
        f = codecs.open(reportpath, "wb", "utf-8")

        #write some initiating html
        rpt = r"""<head><title>%s</title></head>""" % ru(
            QCoreApplication.translate(
                'Wqualreport',
                'water quality report from Midvatten plugin for QGIS'))
        rpt += r""" <meta http-equiv="content-type" content="text/html; charset=utf-8" />"""  #NOTE, all report data must be in 'utf-8'
        rpt += "<html><body>"
        #rpt += "<table width=\"100%\" border=\"1\">\n"
        #rpt2 = rpt.encode("utf-8")
        f.write(rpt)

        dbconnection = db_utils.DbConnectionManager()

        ReportData = None  # stays None if no features are selected
        for feature in observations:
            attributes = feature.attributes()
            obsid = attributes[kolumnindex]
            try:
                print('about to get data for ' + obsid + ', at time: ' +
                      str(time.time()))  #debug
            except:
                pass
            ReportData = self.GetData(
                self.settingsdict['database'], obsid,
                dbconnection)  # one observation at a time
            try:
                print('done with getting data for ' + obsid + ', at time: ' +
                      str(time.time()))  #debug
            except:
                pass
            if ReportData:
                self.WriteHTMLReport(ReportData, f)
            try:
                print('wrote html report for ' + obsid + ', at time: ' +
                      str(time.time()))  #debug
            except:
                pass

        dbconnection.closedb()
        #write some finishing html and close the file
        f.write("\n</body></html>")
        f.close()

        utils.stop_waiting_cursor()  # now this long process is done and the cursor is back as normal

        if ReportData:
            QDesktopServices.openUrl(QUrl.fromLocalFile(reportpath))
    def start_import(self, files, skip_rows_without_water_level, confirm_names, import_all_data, from_date=None, to_date=None):
        """
        """
        utils.start_waiting_cursor()  #show the user this may take a long time...
        parsed_files = []
        for selected_file in files:
            res = self.parse_func(path=selected_file, charset=self.charsetchoosen, skip_rows_without_water_level=skip_rows_without_water_level, begindate=from_date, enddate=to_date)
            if res == 'cancel':
                self.status = True
                utils.stop_waiting_cursor()
                return res
            elif res in ('skip', 'ignore'):
                continue

            try:
                file_data, filename, location = res
            except Exception as e:
                utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', 'Import error, see log message panel'),
                                               log_msg=ru(QCoreApplication.translate('DiverofficeImport', 'File %s could not be parsed. Msg:\n%s'))%(selected_file, str(e)))
                continue
            parsed_files.append((file_data, filename, location))

        if len(parsed_files) == 0:
            utils.MessagebarAndLog.critical(bar_msg=QCoreApplication.translate('DiverofficeImport', "Import Failure: No files imported"))
            utils.stop_waiting_cursor()
            return

        #Add obsid to all parsed filedatas by asking the user for it.
        filename_location_obsid = [['filename', 'location', 'obsid']]
        filename_location_obsid.extend([[parsed_file[1], parsed_file[2], parsed_file[2]] for parsed_file in parsed_files])

        if confirm_names:
            try_capitalize = False
        else:
            try_capitalize = True

        existing_obsids = utils.get_all_obsids()
        utils.stop_waiting_cursor()
        filename_location_obsid = utils.filter_nonexisting_values_and_ask(file_data=filename_location_obsid, header_value='obsid', existing_values=existing_obsids, try_capitalize=try_capitalize, always_ask_user=confirm_names)
        utils.start_waiting_cursor()

        if len(filename_location_obsid) < 2:
            utils.MessagebarAndLog.warning(bar_msg=QCoreApplication.translate('DiverofficeImport', 'Warning. All files were skipped, nothing imported!'))
            utils.stop_waiting_cursor()
            return False

        filenames_obsid = dict([(x[0], x[2]) for x in filename_location_obsid[1:]])

        parsed_files_with_obsid = []
        for file_data, filename, location in parsed_files:
            if filename in filenames_obsid:
                file_data = list(file_data)
                obsid = filenames_obsid[filename]
                file_data[0].append('obsid')
                [row.append(obsid) for row in file_data[1:]]
                parsed_files_with_obsid.append([file_data, filename, location])
        #Header
        file_to_import_to_db =  [parsed_files_with_obsid[0][0][0]]
        file_to_import_to_db.extend([row for parsed_file in parsed_files_with_obsid for row in parsed_file[0][1:]])

        if not import_all_data:
            file_to_import_to_db = self.filter_dates_from_filedata(file_to_import_to_db, utils.get_last_logger_dates())
        if len(file_to_import_to_db) < 2:
            utils.MessagebarAndLog.info(bar_msg=QCoreApplication.translate('DiverofficeImport', 'No new data existed in the files. Nothing imported.'))
            self.status = 'True'
            utils.stop_waiting_cursor()
            return True

        importer = import_data_to_db.midv_data_importer()
        answer = importer.general_import('w_levels_logger', file_to_import_to_db)

        utils.stop_waiting_cursor()

        if self.close_after_import.isChecked():
            self.close()
Example #19
    def general_import(self,
                       goal_table,
                       file_data,
                       allow_obs_fk_import=False,
                       _dbconnection=None,
                       dump_temptable=False):
        """General method for importing an sqlite table into a goal_table

            self.temptableName must be the name of the table containing the new data to import.

        :param goal_table:
        :return:
        """
        try:
            if file_data is None or not file_data:
                return
            utils.MessagebarAndLog.info(log_msg=ru(
                QCoreApplication.translate(
                    'midv_data_importer',
                    '\nImport to %s starting\n--------------------')) %
                                        goal_table)

            utils.start_waiting_cursor()

            self.temptable_name = goal_table + '_temp'

            if not isinstance(_dbconnection, db_utils.DbConnectionManager):
                dbconnection = db_utils.DbConnectionManager()
            else:
                dbconnection = _dbconnection

            db_utils.activate_foreign_keys(activated=True,
                                           dbconnection=dbconnection)

            recsinfile = len(file_data[1:])
            table_info = db_utils.db_tables_columns_info(
                table=goal_table, dbconnection=dbconnection)
            if not table_info:
                raise MidvDataImporterError(
                    ru(
                        QCoreApplication.translate(
                            'midv_data_importer',
                            'The table %s did not exist. Update the database to latest version.'
                        )) % goal_table)
            else:
                table_info = table_info[goal_table]
            #POINT and LINESTRING must be cast as BLOB. So change the type to BLOB.
            column_headers_types = db_utils.change_cast_type_for_geometry_columns(
                dbconnection, table_info, goal_table)
            primary_keys = [
                row[1] for row in table_info if int(row[5])
            ]  #Not null columns are allowed if they have a default value.
            not_null_columns = [
                row[1] for row in table_info if int(row[3]) and row[4] is None
            ]
            # Only use the columns that exist in the goal table.
            existing_columns_in_goal_table = [
                col for col in file_data[0] if col in column_headers_types
            ]
            existing_columns_in_temptable = file_data[0]
            missing_columns = [
                column for column in not_null_columns
                if column not in existing_columns_in_goal_table
            ]

            if missing_columns:
                raise MidvDataImporterError(
                    ru(
                        QCoreApplication.translate(
                            'midv_data_importer',
                            'Required columns %s are missing for table %s')) %
                    (', '.join(missing_columns), goal_table))

            primary_keys_for_concat = [
                pk for pk in primary_keys
                if pk in existing_columns_in_temptable
            ]

            self.list_to_table(dbconnection, file_data,
                               primary_keys_for_concat)

            #Delete records from self.temptable where yyyy-mm-dd hh:mm or yyyy-mm-dd hh:mm:ss already exist for the same date.
            nr_before = dbconnection.execute_and_fetchall(
                '''select count(*) from %s''' % (self.temptable_name))[0][0]
            if 'date_time' in primary_keys:
                self.delete_existing_date_times_from_temptable(
                    primary_keys, goal_table, dbconnection)
            nr_after = dbconnection.execute_and_fetchall(
                '''select count(*) from %s''' % (self.temptable_name))[0][0]

            nr_same_date = nr_before - nr_after
            if nr_same_date > 0:
                utils.MessagebarAndLog.info(log_msg=ru(
                    QCoreApplication.translate(
                        'midv_data_importer',
                        'In total "%s" rows with the same date \non format yyyy-mm-dd hh:mm or yyyy-mm-dd hh:mm:ss already existed and will not be imported. %s rows remain.'
                    )) % (str(nr_same_date), str(nr_after)))
            if not nr_after > 0:
                utils.MessagebarAndLog.warning(bar_msg=ru(
                    QCoreApplication.translate(
                        'midv_data_importer',
                        'Nothing imported to %s after deleting duplicate date_times'
                    )) % goal_table)
                return

            #Special cases for some tables
            if goal_table == 'stratigraphy':
                self.check_and_delete_stratigraphy(
                    existing_columns_in_goal_table, dbconnection)
            # Check if current table has geometry:
            geom_columns = db_utils.get_geometry_types(dbconnection,
                                                       goal_table)
            for geom_col in geom_columns.keys():
                if geom_col in existing_columns_in_temptable:
                    self.calculate_geometry(geom_col, goal_table, dbconnection)

            # Dump temptable to csv for debugging
            if dump_temptable:
                dbconnection.dump_table_2_csv(self.temptable_name)

            # Import foreign keys in some special cases
            foreign_keys = db_utils.get_foreign_keys(goal_table,
                                                     dbconnection=dbconnection)
            if foreign_keys:
                if not allow_obs_fk_import:
                    for table in ['obs_points', 'obs_lines']:
                        if table in foreign_keys:
                            del foreign_keys[table]

                if foreign_keys:
                    if self.foreign_keys_import_question is None:
                        msg = ru(
                            QCoreApplication.translate(
                                'midv_data_importer',
                                """Please note!\nForeign keys will be imported silently into "%s" if needed. \n\nProceed?"""
                            )) % (', '.join(list(foreign_keys.keys())))
                        utils.MessagebarAndLog.info(log_msg=msg)
                        stop_question = utils.Askuser(
                            "YesNo", msg,
                            ru(
                                QCoreApplication.translate(
                                    'midv_data_importer', "Info!")))
                        if stop_question.result == 0:  # if the user wants to abort
                            raise UserInterruptError()
                        else:
                            self.foreign_keys_import_question = 1
                    if self.foreign_keys_import_question == 1:
                        nr_before = nr_after
                        self.import_foreign_keys(
                            dbconnection, goal_table, self.temptable_name,
                            foreign_keys, existing_columns_in_temptable)
                        nr_after = dbconnection.execute_and_fetchall(
                            '''select count(*) from %s''' %
                            (self.temptable_name))[0][0]
                        nr_after_foreign_keys = nr_before - nr_after
                        utils.MessagebarAndLog.info(log_msg=ru(
                            QCoreApplication.translate(
                                'midv_data_importer',
                                'In total "%s" rows were deleted due to foreign keys restrictions and "%s" rows remain.'
                            )) % (str(nr_after_foreign_keys), str(nr_after)))

            if not nr_after > 0:
                raise MidvDataImporterError(
                    ru(
                        QCoreApplication.translate(
                            'midv_data_importer',
                            'Nothing imported, see log message panel')))

            #Finally import data:
            nr_failed_import = recsinfile - nr_after
            if nr_failed_import > 0:
                msg = ru(
                    QCoreApplication.translate(
                        'midv_data_importer',
                        """Please note!\nThere are %s rows in your data that can not be imported!\nDo you really want to import the rest?\nAnswering yes will start, from top of the imported file and only import the first of the duplicates.\n\nProceed?"""
                    )) % (str(nr_failed_import))
                utils.MessagebarAndLog.info(log_msg=msg)
                stop_question = utils.Askuser(
                    "YesNo", msg,
                    ru(
                        QCoreApplication.translate('midv_data_importer',
                                                   "Warning!")))
                if stop_question.result == 0:  # if the user wants to abort
                    raise UserInterruptError()

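            # Build an INSERT INTO ... SELECT that casts every column from the temptable to
            # the goal table's column type and turns empty/blank strings into NULL (via
            # db_utils.cast_null). A hypothetical result, with invented columns, could be:
            #   INSERT INTO obs_points (h_toc, obsid)
            #   SELECT (CASE WHEN (h_toc !='' AND h_toc !=' ' AND h_toc IS NOT NULL)
            #           THEN CAST(h_toc AS double) ELSE NULL END), ...
            #   FROM obs_points_temp WHERE obsid IS NOT NULL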
            sql = """INSERT INTO %s (""" % goal_table
            sql += ', '.join(sorted(existing_columns_in_goal_table))
            sql += """) SELECT """
            sql += ', '.join([
                """(CASE WHEN (%s !='' AND %s !=' ' AND %s IS NOT NULL) THEN CAST(%s AS %s) ELSE %s END)"""
                % (colname, colname, colname, colname,
                   column_headers_types[colname],
                   db_utils.cast_null(column_headers_types[colname],
                                      dbconnection))
                for colname in sorted(existing_columns_in_goal_table)
            ])
            sql += """FROM %s""" % (self.temptable_name)
            if not_null_columns:
                sql += """ WHERE %s""" % ' AND '.join([
                    '%s IS NOT NULL' % notnullcol
                    for notnullcol in sorted(not_null_columns)
                ])

            recsbefore = dbconnection.execute_and_fetchall(
                'select count(*) from %s' % (goal_table))[0][0]
            try:
                dbconnection.execute(sql)
            except Exception as e:
                utils.MessagebarAndLog.info(log_msg=ru(
                    QCoreApplication.translate(
                        'midv_data_importer',
                        'INSERT failed while importing to %s. Using INSERT OR IGNORE instead. Msg:\n'
                    )) % goal_table + ru(str(e)))
                sql = db_utils.add_insert_or_ignore_to_sql(sql, dbconnection)
                try:
                    dbconnection.execute(sql)
                except Exception as e:
                    try:
                        str(e)
                    except UnicodeDecodeError:
                        utils.MessagebarAndLog.critical(
                            bar_msg=ru(
                                QCoreApplication.translate(
                                    'midv_data_importer',
                                    'Import failed, see log message panel')),
                            log_msg=ru(
                                QCoreApplication.translate(
                                    'midv_data_importer',
                                    'Sql\n%s  failed.')) % (sql),
                            duration=999)
                    else:
                        utils.MessagebarAndLog.critical(
                            bar_msg=ru(
                                QCoreApplication.translate(
                                    'midv_data_importer',
                                    'Import failed, see log message panel')),
                            log_msg=ru(
                                QCoreApplication.translate(
                                    'midv_data_importer',
                                    'Sql\n%s  failed.\nMsg:\n%s')) %
                            (sql, ru(str(e))),
                            duration=999)

            recsafter = dbconnection.execute_and_fetchall(
                'select count(*) from %s' % (goal_table))[0][0]

            nr_imported = recsafter - recsbefore
            nr_excluded = recsinfile - nr_imported

            utils.MessagebarAndLog.info(bar_msg=ru(
                QCoreApplication.translate(
                    'midv_data_importer',
                    '%s rows imported and %s excluded for table %s. See log message panel for details'
                )) % (nr_imported, nr_excluded, goal_table),
                                        log_msg='--------------------')

            #If an external dbconnection is supplied, do not close it.
            if _dbconnection is None:
                dbconnection.commit_and_closedb()
            else:
                dbconnection.commit()
            utils.stop_waiting_cursor()
        except:
            utils.stop_waiting_cursor()
            try:
                # If an external dbconnection is supplied, do not close it.
                if _dbconnection is None:
                    dbconnection.closedb()
                else:
                    pass
            except NameError:
                pass
            except:
                utils.MessagebarAndLog.warning(bar_msg=ru(
                    QCoreApplication.translate('midv_data_importer',
                                               'Closing database failed!')))
            raise
    def create_new_spatialite_db(self, verno, user_select_CRS='y', EPSG_code='4326', delete_srids=True):  #CreateNewDB(self, verno):
        """Open a new DataBase (create an empty one if file doesn't exists) and set as default DB"""

        utils.stop_waiting_cursor()
        set_locale = self.ask_for_locale()
        utils.start_waiting_cursor()

        if user_select_CRS == 'y':
            utils.stop_waiting_cursor()
            EPSGID = str(self.ask_for_CRS(set_locale)[0])
            utils.start_waiting_cursor()
        else:
            EPSGID = EPSG_code

        if EPSGID == '0' or not EPSGID:
            raise utils.UserInterruptError()
        # If a CRS is selected, go on and create the database

        #path and name of new db
        utils.stop_waiting_cursor()
        dbpath = ru(utils.get_save_file_name_no_extension(parent=None, caption="New DB",
                                                                    directory="midv_obsdb.sqlite",
                                                                    filter="Spatialite (*.sqlite)"))

        utils.start_waiting_cursor()

        if os.path.exists(dbpath):
            utils.MessagebarAndLog.critical(
                bar_msg=ru(QCoreApplication.translate('NewDb', 'A database with the chosen name already existed. Cancelling...')))
            utils.stop_waiting_cursor()
            return ''

        #Create the database
        conn = db_utils.connect_with_spatialite_connect(dbpath)
        conn.close()

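        # Keep the connection target as a string representation; for a spatialite db this
        # is assumed to look roughly like "{'spatialite': {'dbpath': '/path/to/db.sqlite'}}".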
        self.db_settings = ru(utils.anything_to_string_representation({'spatialite': {'dbpath': dbpath}}))

        #dbconnection = db_utils.DbConnectionManager(self.db_settings)
        try:
            # creating/connecting the test_db
            dbconnection = db_utils.DbConnectionManager(self.db_settings)
            dbconnection.execute("PRAGMA foreign_keys = ON")    #Foreign key constraints are disabled by default (for backwards compatibility), so must be enabled separately for each database dbconnection separately.
        except Exception as e:
            utils.MessagebarAndLog.critical(bar_msg=ru(QCoreApplication.translate('NewDb', "Impossible to connect to selected DataBase, see log message panel")), log_msg=ru(QCoreApplication.translate('NewDb', 'Msg:\n') + str(e)))
            #utils.pop_up_info("Impossible to connect to selected DataBase")
            utils.stop_waiting_cursor()
            return ''
        d = dbconnection.connector
        #First, find spatialite version
        versionstext = dbconnection.execute_and_fetchall('select spatialite_version()')[0][0]
        # load sql syntax to initialise spatial metadata, automatically create GEOMETRY_COLUMNS and SPATIAL_REF_SYS
        # then the syntax defines a Midvatten project db according to the loaded .sql-file
        if int(versionstext.split('.')[0]) < 4: # which file to use depends on spatialite version installed
            utils.pop_up_info(ru(QCoreApplication.translate('NewDb', "Midvatten plugin needs spatialite4.\nDatabase can not be created")))
            utils.stop_waiting_cursor()
            return ''

        filenamestring = "create_db.sql"

        SQLFile = os.path.join(os.sep,os.path.dirname(__file__),"..","definitions",filenamestring)
        qgisverno = Qgis.QGIS_VERSION  # We want to store info about which QGIS version created the db
        replace_word_replace_with = [('CHANGETORELEVANTEPSGID', ru(EPSGID)),
                                     ('CHANGETOPLUGINVERSION', ru(verno)),
                                     ('CHANGETOQGISVERSION', ru(qgisverno)),
                                     ('CHANGETODBANDVERSION', 'SpatiaLite version %s' % ru(versionstext)),
                                     ('CHANGETOLOCALE', ru(set_locale)),
                                     ('SPATIALITE ', '')]

        with open(SQLFile, 'r') as f:
            f.readline()  # first line is encoding info....
            lines = [ru(line) for line in f]
        sql_lines = ['{};'.format(l) for l in ' '.join(lines).split(';') if l]
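        # Rebuild one statement per list element by joining the file content and splitting
        # on ';'. This naive split assumes create_db.sql has no ';' inside string literals.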
        for line in sql_lines:
            if all([line, not line.startswith("#"), 'POSTGIS' not in line]):
                sql = self.replace_words(line, replace_word_replace_with)
                try:
                    dbconnection.execute(sql)
                except:
                    try:
                        print(str(sql))
                    except:
                        pass
                    raise

        if delete_srids:
            db_utils.delete_srids(dbconnection, EPSGID)


        self.insert_datadomains(set_locale, dbconnection)

        execute_sqlfile(get_full_filename("insert_obs_points_triggers.sql"), dbconnection)

        execute_sqlfile(get_full_filename('qgis3_obsp_fix.sql'), dbconnection)

        self.add_metadata_to_about_db(dbconnection)

        #FINISHED WORKING WITH THE DATABASE, CLOSE CONNECTIONS

        dbconnection.commit()
        dbconnection.vacuum()
        dbconnection.commit_and_closedb()

        #create SpatiaLite Connection in QGIS QSettings
        settings = qgis.PyQt.QtCore.QSettings()
        settings.beginGroup('/SpatiaLite/dbconnections')
        settings.setValue('%s/sqlitepath' % os.path.basename(dbpath), '%s' % dbpath)
        settings.endGroup()

        """
        #The intention is to keep layer styles in the database by using the class AddLayerStyles but due to limitations in how layer styles are stored in the database, I will put this class on hold for a while.

        #Finally add the layer styles info into the data base
        AddLayerStyles(dbpath)
        """

        utils.stop_waiting_cursor()
    def populate_postgis_db(self, verno, user_select_CRS='y', EPSG_code='4326'):

        dbconnection = db_utils.DbConnectionManager()
        db_settings = dbconnection.db_settings
        if not isinstance(db_settings, str):
            self.db_settings = ru(utils.anything_to_string_representation(dbconnection.db_settings))
        else:
            self.db_settings = ru(db_settings)
        if dbconnection.dbtype != 'postgis':
            raise utils.UsageError('Database type postgis not selected, check Midvatten settings!')

        dbconnection.execute('CREATE EXTENSION IF NOT EXISTS postgis;')

        result = dbconnection.execute_and_fetchall('select version(), PostGIS_full_version();')

        versionstext = ', '.join(result[0])

        utils.stop_waiting_cursor()
        set_locale = self.ask_for_locale()
        utils.start_waiting_cursor()

        if user_select_CRS == 'y':
            utils.stop_waiting_cursor()
            EPSGID = str(self.ask_for_CRS(set_locale)[0])
            utils.start_waiting_cursor()
        else:
            EPSGID = EPSG_code

        if EPSGID == '0' or not EPSGID:
            raise utils.UserInterruptError()

        filenamestring = "create_db.sql"

        SQLFile = os.path.join(os.sep,os.path.dirname(__file__),"..","definitions",filenamestring)
        qgisverno = Qgis.QGIS_VERSION  # We want to store info about which QGIS version created the db
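        # Translate the SpatiaLite-oriented create_db.sql into PostgreSQL dialect:
        # type names ('double' -> 'double precision'), identifier quoting and the
        # rowid column (CTID) differ between the two backends.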
        replace_word_replace_with = [
            ('CHANGETORELEVANTEPSGID', ru(EPSGID)),
            ('CHANGETOPLUGINVERSION', ru(verno)),
            ('CHANGETOQGISVERSION', ru(qgisverno)),
            ('CHANGETODBANDVERSION', 'PostGIS version %s' % ru(versionstext)),
            ('CHANGETOLOCALE', ru(set_locale)),
            ('double', 'double precision'),
            ('"', ''),
            ('rowid as rowid', 'CTID as rowid'),
            ('POSTGIS ', '')]

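        # Collect every successfully executed CREATE TABLE statement; they are later handed
        # to add_metadata_to_about_db, which is assumed to store them as table metadata.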
        created_tables_sqls = {}
        with open(SQLFile, 'r') as f:
            f.readline()  # first line is encoding info....
            lines = [ru(line) for line in f]
        sql_lines = ['{};'.format(l) for l in ' '.join(lines).split(';') if l]
        for linenr, line in enumerate(sql_lines):
            if all([line, not line.startswith("#"), 'InitSpatialMetadata' not in line, 'SPATIALITE' not in line,
                    line.replace(';', '').strip().replace('\n', '').replace('\r', '')]):
                sql = self.replace_words(line, replace_word_replace_with)
                try:
                    dbconnection.execute(sql)
                except:
                    try:
                        print(str(sql))
                        print("numlines: " + str(len(sql_lines)))
                        print("Error on line nr {}".format(str(linenr)))
                        print("before " + sql_lines[linenr - 1])
                        if linenr + 1 < len(sql_lines):
                            print("after " + sql_lines[linenr + 1 ])
                    except:
                        pass
                    raise
                else:
                    _sql = sql.lstrip('\r').lstrip('\n').lstrip()
                    if _sql.startswith('CREATE TABLE'):
                        tablename = _sql.split()[2]
                        created_tables_sqls[tablename] = sql

            #lines = [self.replace_words(line.decode('utf-8').rstrip('\n').rstrip('\r'), replace_word_replace_with) for line in f if all([line,not line.startswith("#"), 'InitSpatialMetadata' not in line])]
        #db_utils.sql_alter_db(lines)

        self.insert_datadomains(set_locale, dbconnection)

        execute_sqlfile(get_full_filename('insert_obs_points_triggers_postgis.sql'), dbconnection)

        execute_sqlfile(get_full_filename('insert_functions_postgis.sql'), dbconnection)

        self.add_metadata_to_about_db(dbconnection, created_tables_sqls)

        dbconnection.vacuum()

        dbconnection.commit_and_closedb()

        """
        #The intention is to keep layer styles in the database by using the class AddLayerStyles but due to limitations in how layer styles are stored in the database, I will put this class on hold for a while.

        #Finally add the layer styles info into the data base
        AddLayerStyles(dbpath)
        """
        utils.stop_waiting_cursor()
    def update_plot(self):
        """ Plots self.level_masl_ts, self.meas_ts and maybe self.head_ts """
        self.reset_plot_selects_and_calib_help()
        self.calib_help.setText(ru(QCoreApplication.translate('Calibrlogger', "Updating plot")))
        last_used_obsid = self.obsid
        obsid = self.load_obsid_and_init()
        utils.start_waiting_cursor()
        if obsid is None:
            self.calib_help.setText("")
            return
        self.axes.clear()
        
        p = [None] * 2  # List for plot objects
    
        # Load manual reading (full time series) for the obsid
        if self.meas_ts.size:
            self.plot_recarray(self.axes, self.meas_ts, obsid + ru(QCoreApplication.translate('Calibrlogger', ' measurements')), 'o-', picker=5, zorder=15, color='#1f77b4ff')
        
        # Load Loggerlevels (full time series) for the obsid
        if self.loggerLineNodes.isChecked():
            logger_line_style = '.-'
            original_head_style = '--'
        else:
            logger_line_style = '-'
            original_head_style = '--'

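        # Convert the logger timestamps to a time list once and reuse it for both the
        # adjusted logger series and the original head series plotted below.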
        logger_time_list = self.timestring_list_to_time_list(self.a_recarray_to_timestring_list(self.level_masl_ts))
        self.plot_recarray(self.axes, self.level_masl_ts, obsid + ru(QCoreApplication.translate('Calibrlogger', ' adjusted logger')), logger_line_style, picker=5, time_list=logger_time_list, zorder=10, color='#ff7f0eff')

        #Plot the original head_cm
        if self.plot_logger_head.isChecked():
            self.plot_recarray(self.axes, self.head_ts_for_plot, obsid + ru(QCoreApplication.translate('Calibrlogger', ' original logger head')), original_head_style, picker=5, time_list=logger_time_list, zorder=5, color='#c1c1c1ff')

        """ Finish plot """
        self.axes.grid(True)
        self.axes.yaxis.set_major_formatter(tick.ScalarFormatter(useOffset=False, useMathText=False))
        self.calibrplotfigure.autofmt_xdate()
        self.axes.set_ylabel(ru(QCoreApplication.translate('Calibrlogger', 'Level (masl)')))  #This is the method that accepts even national characters ('åäö') in matplotlib axes labels
        self.axes.set_title(ru(QCoreApplication.translate('Calibrlogger', 'Plot for ')) + str(obsid))  #This is the method that accepts even national characters ('åäö') in matplotlib axes labels
        for label in self.axes.xaxis.get_ticklabels():
            label.set_fontsize(8)
        for label in self.axes.yaxis.get_ticklabels():
            label.set_fontsize(8)

        self.calibrplotfigure.tight_layout()

        if self.axes.legend_ is None:
            leg = self.axes.legend()

        self.canvas.draw()
        #plt.close(self.calibrplotfigure)#this closes reference to self.calibrplotfigure
        self.calib_help.setText("")

        if last_used_obsid == self.obsid:
            self.mpltoolbar.forward()
        else:
            #Clear choices
            self.reset_settings()
            self.mpltoolbar.update()

        utils.stop_waiting_cursor()