def test_calibrlogger_adjust_trend(self, mock_messagebar):
        """Trend adjustment should pivot the second logger level to ~0.

        Seeds obsid 'rb1' with two logger levels (100, 200) and two opposing
        manual levels (200, 100), then runs adjust_trend_func() and checks the
        resulting w_levels_logger rows: first level stays 100.0, second is
        reduced to ~0 (compared at 11 significant digits to absorb float
        round-off).
        NOTE(review): unlike the fuller variant of this test, the L1/L2/M1/M2
        level text fields are never filled in here -- presumably the widget
        defaults suffice for adjust_trend_func(); confirm.
        """
        db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 200)")
        db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 200)")
        db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 100)")

        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
        gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
        calibrlogger.update_plot()
        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
        calibrlogger.L1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
        calibrlogger.L2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
        calibrlogger.M1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
        calibrlogger.M2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))

        calibrlogger.adjust_trend_func()

        res = db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger')

        # Normalize the near-zero float in column 5 of the second row so the
        # string comparison below is stable across platforms.
        l = list(res[1][1])
        l[5] = '%.11e'%Decimal(l[5])
        res[1][1] = tuple(l)
        test = utils_for_tests.create_test_string(res)

        print(mock_messagebar.mock_calls)
        print(test)
        ref = '(True, [(rb1, 2017-02-01 00:00, None, None, None, 100.0, None), (rb1, 2017-02-10 00:00, None, None, None, -2.84217094304e-14, None)])'
        assert test == ref
Example #2
0
    def test_alter_data(self):
        """alter_data() should propagate 'comment' rows to matching observations.

        Per the reference string: a 'comment' observation is copied onto
        parameters sharing the same sublocation and date_time (as a 'comment'
        key), the source comment row gets skip_comment_import: True, and rows
        with a different sublocation or date_time are left unchanged.
        """
        observations = [{u'parametername': u'comment',
                         u'date_time': datestring_to_date(u'2016-01-01'),
                         u'sublocation': u'1',
                         u'value': u'shared_comment'},
                        {u'parametername': u'par_get_shared_comment',
                         u'date_time': datestring_to_date(u'2016-01-01'),
                         u'sublocation': u'1',
                         u'value': u'1'},
                        {u'parametername': u'par_not_get_shared_comment',
                         u'date_time': datestring_to_date(u'2016-01-02'),
                         u'sublocation': u'2',
                         u'value': u'1'},
                        {u'parametername': u'par_not_get_shared_comment',
                         u'date_time': datestring_to_date(u'2016-01-04'),
                         u'sublocation': u'1',
                         u'value': u'1'},
                        {u'parametername': u'comment',
                         u'date_time': datestring_to_date(u'2016-01-03'),
                         u'sublocation': u'1',
                         u'value': u'not_shared_comment'}
                        ]
        observations = self.comments_import.alter_data(observations)

        test_string = create_test_string(observations)
        reference_string = u'[{date_time: 2016-01-01 00:00:00, parametername: comment, skip_comment_import: True, sublocation: 1, value: shared_comment}, {comment: shared_comment, date_time: 2016-01-01 00:00:00, parametername: par_get_shared_comment, sublocation: 1, value: 1}, {date_time: 2016-01-02 00:00:00, parametername: par_not_get_shared_comment, sublocation: 2, value: 1}, {date_time: 2016-01-04 00:00:00, parametername: par_not_get_shared_comment, sublocation: 1, value: 1}, {date_time: 2016-01-03 00:00:00, parametername: comment, sublocation: 1, value: not_shared_comment}]'
        assert test_string == reference_string
Example #3
0
    def test_calibrlogger_adjust_trend(self, mock_messagebar):
        """Trend adjustment pivots the second logger level to ~0.

        Seeds obsid 'rb1' with two logger levels (100, 200) and two opposing
        manual levels (200, 100), fills in the L1/L2 (logger) and M1/M2
        (manual) date and level widgets, then runs adjust_trend_func() and
        checks the resulting w_levels_logger rows: first level stays 100.0,
        second is brought down to ~0 (compared at 11 significant digits to
        absorb float round-off).
        """
        db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 200)")
        db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 200)")
        db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 100)")

        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
        gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
        calibrlogger.update_plot()
        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
        calibrlogger.L1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
        calibrlogger.L2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
        calibrlogger.M1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
        calibrlogger.M2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
        calibrlogger.L1_level.setText('100')
        calibrlogger.L2_level.setText('200')
        calibrlogger.M1_level.setText('200')
        calibrlogger.M2_level.setText('100')

        calibrlogger.adjust_trend_func()
        res = db_utils.sql_load_fr_db('SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger')
        # Normalize the near-zero float in column 5 of the second row so the
        # string comparison below is stable across platforms.
        l = list(res[1][1])
        l[5] = '%.11e'%Decimal(l[5])
        res[1][1] = tuple(l)
        test = utils_for_tests.create_test_string(res)
        print(mock_messagebar.mock_calls)

        ref = '(True, [(rb1, 2017-02-01 00:00, None, None, None, 100.0, None), (rb1, 2017-02-10 00:00, None, None, None, -2.84217094304e-14, None)])'
        print("Ref")

        print(ref)
        print("Test")
        print(test)
        assert test == ref
    def filter_dates_from_filedata(file_data,
                                   obsid_last_imported_dates,
                                   obsid_header_name='obsid',
                                   date_time_header_name='date_time'):
        """
        :param file_data: a list of lists like [['obsid', 'date_time', ...], [obsid1, date_time1, ...]]
        :param obsid_last_imported_dates: a dict like {'obsid1': last_date_in_db, ...}
        :param obsid_header_name: the name of the obsid header
        :param date_time_header_name: the name of the date_time header
        :return: A filtered list with only dates after last date is included for each obsid.

        >>> DiverofficeImport.filter_dates_from_filedata([['obsid', 'date_time'], ['obs1', '2016-09-28'], ['obs1', '2016-09-29']], {'obs1': [('2016-09-28', )]})
        [['obsid', 'date_time'], ['obs1', '2016-09-29']]
        """
        # Header-only input: nothing to filter.
        if len(file_data) == 1:
            return file_data

        header = file_data[0]
        obsid_col = header.index(obsid_header_name)
        date_col = header.index(date_time_header_name)
        # Fallback mimics the db result shape; any real date is newer than it.
        no_previous_import = [('0001-01-01 00:00:00', )]

        kept_rows = []
        for row in file_data[1:]:
            last_imported = obsid_last_imported_dates.get(row[obsid_col],
                                                          no_previous_import)[0][0]
            if datestring_to_date(row[date_col]) > datestring_to_date(last_imported):
                kept_rows.append(row)

        # Re-attach the header in front of the surviving data rows.
        return [header] + kept_rows
Example #5
0
 def test_prepare_w_levels_data_to_both(self):
     """prepare_w_levels_data() maps observation dicts to w_levels rows.

     Per the reference string: 'level_masl' is written to the level_masl
     column (the 'value' key is dropped for that row), 'meas' goes to the
     meas column, and missing fields become empty strings.
     """
     observations = [{'obsid': 'obs1', 'date_time': datestring_to_date('2016-01-01 00:00'), 'value': '123,4', 'level_masl': '567'},
                     {'obsid': 'obs1', 'date_time': datestring_to_date('2016-01-01 00:02'), 'meas': '897'}]
     test_string = create_test_string(FieldloggerImport.prepare_w_levels_data(observations))
     reference_string = '[[obsid, date_time, meas, h_toc, level_masl, comment], [obs1, 2016-01-01 00:00:00, , , 567, ], [obs1, 2016-01-01 00:02:00, 897, , , ]]'
     print(test_string)
     assert test_string == reference_string
Example #6
0
    def __init__(self, calendar=False, stretch=True):
        """Build a from/to datetime filter row (1901-01-01 .. 2099-12-31).

        :param calendar: if True, enable calendar popups on both editors.
        :param stretch: if True, append a layout stretch after the widgets.

        NOTE(review): self.layout is not created here -- presumably set up by
        the superclass __init__; confirm.
        """
        super(DateTimeFilter, self).__init__()
        self.label = PyQt4.QtGui.QLabel(
            ru(
                QCoreApplication.translate(u'DateTimeFilter',
                                           u'Import data from: ')))
        self.from_datetimeedit = PyQt4.QtGui.QDateTimeEdit(
            datestring_to_date(u'1901-01-01 00:00:00'))
        self.from_datetimeedit.setDisplayFormat(u'yyyy-MM-dd hh:mm:ss')
        self.from_datetimeedit.setMinimumWidth(180)

        self.label_to = PyQt4.QtGui.QLabel(
            ru(QCoreApplication.translate(u'DateTimeFilter', u'to: ')))
        self.to_datetimeedit = PyQt4.QtGui.QDateTimeEdit(
            datestring_to_date(u'2099-12-31 23:59:59'))
        self.to_datetimeedit.setDisplayFormat(u'yyyy-MM-dd hh:mm:ss')
        self.to_datetimeedit.setMinimumWidth(180)

        if calendar:
            self.from_datetimeedit.setCalendarPopup(True)
            self.to_datetimeedit.setCalendarPopup(True)
        #self.import_after_last_date = PyQt4.QtGui.QCheckBox(u"Import after latest date in database for each obsid")
        for widget in [
                self.label, self.from_datetimeedit, self.label_to,
                self.to_datetimeedit
        ]:
            self.layout.addWidget(widget)
        if stretch:
            self.layout.addStretch()
Example #7
0
    def test_calc_selected_dont_overwrite_dont_skip_nulls(
            self, mock_selected_obsids, mock_messagebar, mock_skippopup):
        """calcselected() with overwrite disabled leaves existing values alone.

        rb1 has an h_toc but overwrite is unchecked; rb2 has h_toc NULL.
        Per the reference string no level_masl is (re)calculated for either
        obsid and rb2's pre-existing level_masl 667.0 is preserved.
        """
        mock_selected_obsids.return_value = [u'rb1', u'rb2']
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
        db_utils.sql_alter_db(
            u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', NULL)''')
        db_utils.sql_alter_db(
            u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT into w_levels (obsid, meas, level_masl, date_time) VALUES ('rb2', 555, 667, '2005-01-02 00:00:00')'''
        )
        self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(
            datestring_to_date(u'2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(
            datestring_to_date(u'2010-01-01 00:00:00'))
        self.calclvl.checkBox_overwrite_prev.setChecked(False)

        self.calclvl.calcselected()
        #self.checkBox_skipnulls

        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db(
                u'SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid, date_time'
            ))
        reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, None, None), (rb2, 2005-01-01 00:00:00, 444.0, None, None), (rb2, 2005-01-02 00:00:00, 555.0, None, 667.0)])'
        print(str(mock_messagebar.mock_calls))
        print(test_string)
        assert test_string == reference_string
Example #8
0
    def test_calc_selected(self, mock_selected_obsids):
        """calcselected() computes level_masl only for selected obsids.

        Only rb1 is selected: per the reference string rb1 gets
        level_masl = h_toc - meas = 1 - 222 = -221.0 while rb2, though it has
        an h_toc, is left untouched (h_toc/level_masl stay None in w_levels).
        """
        mock_selected_obsids.return_value = [u'rb1']
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
        db_utils.sql_alter_db(
            u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', 4)''')
        db_utils.sql_alter_db(
            u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')'''
        )
        self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(
            datestring_to_date(u'2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(
            datestring_to_date(u'2010-01-01 00:00:00'))
        self.calclvl.calcselected()

        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db(
                u'SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid'
            ))
        reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0), (rb2, 2005-01-01 00:00:00, 444.0, None, None)])'
        assert test_string == reference_string
Example #9
0
    def test_alter_data(self):
        """alter_data() should propagate 'comment' rows to matching observations.

        Per the reference data: a 'comment' observation is copied onto
        parameters sharing the same sublocation and date_time (as a 'comment'
        key), the source comment row gets skip_comment_import: True, and rows
        with a different sublocation or date_time are left unchanged.
        """
        observations = [{'parametername': 'comment',
                         'date_time': datestring_to_date('2016-01-01'),
                         'sublocation': '1',
                         'value': 'shared_comment'},
                        {'parametername': 'par_get_shared_comment',
                         'date_time': datestring_to_date('2016-01-01'),
                         'sublocation': '1',
                         'value': '1'},
                        {'parametername': 'par_not_get_shared_comment',
                         'date_time': datestring_to_date('2016-01-02'),
                         'sublocation': '2',
                         'value': '1'},
                        {'parametername': 'par_not_get_shared_comment',
                         'date_time': datestring_to_date('2016-01-04'),
                         'sublocation': '1',
                         'value': '1'},
                        {'parametername': 'comment',
                         'date_time': datestring_to_date('2016-01-03'),
                         'sublocation': '1',
                         'value': 'not_shared_comment'}
                        ]
        observations = self.comments_import.alter_data(observations)

        test_string = create_test_string(observations)
        reference_string = create_test_string([{'date_time': '2016-01-01 00:00:00', 'parametername': 'comment', 'skip_comment_import': True, 'sublocation': '1', 'value': 'shared_comment'}, {'comment': 'shared_comment', 'date_time': '2016-01-01 00:00:00', 'parametername': 'par_get_shared_comment', 'sublocation': '1', 'value': '1'}, {'date_time': '2016-01-02 00:00:00', 'parametername': 'par_not_get_shared_comment', 'sublocation': '2', 'value': '1'}, {'date_time': '2016-01-04 00:00:00', 'parametername': 'par_not_get_shared_comment', 'sublocation': '1', 'value': '1'}, {'date_time': '2016-01-03 00:00:00', 'parametername': 'comment', 'sublocation': '1', 'value': 'not_shared_comment'}])
        assert test_string == reference_string
    def calculate_offset(self):
        """ Part of adjustment method 3. adjust level_masl by clicking in plot.
        this method extracts the head from head_ts with the same date as the line node.
            4. Calculating y-position - head (or level_masl) and setting self.LoggerPos.
            5. Run calibration.

        Uses the previously stored plot selections self.log_pos (node date
        string) and self.y_pos (clicked y value); both are consumed (reset to
        None) here. On success the offset y_pos - logger_value is written to
        the Add2Levelmasl text field; on failure a popup is shown instead.
        """
        if self.log_pos is not None and self.y_pos is not None:
            utils.start_waiting_cursor()

            logger_ts = self.level_masl_ts

            y_pos = self.y_pos
            log_pos = self.log_pos
            # Consume the stored selections so a stale pair can't be reused.
            self.y_pos = None
            self.log_pos = None
            log_pos_date = datestring_to_date(log_pos).replace(tzinfo=None)

            # Find the logger value whose date matches the selected node.
            # BUGFIX: track the match explicitly. The previous version used
            # the loop variable itself ("for raw_date, logger_value in ..."),
            # which leaves logger_value bound to the *last* series value when
            # no date matches, so the "is None" check below never fired.
            logger_value = None
            for raw_date, value in logger_ts:
                date = datestring_to_date(raw_date).replace(tzinfo=None)
                if date == log_pos_date:
                    logger_value = value
                    break

            # BUGFIX: always stop the waiting cursor; previously it was only
            # stopped on the success branch, leaving the cursor stuck after a
            # failed match.
            utils.stop_waiting_cursor()

            if logger_value is None:
                utils.pop_up_info(ru(QCoreApplication.translate('Calibrlogger', "No connection between level_masl dates and logger date could be made!\nTry again or choose a new logger line node!")))
            else:
                self.Add2Levelmasl.setText(str(float(y_pos) - float(logger_value)))

        self.pushButtonMpos.setEnabled(False)
    def test_calibrlogger_set_log_pos(self, mock_messagebar, skip_popup):
        """calc_best_fit() must not change data when no measurement matches.

        The only manual measurement ('2017-02-01 00:00') lies outside the
        search radius relative to the logger reading at '2017-02-01 01:00'
        (ToDateTime equals the logger date), so per the reference string the
        logger level_masl stays at 50.0.
        NOTE(review): loggerpos_masl_or_offset_state = 2 selects the
        level_masl (offset) mode -- presumably, matching calculate_offset;
        confirm against Calibrlogger.
        """
        db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db(
            "INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)"
        )
        db_utils.sql_alter_db(
            "INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 01:00', 50)"
        )
        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)

        calibrlogger.update_plot()

        calibrlogger.loggerpos_masl_or_offset_state = 2
        calibrlogger.FromDateTime.setDateTime(
            date_utils.datestring_to_date('2010-02-01 01:00'))
        calibrlogger.ToDateTime.setDateTime(
            date_utils.datestring_to_date('2017-02-01 01:00'))
        gui_utils.set_combobox(calibrlogger.combobox_obsid,
                               'rb1 (uncalibrated)')
        calibrlogger.bestFitSearchRadius.setText('2 hours')

        calibrlogger.calc_best_fit()

        test = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
        ref = '(True, [(rb1, 2017-02-01 01:00, None, None, None, 50.0, None)])'
        print(test)
        assert test == ref
    def calibrate_from_plot_selection(self):
        """ Calibrates by selecting a line node and a y-position on the plot

            The user have to click on the button three times and follow instructions.

            The process:
            1. Selecting a line node.
            2. Selecting a selecting a y-position from the plot.
            3. Extracting the head from head_ts with the same date as the line node.
            4. Calculating y-position - head (or level_masl) and setting self.LoggerPos.
            5. Run calibration.
        """
        #Run init to make sure self.meas_ts and self.head_ts is updated for the current obsid.
        self.load_obsid_and_init()
        self.deactivate_pan_zoom()
        self.canvas.setFocusPolicy(Qt.ClickFocus)
        self.canvas.setFocus()

        # Step 1: wait for the user to pick a logger node on the plot.
        if self.log_pos is None:
            self.calib_help.setText("Select a logger node.")
            self.cid.append(self.canvas.mpl_connect('pick_event', self.set_log_pos_from_node_date_click))

        # Step 2: node chosen, wait for a target y-position click.
        if self.log_pos is not None and self.y_pos is None:
            self.calib_help.setText("Select a y position to move to.")
            self.cid.append(self.canvas.mpl_connect('button_press_event', self.set_y_pos_from_y_click))

        # Step 3-5: both selections made; compute the offset and calibrate.
        if self.log_pos is not None and self.y_pos is not None:
            PyQt4.QtGui.QApplication.setOverrideCursor(PyQt4.QtCore.Qt.WaitCursor)

            # State 1 calibrates against head, otherwise against level_masl.
            if self.loggerpos_masl_or_offset_state == 1:
                logger_ts = self.head_ts
            else:
                logger_ts = self.level_masl_ts

            y_pos = self.y_pos
            log_pos = self.log_pos
            # Consume the stored selections so a stale pair can't be reused.
            self.y_pos = None
            self.log_pos = None
            log_pos_date = datestring_to_date(log_pos).replace(tzinfo=None)

            # Find the logger value whose date matches the selected node.
            # BUGFIX: track the match explicitly. The previous version used
            # the loop variable itself, which leaves logger_value bound to the
            # *last* series value when no date matches, so the "is None"
            # check below never fired. The unused enumerate index is dropped.
            logger_value = None
            for raw_date, value in logger_ts:
                date = datestring_to_date(raw_date).replace(tzinfo=None)
                if date == log_pos_date:
                    logger_value = value
                    break

            # BUGFIX: always restore the override cursor; previously it was
            # only restored on the success branch, leaving the wait cursor
            # stuck after a failed match.
            PyQt4.QtGui.QApplication.restoreOverrideCursor()

            if logger_value is None:
                utils.pop_up_info("No connection between head_ts dates and logger date could be made!\nTry again or choose a new logger line node!")
            else:
                self.LoggerPos.setText(str(float(y_pos) - float(logger_value)))
                self.calibrateandplot()

            self.calib_help.setText("")
Example #13
0
 def __init__(self):
     """Build a from/to datetime filter row (1900-01-01 .. 2099-12-31).

     NOTE(review): self.layout is not created here -- presumably set up by
     the superclass __init__; confirm.
     """
     super(DateTimeFilter, self).__init__()
     self.label = PyQt4.QtGui.QLabel(u'Import data from: ')
     self.from_datetimeedit = PyQt4.QtGui.QDateTimeEdit(datestring_to_date(u'1900-01-01 00:00:00'))
     self.label_to = PyQt4.QtGui.QLabel(u'to: ')
     self.to_datetimeedit = PyQt4.QtGui.QDateTimeEdit(datestring_to_date(u'2099-12-31 23:59:59'))
     #self.import_after_last_date = PyQt4.QtGui.QCheckBox(u"Import after latest date in database for each obsid")
     for widget in [self.label, self.from_datetimeedit, self.label_to, self.to_datetimeedit]:
         self.layout.addWidget(widget)
     self.layout.addStretch()
Example #14
0
    def test_calcall(self):
        """calcall() computes level_masl = h_toc - meas for all w_levels rows.

        Single row for rb1: 1 - 222 = -221.0 per the reference string.
        (PyQt4/QtGui variant of this test.)
        """
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "h_toc") VALUES ('rb1', 1)''')
        utils.sql_alter_db(u'''INSERT into w_levels ("obsid", "meas", "date_time") VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date(u'2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date(u'2010-01-01 00:00:00'))
        self.calclvl.calcall()

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, date_time, meas, h_toc, level_masl from w_levels'))
        reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
        assert test_string == reference_string
    def test_calcall(self):
        """calcall() computes level_masl = h_toc - meas for all w_levels rows.

        Single row for rb1: 1 - 222 = -221.0 per the reference string.
        (Qt5/QtWidgets variant of this test.)
        """
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
        self.calclvl.calcall()

        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels'))
        reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
        assert test_string == reference_string
Example #16
0
    def test_calcall(self, mock_messagebar):
        """calcall() computes level_masl = h_toc - meas for all w_levels rows.

        Single row for rb1: 1 - 222 = -221.0 per the reference string.
        (Variant with the message bar mocked out.)
        """
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
        self.calclvl.calcall()

        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels'))
        reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
        assert test_string == reference_string
Example #17
0
 def test_prepare_w_flow_data_assert_only_ask_instrument_once(self, mock_flow_instruments,
                              mock_instrument_not_found):
     """The instrument-id dialog must be raised once for two same-obsid rows.

     Both observations share obsid/flowtype/unit, so
     mock_instrument_not_found is asserted to have been called exactly once
     and its answer ('inst1') is reused for both output rows. Decimal commas
     in 'value' are converted to dots per the reference string.
     """
     # Note: rebinding the mock argument here replaces the injected mock with
     # a plain list; the dialog mock is the one actually asserted below.
     mock_flow_instruments = [True, {}]
     mock_instrument_not_found.return_value.answer = u'ok'
     mock_instrument_not_found.return_value.value = u'inst1'
     observations = [{u'obsid': u'obs1', u'flowtype': u'atype',
                      u'date_time': datestring_to_date(u'2016-01-01 00:00'),
                      u'unit': u'aunit', u'value': u'123,4'},
                     {u'obsid': u'obs1', u'flowtype': u'atype',
                      u'date_time': datestring_to_date(u'2016-01-02 00:00'),
                      u'unit': u'aunit', u'value': u'223,4'}]
     test_string = create_test_string(FieldloggerImport.prepare_w_flow_data(observations))
     mock_instrument_not_found.assert_called_once_with(combobox_label=u'Instrument id:s in database.\nThe last used instrument id for the current obsid is prefilled:', default_value=u'', dialogtitle=u'Submit instrument id', existing_list=[u''], msg=u'Submit the instrument id for the measurement:\n obs1, 2016-01-01 00:00:00, atype, aunit')
     reference_string = u'[[obsid, instrumentid, flowtype, date_time, reading, unit, comment], [obs1, inst1, atype, 2016-01-01 00:00:00, 123.4, aunit, ], [obs1, inst1, atype, 2016-01-02 00:00:00, 223.4, aunit, ]]'
     assert test_string == reference_string
Example #18
0
 def test_prepare_w_qual_field_data_with_depth(self):
     """prepare_w_qual_field_data() maps a full observation to a w_qual_field row.

     Per the reference string the numeric value '1.23' fills both
     reading_num and reading_txt, and the optional 'depth' key is carried
     through to the depth column.
     """
     observations = [{'obsid': 'obs1', 'date_time': datestring_to_date('2016-01-01 00:00'),
                      'parameter': 'par1', 'unit': 'unit1', 'staff': 'staff1', 'instrument': 'inst1',
                      'comment': 'comment1', 'value': '1.23', 'depth': '123.4'}]
     test_string = create_test_string(FieldloggerImport.prepare_w_qual_field_data(observations))
     reference_string = '[[obsid, staff, date_time, instrument, parameter, reading_num, reading_txt, unit, depth, comment], [obs1, staff1, 2016-01-01 00:00:00, inst1, par1, 1.23, 1.23, unit1, 123.4, comment1]]'
     assert test_string == reference_string
    def test_calselected(self, mock_messagebar, mock_getselectedobjectnames, mock_iface):
        """calcselected() derives Aveflow rows from Accvol readings, selected obsids only.

        Obsid '1' is selected; per the reference string each consecutive pair
        of its Accvol readings yields an 'Aveflow' row in l/s at the later
        timestamp (rounded to 4 decimals), while obsid '2' gains no Aveflow
        rows. Accvol rows are inserted out of chronological order to show the
        calculation orders by date_time.
        """
        mock_getselectedobjectnames.return_value = ['1']

        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('1')''')
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('2')''')
        db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-02 00:00', 2.0, 'm3')''')
        db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-01 00:00', 1.0, 'm3')''')
        db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-04 00:00', 10.0, 'm3')''')
        db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-03 00:00', 5.0, 'm3')''')
        db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('2', 'inst2', 'Accvol', '2019-02-04 00:00', 10.0, 'm3')''')
        db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('2', 'inst2', 'Accvol', '2019-02-03 00:00', 5.0, 'm3')''')

        widget = QtWidgets.QWidget()
        calcave = w_flow_calc_aveflow.Calcave(widget)
        calcave.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
        calcave.calcselected()
        print(str(mock_messagebar.mock_calls))
        #insert or ignore into w_flow(obsid,instrumentid,flowtype,date_time,reading,unit) values('%s','%s','Aveflow','%s','%s','l/s')
        res = db_utils.sql_load_fr_db('''SELECT obsid, instrumentid, flowtype, date_time, ROUND(reading, 4), unit FROM w_flow ORDER BY obsid, flowtype, date_time''')[1]
        test = utils.anything_to_string_representation(res)


        print(test)
        reference = '[("1", "inst1", "Accvol", "2019-02-01 00:00", 1.0, "m3", ), ("1", "inst1", "Accvol", "2019-02-02 00:00", 2.0, "m3", ), ("1", "inst1", "Accvol", "2019-02-03 00:00", 5.0, "m3", ), ("1", "inst1", "Accvol", "2019-02-04 00:00", 10.0, "m3", ), ("1", "inst1", "Aveflow", "2019-02-02 00:00", 0.0116, "l/s", ), ("1", "inst1", "Aveflow", "2019-02-03 00:00", 0.0347, "l/s", ), ("1", "inst1", "Aveflow", "2019-02-04 00:00", 0.0579, "l/s", ), ("2", "inst2", "Accvol", "2019-02-03 00:00", 5.0, "m3", ), ("2", "inst2", "Accvol", "2019-02-04 00:00", 10.0, "m3", )]'
        #result_list = self.calcave.observations
        #reference_list = ['1', '2']
        assert test == reference
    def test_calibrlogger_set_log_pos(self, mock_messagebar):
        """set_logger_pos() writes level_masl from head and the LoggerPos field.

        The logger row starts with head_cm=100 and no level_masl; with
        LoggerPos set to '2' the stored row ends up with level_masl 3.0 per
        the reference string (head 100 cm = 1 m, plus logger position 2).
        """
        db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db(
            "INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)"
        )
        db_utils.sql_alter_db(
            "INSERT INTO w_levels_logger (obsid, date_time, head_cm) VALUES ('rb1', '2017-02-01 00:00', 100)"
        )

        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
        calibrlogger.update_plot()

        calibrlogger.FromDateTime.setDateTime(
            date_utils.datestring_to_date('2000-01-01 00:00:00'))
        calibrlogger.LoggerPos.setText('2')
        gui_utils.set_combobox(calibrlogger.combobox_obsid,
                               'rb1 (uncalibrated)')

        calibrlogger.set_logger_pos()
        print(str(mock_messagebar.mock_calls))
        test = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
        ref = '(True, [(rb1, 2017-02-01 00:00, 100.0, None, None, 3.0, None)])'
        print(test)
        assert test == ref
    def test_calc_selected(self, mock_selected_obsids):
        """calcselected() computes level_masl only for selected obsids.

        Only rb1 is selected: per the reference string rb1 gets
        level_masl = h_toc - meas = 1 - 222 = -221.0 while rb2 is left
        untouched. (Qt5/QtWidgets variant of this test.)
        """
        mock_selected_obsids.return_value = ['rb1']
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', 4)''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')''')
        self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
        self.calclvl.calcselected()

        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid'))
        reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0), (rb2, 2005-01-01 00:00:00, 444.0, None, None)])'
        assert test_string == reference_string
Example #22
0
 def test_prepare_w_flow_data(self, mock_flow_instruments, mock_instrument_not_found):
     """prepare_w_flow_data() builds a w_flow row, asking for an instrument id.

     The instrument dialog is mocked to answer 'inst1'; per the reference
     string that id is inserted into the row and the decimal comma in
     'value' ('123,4') is converted to a dot (123.4).
     """
     # Note: rebinding the mock argument replaces the injected mock with a
     # plain list; only the dialog mock participates in the behavior here.
     mock_flow_instruments = [True, {}]
     mock_instrument_not_found.return_value.answer = u'ok'
     mock_instrument_not_found.return_value.value = u'inst1'
     observations = [{u'obsid': u'obs1', u'flowtype': u'atype', u'date_time': datestring_to_date(u'2016-01-01 00:00'), u'unit': u'aunit', u'value': u'123,4'}]
     test_string = create_test_string(FieldloggerImport.prepare_w_flow_data(observations))
     reference_string = u'[[obsid, instrumentid, flowtype, date_time, reading, unit, comment], [obs1, inst1, atype, 2016-01-01 00:00:00, 123.4, aunit, ]]'
     assert test_string == reference_string
Example #23
0
 def test_prepare_w_flow_data_assert_only_ask_instrument_once(self, mock_flow_instruments,
                              mock_instrument_not_found):
     """The instrument-id dialog must be raised once for two same-obsid rows.

     Sublocation variant: the dialog message includes the sublocation
     ('obs1.sub') and the obsid-specific prompt text. The mocked answer
     ('inst1') is reused for both output rows; decimal commas in 'value'
     become dots per the reference string.
     """
     # Note: rebinding the mock argument replaces the injected mock with a
     # plain list; the dialog mock is the one actually asserted below.
     mock_flow_instruments = [True, {}]
     mock_instrument_not_found.return_value.answer = 'ok'
     mock_instrument_not_found.return_value.value = 'inst1'
     observations = [{'sublocation': 'obs1.sub',
                      'obsid': 'obs1', 'flowtype': 'atype',
                      'date_time': datestring_to_date('2016-01-01 00:00'),
                      'unit': 'aunit', 'value': '123,4'},
                     {'sublocation': 'obs1.sub',
                      'obsid': 'obs1', 'flowtype': 'atype',
                      'date_time': datestring_to_date('2016-01-02 00:00'),
                      'unit': 'aunit', 'value': '223,4'}]
     test_string = create_test_string(FieldloggerImport.prepare_w_flow_data(observations))
     mock_instrument_not_found.assert_called_once_with(combobox_label='Instrument id:s in database for obsid obs1.\nThe last used instrument id for obsid obs1 is prefilled:', default_value='', dialogtitle='Submit instrument id', existing_list=[''], msg='Submit the instrument id for the measurement:\n obs1.sub, obs1, 2016-01-01 00:00:00, atype, aunit')
     reference_string = '[[obsid, instrumentid, flowtype, date_time, reading, unit, comment], [obs1, inst1, atype, 2016-01-01 00:00:00, 123.4, aunit, ], [obs1, inst1, atype, 2016-01-02 00:00:00, 223.4, aunit, ]]'
     assert test_string == reference_string
Example #24
0
 def test_date_time_filter_observation_return_observation_one_second_to_to(self):
     """An observation one second before the to-date must pass the filter unchanged."""
     dt_filter = DateTimeFilter()
     dt_filter.from_date = '2016-01-01'
     dt_filter.to_date = '2016-01-10'
     obs = {'date_time': datestring_to_date('2016-01-09 23:59:59')}
     result = create_test_string(dt_filter.alter_data(obs))
     assert result == '{date_time: 2016-01-09 23:59:59}'
    def test_filter_dates_from_filedata(self):
        """Rows not newer than the obsid's last imported date are dropped;
        obsids without a last date (rb3) are kept in full."""
        header = ['obsid', 'date_time']
        rows = [['rb1', '2015-05-01 00:00:00'], ['rb1', '2016-05-01 00:00'],
                ['rb2', '2015-05-01 00:00:00'], ['rb2', '2016-05-01 00:00'],
                ['rb3', '2015-05-01 00:00:00'], ['rb3', '2016-05-01 00:00']]
        last_dates = {'rb1': [(datestring_to_date('2016-01-01 00:00:00'),)],
                      'rb2': [(datestring_to_date('2017-01-01 00:00:00'),)]}

        result = utils_for_tests.create_test_string(
            DiverofficeImport.filter_dates_from_filedata([header] + rows, last_dates))

        assert result == '''[[obsid, date_time], [rb1, 2016-05-01 00:00], [rb3, 2015-05-01 00:00:00], [rb3, 2016-05-01 00:00]]'''
    def reset_settings(self):
        """Reset the calibration widgets to defaults and prefill the logger
        position and from-date from the latest calibration, if there is one.
        """
        # Default end date: effectively "no upper bound".
        self.ToDateTime.setDateTime(datestring_to_date('2099-12-31 23:59:59'))
        self.Add2Levelmasl.setText('')
        self.bestFitSearchRadius.setText('10 minutes')
        #self.mpltoolbar.home()

        # getlastcalibration presumably returns rows like
        # [(date_string, logger_position), ...] -- verify against its definition.
        last_calibration = self.getlastcalibration(self.obsid)
        try:
            if last_calibration[0][1] and last_calibration[0][0]:
                self.LoggerPos.setText(str(last_calibration[0][1]))
                self.FromDateTime.setDateTime(datestring_to_date(last_calibration[0][0]))
            else:
                # No usable calibration row: clear the position and push the
                # from-date out of the way.
                self.LoggerPos.setText('')
                self.FromDateTime.setDateTime(datestring_to_date('2099-12-31 23:59:59'))
        except Exception as e:
            # Missing or malformed calibration data is not fatal; log and fall
            # back to the defaults.
            utils.MessagebarAndLog.info(log_msg=ru(QCoreApplication.translate('Calibrlogger', 'Getting last calibration failed for obsid %s, msg: %s'))%(self.obsid, str(e)))
            self.LoggerPos.setText('')
            self.FromDateTime.setDateTime(datestring_to_date('2099-12-31 23:59:59'))
    def test_calibrlogger_calc_best_fit_add_no_matches_same_to_date(self, mock_messagebar, skip_popup):
        """With ToDateTime equal to the logger sample's timestamp, calc_best_fit
        (state 2) must apply no adjustment: the logger level stays at 50.
        """
        db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 01:00', 50)")
        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)

        calibrlogger.update_plot()

        calibrlogger.loggerpos_masl_or_offset_state = 2
        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2010-02-01 01:00'))
        calibrlogger.ToDateTime.setDateTime(date_utils.datestring_to_date('2017-02-01 01:00'))
        gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
        calibrlogger.bestFitSearchRadius.setText('2 hours')

        calibrlogger.calc_best_fit()

        # level_masl must be unchanged from the inserted 50.
        test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
        ref = '(True, [(rb1, 2017-02-01 01:00, None, None, None, 50.0, None)])'
        print(test)
        assert test == ref
Example #28
0
    def __init__(self, calendar=False, stretch=True):
        """Build the from/to date-time widget row of the import filter.

        :param calendar: when True, both date-time edits get a calendar popup.
        :param stretch: when True, a stretch is appended after the widgets.
        """
        super(DateTimeFilter, self).__init__()
        self.label = qgis.PyQt.QtWidgets.QLabel(ru(QCoreApplication.translate('DateTimeFilter', 'Import data from: ')))
        # Default period spans effectively all dates, i.e. no filtering.
        self.from_datetimeedit = qgis.PyQt.QtWidgets.QDateTimeEdit(datestring_to_date('1901-01-01 00:00:00'))
        self.from_datetimeedit.setDisplayFormat('yyyy-MM-dd hh:mm:ss')
        self.from_datetimeedit.setMinimumWidth(180)

        self.label_to = qgis.PyQt.QtWidgets.QLabel(ru(QCoreApplication.translate('DateTimeFilter', 'to: ')))
        self.to_datetimeedit = qgis.PyQt.QtWidgets.QDateTimeEdit(datestring_to_date('2099-12-31 23:59:59'))
        self.to_datetimeedit.setDisplayFormat('yyyy-MM-dd hh:mm:ss')
        self.to_datetimeedit.setMinimumWidth(180)

        if calendar:
            self.from_datetimeedit.setCalendarPopup(True)
            self.to_datetimeedit.setCalendarPopup(True)
        #self.import_after_last_date = PyQt4.QtWidgets.QCheckBox("Import after latest date in database for each obsid")
        # self.layout is presumably created by the superclass __init__ -- verify.
        for widget in [self.label, self.from_datetimeedit, self.label_to, self.to_datetimeedit]:
            self.layout.addWidget(widget)
        if stretch:
            self.layout.addStretch()
    def test_calc_selected_dont_overwrite(self, mock_selected_obsids, mock_messagebar):
        """With the overwrite checkbox unchecked, calcselected must fill in
        missing level_masl values (from h_toc - meas) but keep the
        pre-existing level_masl (667) untouched.
        """
        mock_selected_obsids.return_value = ['rb1', 'rb2']
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', 4)''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')''')
        db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, level_masl, date_time) VALUES ('rb2', 555, 667, '2005-01-02 00:00:00')''')
        self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
        # The point of the test: overwrite of existing level_masl is disabled.
        self.calclvl.checkBox_overwrite_prev.setChecked(False)
        self.calclvl.calcselected()
        #self.checkBox_skipnulls

        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid, date_time'))
        reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0), (rb2, 2005-01-01 00:00:00, 444.0, 4.0, -440.0), (rb2, 2005-01-02 00:00:00, 555.0, None, 667.0)])'
        print(str(mock_messagebar.mock_calls))
        print(test_string)
        assert test_string == reference_string
    def test_prepare_w_flow_data_assert_only_ask_instrument_twice(self, mock_flow_instruments,
                                 mock_instrument_not_found):
        """Observations for two distinct obsids must open the instrument-id
        dialog once per obsid (twice in total), never per observation.
        """
        # NOTE(review): rebinding the parameter has no effect on the patch -- verify.
        mock_flow_instruments = [True, {}]
        mock_instrument_not_found.return_value.answer = 'ok'
        mock_instrument_not_found.return_value.value = 'inst1'
        observations = [{'sublocation': 'obs1.sub',
                         'obsid': 'obs1', 'flowtype': 'atype',
                         'date_time': datestring_to_date('2016-01-01 00:00'),
                         'unit': 'aunit', 'value': '123,4'},
                        {'sublocation': 'obs2.sub',
                         'obsid': 'obs2', 'flowtype': 'atype',
                         'date_time': datestring_to_date('2016-01-02 00:00'),
                         'unit': 'aunit', 'value': '223,4'},
                        {'sublocation': 'obs2.sub',
                         'obsid': 'obs2', 'flowtype': 'atype',
                         'date_time': datestring_to_date('2016-01-03 00:00'),
                         'unit': 'aunit', 'value': '223,4'}]
        test_string = create_test_string(FieldloggerImport.prepare_w_flow_data(observations))
        expected_calls = [call(combobox_label='Instrument id:s in database for obsid obs1.\nThe last used instrument id for obsid obs1 is prefilled:', default_value='', dialogtitle='Submit instrument id', existing_list=[''], msg='Submit the instrument id for the measurement:\n obs1.sub, obs1, 2016-01-01 00:00:00, atype, aunit'),
 call(combobox_label='Instrument id:s in database for obsid obs2.\nThe last used instrument id for obsid obs2 is prefilled:', default_value='', dialogtitle='Submit instrument id', existing_list=[''], msg='Submit the instrument id for the measurement:\n obs2.sub, obs2, 2016-01-02 00:00:00, atype, aunit')]
        assert mock_instrument_not_found.mock_calls == expected_calls
        reference_string = '[[obsid, instrumentid, flowtype, date_time, reading, unit, comment], [obs1, inst1, atype, 2016-01-01 00:00:00, 123.4, aunit, ], [obs2, inst1, atype, 2016-01-02 00:00:00, 223.4, aunit, ], [obs2, inst1, atype, 2016-01-03 00:00:00, 223.4, aunit, ]]'
        assert test_string == reference_string
Example #31
0
    def convert_datetime(self, date_time):
        """Shift date_time from the source timezone to the target timezone.

        :param date_time: a date string (passed through unchanged when no
            source timezone is configured or no shift is needed).
        :return: the shifted datetime, or date_time unchanged.
        """
        if self.source_tz is None:
            return date_time

        # Timezone strings -> timedeltas (project helper in date_utils).
        source_td = date_utils.parse_timezone_to_timedelta(self.source_tz)
        target_td = date_utils.parse_timezone_to_timedelta(self.target_tz)

        diff = target_td - source_td

        # BUG FIX: the original tested `diff == 0`, which compares a
        # timedelta to an int and is always False in Python 3, so the
        # "no shift needed" shortcut never triggered. A zero timedelta is
        # falsy, so truthiness gives the intended behavior.
        if not diff:
            return date_time
        else:
            new_date = date_utils.datestring_to_date(date_time) + diff
            return new_date
    def convert_datetime(self, date_time):
        """Shift date_time from the source timezone to the target timezone.

        :param date_time: a date string (passed through unchanged when no
            source timezone is configured or no shift is needed).
        :return: the shifted datetime, or date_time unchanged.
        """
        if self.source_tz is None:
            return date_time

        # Timezone strings -> timedeltas (project helper in date_utils).
        source_td = date_utils.parse_timezone_to_timedelta(self.source_tz)
        target_td = date_utils.parse_timezone_to_timedelta(self.target_tz)

        diff = target_td - source_td

        # BUG FIX: the original tested `diff == 0`, which compares a
        # timedelta to an int and is always False in Python 3, so the
        # "no shift needed" shortcut never triggered. A zero timedelta is
        # falsy, so truthiness gives the intended behavior.
        if not diff:
            return date_time
        else:
            new_date = date_utils.datestring_to_date(date_time) + diff
            return new_date
Example #33
0
    def test_calibrlogger_adjust_trend(self, mock_messagebar):
        """adjust_trend_func must tilt the logger series so it follows the
        measurement trend: the first logger point stays at 100 and the second
        ends near 0 (the reference string pins the float residue -2.8e-14).
        """
        # Logger rises 100 -> 200 while measurements fall 200 -> 100.
        db_utils.sql_alter_db(u"INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db(u"INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        db_utils.sql_alter_db(u"INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 200)")
        db_utils.sql_alter_db(u"INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 200)")
        db_utils.sql_alter_db(u"INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 100)")

        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
        gui_utils.set_combobox(calibrlogger.combobox_obsid, u'rb1 (uncalibrated)')
        calibrlogger.update_plot()
        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date(u'2000-01-01 00:00:00'))
        calibrlogger.L1_date.setDateTime(date_utils.datestring_to_date(u'2017-02-01 00:00'))
        calibrlogger.L2_date.setDateTime(date_utils.datestring_to_date(u'2017-02-10 00:00'))
        calibrlogger.M1_date.setDateTime(date_utils.datestring_to_date(u'2017-02-01 00:00'))
        calibrlogger.M2_date.setDateTime(date_utils.datestring_to_date(u'2017-02-10 00:00'))

        calibrlogger.adjust_trend_func()

        test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'SELECT * FROM w_levels_logger'))
        print(mock_messagebar.mock_calls)
        print(test)
        ref = u'(True, [(rb1, 2017-02-01 00:00, None, None, None, 100.0, None), (rb1, 2017-02-10 00:00, None, None, None, -2.84217094304e-14, None)])'
        assert test == ref
    def test_calibrlogger_add_to_level_masl(self, mock_messagebar):
        """add_to_level_masl must add the Add2Levelmasl text (50) to every
        logger level in the chosen period, turning 100 into 150.
        """
        db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)

        calibrlogger.update_plot()

        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
        calibrlogger.Add2Levelmasl.setText('50')
        gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')

        calibrlogger.add_to_level_masl()

        test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
        ref = '(True, [(rb1, 2017-02-01 00:00, None, None, None, 150.0, None)])'
        print(test)
        assert test == ref
Example #35
0
    def test_calibrlogger_add_to_level_masl(self, mock_messagebar):
        """add_to_level_masl must add the Add2Levelmasl text (50) to every
        logger level in the chosen period, turning 100 into 150.
        """
        db_utils.sql_alter_db(u"INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db(u"INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)

        calibrlogger.update_plot()

        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date(u'2000-01-01 00:00:00'))
        calibrlogger.Add2Levelmasl.setText(u'50')
        gui_utils.set_combobox(calibrlogger.combobox_obsid, u'rb1 (uncalibrated)')

        calibrlogger.add_to_level_masl()

        test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'SELECT * FROM w_levels_logger'))
        ref = u'(True, [(rb1, 2017-02-01 00:00, None, None, None, 150.0, None)])'
        print(test)
        assert test == ref
Example #36
0
def fix_date(date_time, filename, tz_converter=None):
    """Parse a HOBO logger timestamp into a datetime.

    Handles 12-hour timestamps with a trailing two-letter meridiem marker
    (English AM/PM or the Swedish FM/EM); other formats are delegated to
    date_utils.datestring_to_date.

    :param date_time: the raw timestamp string from the file.
    :param filename: used in the error message when parsing fails.
    :param tz_converter: optional converter applied to the parsed datetime.
    :return: a datetime object.
    :raises FileError: when date_time cannot be parsed at all.
    """
    try:
        # Strip the two-character meridiem marker before parsing with %I.
        dt = datetime.datetime.strptime(date_time[:-2].rstrip(), '%m/%d/%y %I:%M:%S')
    except ValueError:
        dt = date_utils.datestring_to_date(date_time)
        if dt is None:
            raise FileError(ru(QCoreApplication.translate('HobologgerImport',
                                                          '''Dateformat in file %s could not be parsed.''')) % filename)
    else:
        marker = date_time[-2:].lower()
        # BUG FIX: %I parses "12" as hour 12, so the original turned
        # 12:xx PM into 24:xx (the next day) and left 12:xx AM at 12:xx.
        # Normalize the 12-hour value first, then add the PM offset.
        # 'fm'/'em' are the Swedish AM/PM markers.
        if marker in ('am', 'fm', 'em', 'pm'):
            dt = dt.replace(hour=dt.hour % 12)
            if marker in ('em', 'pm'):
                dt += datetime.timedelta(hours=12)

    if tz_converter is not None:
        dt = tz_converter.convert_datetime(dt)

    return dt
def fix_date(date_time, filename, tz_converter=None):
    """Parse a HOBO logger timestamp into a datetime.

    Handles 12-hour timestamps with a trailing two-letter meridiem marker
    (English AM/PM or the Swedish FM/EM); other formats are delegated to
    date_utils.datestring_to_date.

    :param date_time: the raw timestamp string from the file.
    :param filename: used in the error message when parsing fails.
    :param tz_converter: optional converter applied to the parsed datetime.
    :return: a datetime object.
    :raises FileError: when date_time cannot be parsed at all.
    """
    try:
        # Strip the two-character meridiem marker before parsing with %I.
        dt = datetime.datetime.strptime(date_time[:-2].rstrip(), '%m/%d/%y %I:%M:%S')
    except ValueError:
        dt = date_utils.datestring_to_date(date_time)
        if dt is None:
            raise FileError(ru(QCoreApplication.translate('HobologgerImport',
                                                          '''Dateformat in file %s could not be parsed.''')) % filename)
    else:
        marker = date_time[-2:].lower()
        # BUG FIX: %I parses "12" as hour 12, so the original turned
        # 12:xx PM into 24:xx (the next day) and left 12:xx AM at 12:xx.
        # Normalize the 12-hour value first, then add the PM offset.
        # 'fm'/'em' are the Swedish AM/PM markers.
        if marker in ('am', 'fm', 'em', 'pm'):
            dt = dt.replace(hour=dt.hour % 12)
            if marker in ('em', 'pm'):
                dt += datetime.timedelta(hours=12)

    if tz_converter is not None:
        dt = tz_converter.convert_datetime(dt)

    return dt
Example #38
0
    def test_calibrlogger_calc_best_fit_log_pos_out_of_radius(self, mock_messagebar, skip_popup):
        """calc_best_fit in logger-position mode (state 1) must not calibrate
        when the measurement is a month away from the logger sample: head_cm
        stays 50 and level_masl stays NULL.
        """
        db_utils.sql_alter_db(u"INSERT INTO obs_points (obsid) VALUES ('rb1')")
        db_utils.sql_alter_db(u"INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
        db_utils.sql_alter_db(u"INSERT INTO w_levels_logger (obsid, date_time, head_cm) VALUES ('rb1', '2017-03-01 00:00', 50)")
        calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)

        calibrlogger.update_plot()

        calibrlogger.loggerpos_masl_or_offset_state = 1
        calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date(u'2000-01-01 00:00:00'))
        gui_utils.set_combobox(calibrlogger.combobox_obsid, u'rb1 (uncalibrated)')

        calibrlogger.calc_best_fit()

        test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'SELECT * FROM w_levels_logger'))
        ref = u'(True, [(rb1, 2017-03-01 00:00, 50.0, None, None, None, None)])'
        print(test)
        assert test == ref
Example #39
0
    def parse_rows(f):
        """
        Parse rows in fieldlogger format into observation dicts.

        :param f: file data -- an open file or a list of rows without header,
            each row being 'sublocation;date;time;value;parametername'
        :return: a list of dicts like
            [{sublocation: x, date_time: y, value: z, parametername: o}, ...]
        """
        observations = []
        for raw in f:
            line = utils.returnunicode(raw).rstrip(u'\r').rstrip(u'\n')
            # Blank lines carry no observation.
            if not line:
                continue
            cols = line.split(u';')
            observations.append({u'sublocation': cols[0],
                                 u'date_time': datestring_to_date(u' '.join([cols[1], cols[2]])),
                                 u'value': cols[3],
                                 u'parametername': cols[4]})
        return observations
    def filter_dates_from_filedata(file_data, obsid_last_imported_dates, obsid_header_name='obsid', date_time_header_name='date_time'):
        """
        Drop rows that are not newer than the last imported date for their obsid.

        :param file_data: a list of lists like [['obsid', 'date_time', ...], [obsid1, date_time1, ...]]
        :param obsid_last_imported_dates: a dict like {'obsid1': last_date_in_db, ...}
        :param obsid_header_name: the name of the obsid header
        :param date_time_header_name: the name of the date_time header
        :return: A filtered list with only dates after last date is included for each obsid.

        >>> DiverofficeImport.filter_dates_from_filedata([['obsid', 'date_time'], ['obs1', '2016-09-28'], ['obs1', '2016-09-29']], {'obs1': [('2016-09-28', )]})
        [['obsid', 'date_time'], ['obs1', '2016-09-29']]
        """
        # Header-only input: nothing to filter.
        if len(file_data) == 1:
            return file_data

        header = file_data[0]
        obsid_col = header.index(obsid_header_name)
        date_col = header.index(date_time_header_name)
        # Obsids without a recorded last date compare against a date that
        # precedes everything, so all their rows are kept.
        fallback = [('0001-01-01 00:00:00',)]
        kept = [row for row in file_data[1:]
                if datestring_to_date(row[date_col]) >
                datestring_to_date(obsid_last_imported_dates.get(row[obsid_col], fallback)[0][0])]
        return [header] + kept
Example #41
0
 def test_prepare_w_flow_data(self, mock_flow_instruments, mock_instrument_not_found):
     """A single flow observation (with sublocation) becomes one w_flow row,
     the instrument id coming from the mocked dialog and the decimal-comma
     value converted to a dotted reading.
     """
     # NOTE(review): rebinding the parameter has no effect on the patch -- verify.
     mock_flow_instruments = [True, {}]
     mock_instrument_not_found.return_value.answer = 'ok'
     mock_instrument_not_found.return_value.value = 'inst1'
     observations = [{'sublocation': 'obs1.sub', 'obsid': 'obs1', 'flowtype': 'atype', 'date_time': datestring_to_date('2016-01-01 00:00'), 'unit': 'aunit', 'value': '123,4'}]
     test_string = create_test_string(FieldloggerImport.prepare_w_flow_data(observations))
     reference_string = '[[obsid, instrumentid, flowtype, date_time, reading, unit, comment], [obs1, inst1, atype, 2016-01-01 00:00:00, 123.4, aunit, ]]'
     assert test_string == reference_string
    def parse_hobologger_file(path, charset, skip_rows_without_water_level=False, begindate=None, enddate=None, tz_converter=None):
        """ Parses a HOBO temperature logger csv file into a string

        :param path: The file name
        :param charset: text encoding used when reading the file
        :param skip_rows_without_water_level: unused by this implementation
        :param begindate: optional date string; earlier rows are dropped
        :param enddate: optional date string; later rows are dropped
        :param tz_converter: A TzConverter object.
        :return: a (filedata, filename, location) tuple where filedata is
            [['date_time', 'head_cm', 'temp_degc', 'cond_mscm'], row, ...];
            on parse failure ([], filename, location) is returned instead.

        """

        filedata = []
        location = None
        filename = os.path.basename(path)
        # Parse the optional period boundaries once, up front.
        if begindate is not None:
            begindate = date_utils.datestring_to_date(begindate)
        if enddate is not None:
            enddate = date_utils.datestring_to_date(enddate)

        with open(path, 'rt', encoding=str(charset)) as f:
            rows_unsplit = [row.lstrip().rstrip('\n').rstrip('\r') for row in f]
            csvreader = csv.reader(rows_unsplit, delimiter=',', quotechar='"')

        rows = [ru(row, keep_containers=True) for row in csvreader]

        # The data header is the first row mentioning 'Date Time'.
        try:
            data_header_idx = [rownr for rownr, row in enumerate(rows) if 'Date Time' in '_'.join(row)][0]
        except IndexError:
            utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('Hobologger import',
                                                                                 '''File %s could not be parsed.'''))%filename)
            return [], filename, location

        # NOTE(review): the column lookups below always use rows[1], while the
        # data header row is located dynamically above -- assumes the column
        # header is always the second row; verify against sample files.
        date_colnr = [idx for idx, col in enumerate(rows[1]) if 'Date Time' in col]
        if not date_colnr:
            raise Exception(ru(QCoreApplication.translate('Hobologger import', 'Date Time column not found!')))
        else:
            date_colnr = date_colnr[0]

        if tz_converter:
            # The source timezone is embedded in the date column header.
            tz_string = get_tz_string(rows[1][date_colnr])
            if tz_string is None:
                utils.MessagebarAndLog.warning(
                    bar_msg=ru(QCoreApplication.translate('Hobologger import', 'Timezone not found in %s')) % filename)
            tz_converter.source_tz = tz_string

        temp_colnr = [idx for idx, col in enumerate(rows[1]) if 'Temp, °C' in col]
        if not temp_colnr:
            raise Exception(ru(QCoreApplication.translate('Hobologger import', 'Temperature column not found!')))
        else:
            temp_colnr = temp_colnr[0]

        # The temperature header may embed the logger location as 'LBL: <name>';
        # fall back to the file name when it does not.
        match = re.search('LBL: ([A-Za-z0-9_\-]+)', rows[1][temp_colnr])
        if not match:
            location = filename
        else:
            location = match.group(1)

        new_header = ['date_time', 'head_cm', 'temp_degc', 'cond_mscm']
        filedata.append(new_header)

        # Use the first data row to establish the date format; retry with a
        # trailing two-character (meridiem-style) marker stripped off.
        try:
            first_data_row = rows[data_header_idx + 1]
        except IndexError:
            utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('HobologgerImport',
                                                                                 '''No data in file %s.'''))%filename)
            return [], filename, location
        else:
            dt = first_data_row[date_colnr]
            date_format = date_utils.find_date_format(dt, suppress_error_msg=True)
            if date_format is None:
                dt = first_data_row[date_colnr][:-2].rstrip()
                date_format = date_utils.find_date_format(dt)
                if date_format is None:
                    utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('HobologgerImport',
                                                                                         '''Dateformat in file %s could not be parsed.''')) % filename)
                    return [], filename, location

        # Keep only rows inside [begindate, enddate]; head_cm and cond_mscm are
        # left empty and the temperature decimal comma is converted to a dot.
        # NOTE(review): fix_date is re-evaluated up to three times per row here.
        filedata.extend([[date_utils.long_dateformat(fix_date(row[date_colnr], filename, tz_converter)),
                              '',
                              str(float(row[temp_colnr].replace(',', '.'))) if (
                              utils.to_float_or_none(row[temp_colnr]) if temp_colnr is not None else None) else '',
                              '']
                        for row in rows[data_header_idx + 1:]
                        if all([fix_date(row[date_colnr], filename, tz_converter) >= begindate if begindate is not None else True,
                                fix_date(row[date_colnr], filename, tz_converter) <= enddate if enddate is not None else True])])

        # Drop rows where every data column is empty.
        filedata = [row for row in filedata if any(row[1:])]

        return filedata, filename, location
Example #43
0
 def test_date_time_filter_observation_skip_to(self):
     """An observation exactly at the to-date must be filtered out (None)."""
     dt_filter = DateTimeFilter()
     dt_filter.from_date = '2016-01-01'
     dt_filter.to_date = '2016-01-10'
     obs = {'date_time': datestring_to_date('2016-01-10')}
     assert dt_filter.alter_data(obs) is None
Example #44
0
    def test_import_w_levels_many_rows(self):
        """Importing 10000 csv rows into w_levels must finish in under 10
        seconds and insert all rows.
        """
        # BUG FIX (below, reference_string): the original used the Python 2
        # ur'''...''' prefix, which is a SyntaxError in Python 3 (PEP 414
        # restored u'' but not ur''); r'''...''' preserves the value.
        # Also renamed the local 'file' so it no longer shadows the builtin.
        file_lines = [u'obsid,date_time,meas']
        base = datestring_to_date(u'1900-01-01 00:01:01')
        date_list = [base + datetime.timedelta(days=x) for x in range(0, 10000)]
        # NOTE(review): u'%Y%M%D %H%m' mixes up strftime codes (%M is minutes,
        # %D is locale mm/dd/yy) -- presumably '%Y-%m-%d %H:%M' was intended;
        # kept as-is to preserve the generated fixture. Verify.
        file_lines.extend([u'rb1,' + datetime.datetime.strftime(adate, u'%Y%M%D %H%m') + u',0.5' for adate in date_list])

        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("rb1")''')

        with utils.tempinput(u'\n'.join(file_lines), u'utf-8') as filename:
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
                    @mock.patch('import_data_to_db.utils.askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
                    def _test(self, filename, mock_filename, mock_skippopup, mock_encoding, mock_iface, mock_askuser):

                        mock_filename.return_value = filename
                        mock_encoding.return_value = [u'utf-8', True]

                        # Answer every confirmation dialog the importer opens.
                        def side_effect(*args, **kwargs):
                            mock_result = mock.MagicMock()
                            if u'msg' in kwargs:
                                if kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                                    mock_result.result = 1
                                    return mock_result
                            if len(args) > 1:
                                if args[1].startswith(u'Do you want to confirm'):
                                    mock_result.result = 0
                                    return mock_result
                                    #mock_askuser.return_value.result.return_value = 0
                                elif args[1].startswith(u'Do you want to import all'):
                                    mock_result.result = 0
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nForeign keys'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nThere are'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'It is a strong recommendation'):
                                    mock_result.result = 0
                                    return mock_result
                        mock_askuser.side_effect = side_effect

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = GeneralCsvImportGui(self.iface.mainWindow(), ms)
                        importer.load_gui()

                        importer.load_files()
                        importer.table_chooser.import_method = u'w_levels'

                        # Map the csv columns onto the w_levels columns.
                        for column in importer.table_chooser.columns:
                            names = {u'obsid': u'obsid', u'date_time': u'date_time', u'meas': u'meas'}
                            if column.db_column in names:
                                column.file_column_name = names[column.db_column]

                        import_time = timeit.timeit(importer.start_import, number=1)
                        return import_time

                    import_time = _test(self, filename)
                    test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select count(*) from w_levels'''))
                    reference_string = r'''(True, [(10000)])'''
                    assert import_time < 10
                    assert test_string == reference_string
    def match_ts_values(self, meas_ts, logger_ts, tolerance):
        """ Matches two timeseries values for shared timesteps

            For every measurement point, a mean of logger values inside
            measurementpoint + x minutes to measurementpoint - x minutes
            is coupled together.

            At the first used measurement, only logger values greater than
            the set start date is used.
            At the last measurement, only logger values lesser than the set end
            date is used.
            This is done so that values from another logger reposition is not
            mixed with the chosen logger positioning. (Hard to explain).

        :param meas_ts: iterable of (date_string, value) measurement points.
        :param logger_ts: iterable of (date_string, value) logger points.
        :param tolerance: a (number, period) pair, e.g. (10, 'minutes').
        :return: a list of (measurement_value, mean_logger_value) pairs, or
            None when the logger series is empty.
        """
        coupled_vals = []

        # The tolerance is the half-width of the matching window.
        tol = int(tolerance[0])
        tol_period = tolerance[1]

        logger_gen = utils.ts_gen(logger_ts)
        try:
            l = next(logger_gen)
        except StopIteration:
            return None
        log_vals = []

        all_done = False
        #The .replace(tzinfo=None) is used to remove info about timezone. Needed for the comparisons. This should not be a problem though as the date scale in the plot is based on the dates from the database.
        outer_begin = self.FromDateTime.dateTime().toPyDateTime().replace(tzinfo=None)
        outer_end = self.ToDateTime.dateTime().toPyDateTime().replace(tzinfo=None)
        logger_step = datestring_to_date(l[0]).replace(tzinfo=None)
        for m in meas_ts:
            if logger_step is None:
                break
            meas_step = datestring_to_date(m[0]).replace(tzinfo=None)

            step_begin = dateshift(meas_step, -tol, tol_period)
            step_end = dateshift(meas_step, tol, tol_period)

            # Skip measurements entirely outside the chosen period.
            if step_end < outer_begin:
                continue
            if step_begin > outer_end:
                break

            #Skip logger steps that are earlier than the chosen begin date or are not inside the measurement period.
            while logger_step < step_begin or logger_step < outer_begin:
                try:
                    l = next(logger_gen)
                except StopIteration:
                    all_done = True
                    break
                logger_step = datestring_to_date(l[0]).replace(tzinfo=None)

            log_vals = []

            while logger_step is not None and logger_step <= step_end and logger_step <= outer_end:
                # BUG FIX: the original condition
                #   not math.isnan(float(l[1])) or l[1] == 'nan' or l[1] == 'NULL'
                # appended the value even when it WAS the string 'nan', and
                # raised ValueError on 'NULL' because float(l[1]) was evaluated
                # before the string checks. Only collect genuinely numeric,
                # non-NaN values.
                if l[1] not in ('nan', 'NULL') and not math.isnan(float(l[1])):
                    log_vals.append(float(l[1]))
                try:
                    l = next(logger_gen)
                except StopIteration:
                    all_done = True
                    break
                logger_step = datestring_to_date(l[0]).replace(tzinfo=None)

            if log_vals:
                mean = np.mean(log_vals)
                if not math.isnan(mean):
                    coupled_vals.append((m[1], mean))
            if all_done:
                break
        return coupled_vals
Example #46
0
 def to_date(self, value):
     """Set the 'to' date-time edit widget from a date string."""
     self.to_datetimeedit.setDateTime(datestring_to_date(value))
Example #47
0
 def to_date(self, value):
     """Set the 'to' date-time edit widget from a date string."""
     self.to_datetimeedit.setDateTime(datestring_to_date(value))
Example #48
0
    def parse_hobologger_file(path, charset, skip_rows_without_water_level=False, begindate=None, enddate=None, tz_converter=None):
        """ Parses a HOBO temperature logger csv file into a table.

        :param path: The file name.
        :param charset: Encoding used when reading the file.
        :param skip_rows_without_water_level: Unused here; HOBO files carry no water level column.
        :param begindate: Optional date string; rows with earlier timestamps are skipped.
        :param enddate: Optional date string; rows with later timestamps are skipped.
        :param tz_converter: A TzConverter object, or None to keep timestamps unconverted.
        :return: (filedata, filename, location) where filedata is a list of rows
                 [date_time, head_cm, temp_degc, cond_mscm] preceded by that header row,
                 or ([], filename, location) when the file could not be parsed.
        """

        filedata = []
        location = None
        filename = os.path.basename(path)
        if begindate is not None:
            begindate = date_utils.datestring_to_date(begindate)
        if enddate is not None:
            enddate = date_utils.datestring_to_date(enddate)

        with open(path, 'rt', encoding=str(charset)) as f:
            rows_unsplit = [row.lstrip().rstrip('\n').rstrip('\r') for row in f]
            csvreader = csv.reader(rows_unsplit, delimiter=',', quotechar='"')

        rows = [ru(row, keep_containers=True) for row in csvreader]

        # Locate the header row containing 'Date Time'; everything below it is data.
        try:
            data_header_idx = [rownr for rownr, row in enumerate(rows) if 'Date Time' in '_'.join(row)][0]
        except IndexError:
            utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('Hobologger import',
                                                                                 '''File %s could not be parsed.'''))%filename)
            return [], filename, location

        # NOTE(review): column positions are read from rows[1], not from the row at
        # data_header_idx — presumably the HOBO header always sits at row 1; confirm.
        date_colnr = [idx for idx, col in enumerate(rows[1]) if 'Date Time' in col]
        if not date_colnr:
            raise Exception(ru(QCoreApplication.translate('Hobologger import', 'Date Time column not found!')))
        else:
            date_colnr = date_colnr[0]

        if tz_converter:
            tz_string = get_tz_string(rows[1][date_colnr])
            if tz_string is None:
                utils.MessagebarAndLog.warning(
                    bar_msg=ru(QCoreApplication.translate('Hobologger import', 'Timezone not found in %s')) % filename)
            tz_converter.source_tz = tz_string

        temp_colnr = [idx for idx, col in enumerate(rows[1]) if 'Temp, °C' in col]
        if not temp_colnr:
            raise Exception(ru(QCoreApplication.translate('Hobologger import', 'Temperature column not found!')))
        else:
            temp_colnr = temp_colnr[0]

        # The logger label ("LBL: xxx") inside the temperature header names the location;
        # raw string avoids the deprecated '\-' escape in a normal string literal.
        match = re.search(r'LBL: ([A-Za-z0-9_\-]+)', rows[1][temp_colnr])
        if not match:
            location = filename
        else:
            location = match.group(1)

        new_header = ['date_time', 'head_cm', 'temp_degc', 'cond_mscm']
        filedata.append(new_header)

        try:
            first_data_row = rows[data_header_idx + 1]
        except IndexError:
            utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('HobologgerImport',
                                                                                 '''No data in file %s.'''))%filename)
            return [], filename, location
        else:
            dt = first_data_row[date_colnr]
            date_format = date_utils.find_date_format(dt, suppress_error_msg=True)
            if date_format is None:
                # Some exports append a 2-char AM/PM marker; retry without it.
                dt = first_data_row[date_colnr][:-2].rstrip()
                date_format = date_utils.find_date_format(dt)
                if date_format is None:
                    utils.MessagebarAndLog.warning(bar_msg=ru(QCoreApplication.translate('HobologgerImport',
                                                                                         '''Dateformat in file %s could not be parsed.''')) % filename)
                    return [], filename, location

        # BUGFIX: compare the parsed temperature against None explicitly so a reading
        # of exactly 0.0 °C is kept (truthiness previously turned it into '').
        filedata.extend([[date_utils.long_dateformat(fix_date(row[date_colnr], filename, tz_converter)),
                              '',
                              str(float(row[temp_colnr].replace(',', '.'))) if (
                              (utils.to_float_or_none(row[temp_colnr]) is not None) if temp_colnr is not None else False) else '',
                              '']
                        for row in rows[data_header_idx + 1:]
                        if all([fix_date(row[date_colnr], filename, tz_converter) >= begindate if begindate is not None else True,
                                fix_date(row[date_colnr], filename, tz_converter) <= enddate if enddate is not None else True])])

        # Drop rows that carry no data values at all (only a timestamp).
        filedata = [row for row in filedata if any(row[1:])]

        return filedata, filename, location
Example #49
0
    def parse_levelogger_file(path,
                              charset,
                              skip_rows_without_water_level=False,
                              begindate=None,
                              enddate=None):
        """ Parses a levelogger csv file into a table.

        :param path: The file name
        :param charset: Encoding used when reading the file.
        :param skip_rows_without_water_level: If True, skip rows whose LEVEL value is not numeric.
        :param begindate: Optional date string; rows with earlier timestamps are skipped.
        :param enddate: Optional date string; rows with later timestamps are skipped.
        :return: (filedata, filename, location) where filedata is a list of rows
                 [date_time, head_cm, temp_degc, cond_mscm] preceded by that header row,
                 or ([], filename, location) when the file could not be parsed.

        Assumptions and limitations:
        * The Location attribute is used as location and added as a column.
        * Values containing ',' is replaced with '.'
        * Rows with missing "Water head[cm]"-data is skipped.

        """
        filedata = []
        location = None
        # Factors normalizing LEVEL to cm and specific conductivity to mS/cm.
        level_unit_factor_to_cm = 100
        spec_cond_factor_to_mScm = 0.001
        filename = os.path.basename(path)
        if begindate is not None:
            begindate = date_utils.datestring_to_date(begindate)
        if enddate is not None:
            enddate = date_utils.datestring_to_date(enddate)

        with io.open(path, 'rt', encoding=str(charset)) as f:
            rows_unsplit = [
                row.lstrip().rstrip('\n').rstrip('\r') for row in f
            ]

        # The header row starting with 'Date' marks the beginning of the data table.
        try:
            data_header_idx = [
                rownr for rownr, row in enumerate(rows_unsplit)
                if row.startswith('Date')
            ][0]
        except IndexError:
            utils.MessagebarAndLog.warning(bar_msg=ru(
                QCoreApplication.translate(
                    'LeveloggerImport', '''File %s could not be parsed.''')) %
                                           filename)
            return [], filename, location

        # NOTE(review): the detected delimiter is only used as a parse-ability
        # check; the actual split below tries ';' first and falls back to ','.
        delimiter = utils.get_delimiter_from_file_rows(
            rows_unsplit[data_header_idx:],
            filename=filename,
            delimiters=[';', ','],
            num_fields=None)

        if delimiter is None:
            return [], filename, location

        rows = [row.split(';') for row in rows_unsplit]
        lens = set([len(row) for row in rows[data_header_idx:]])
        if len(lens) != 1 or list(lens)[0] == 1:
            # Assume that the delimiter was not ';'
            rows = [row.split(',') for row in rows_unsplit]

        col1 = [row[0] for row in rows]

        # The value on the line after 'Location:' names the location.
        try:
            location_idx = col1.index('Location:')
        except ValueError:
            pass
        else:
            location = col1[location_idx + 1]

        # The line after 'LEVEL' in the preamble declares the level unit.
        try:
            level_unit_idx = col1.index('LEVEL')
        except ValueError:
            pass
        else:
            try:
                level_unit = col1[level_unit_idx + 1].split(':')[1].lstrip()
            except IndexError:
                pass
            else:
                if level_unit == 'cm':
                    level_unit_factor_to_cm = 1
                elif level_unit == 'm':
                    level_unit_factor_to_cm = 100
                else:
                    level_unit_factor_to_cm = 100
                    utils.MessagebarAndLog.warning(bar_msg=ru(
                        QCoreApplication.translate(
                            'LeveloggerImport',
                            '''The unit for level wasn't m or cm, a factor of %s was used. Check the imported data.'''
                        )) % str(level_unit_factor_to_cm))

        file_header = rows[data_header_idx]

        new_header = ['date_time', 'head_cm', 'temp_degc', 'cond_mscm']
        filedata.append(new_header)

        date_colnr = file_header.index('Date')
        time_colnr = file_header.index('Time')
        try:
            level_colnr = file_header.index('LEVEL')
        except ValueError:
            level_colnr = None
        try:
            temp_colnr = file_header.index('TEMPERATURE')
        except ValueError:
            temp_colnr = None
        try:
            spec_cond_colnr = file_header.index('spec. conductivity (uS/cm)')
        except ValueError:
            try:
                spec_cond_colnr = file_header.index(
                    'spec. conductivity (mS/cm)')
            except ValueError:
                spec_cond_colnr = None
            else:
                spec_cond_factor_to_mScm = 1
        else:
            spec_cond_factor_to_mScm = 0.001

        try:
            first_data_row = rows[data_header_idx + 1]
        except IndexError:
            utils.MessagebarAndLog.warning(bar_msg=ru(
                QCoreApplication.translate(
                    'LeveloggerImport', '''No data in file %s.''')) % filename)
            return [], filename, location
        else:
            date_str = ' '.join(
                [first_data_row[date_colnr], first_data_row[time_colnr]])
            # BUGFIX: determine the date FORMAT from the first data row.
            # datestring_to_date() returns a parsed date, which was then wrongly
            # passed on as a format string to long_dateformat() and as df= to
            # datestring_to_date(). Mirrors parse_hobologger_file.
            date_format = date_utils.find_date_format(date_str)
            if date_format is None:
                utils.MessagebarAndLog.warning(bar_msg=ru(
                    QCoreApplication.translate(
                        'LeveloggerImport',
                        '''Dateformat in file %s could not be parsed.''')) %
                                               filename)
                return [], filename, location

        def _row_passes_filters(row):
            # True when the row survives the water-level and date-range filters.
            if skip_rows_without_water_level:
                # BUGFIX: guard level_colnr is None (the original indexed row[None]).
                if level_colnr is None or not isinstance(
                        utils.to_float_or_none(row[level_colnr]), float):
                    return False
            if begindate is not None or enddate is not None:
                # Parse the timestamp once instead of once per bound.
                row_date = date_utils.datestring_to_date(
                    ' '.join([row[date_colnr], row[time_colnr]]), df=date_format)
                if begindate is not None and row_date < begindate:
                    return False
                if enddate is not None and row_date > enddate:
                    return False
            return True

        def _converted_value(row, colnr, factor):
            # Scaled numeric value as str, or None when the column is absent or
            # non-numeric. BUGFIX: compare against None explicitly so readings of
            # exactly 0 are kept (truthiness dropped 0-values of temperature and
            # conductivity; LEVEL already used 'is not None').
            if colnr is None:
                return None
            if utils.to_float_or_none(row[colnr]) is None:
                return None
            return str(float(row[colnr].replace(',', '.')) * factor)

        for row in rows[data_header_idx + 1:]:
            if not _row_passes_filters(row):
                continue
            filedata.append([
                date_utils.long_dateformat(
                    ' '.join([row[date_colnr], row[time_colnr]]), date_format),
                _converted_value(row, level_colnr, level_unit_factor_to_cm),
                _converted_value(row, temp_colnr, 1),
                _converted_value(row, spec_cond_colnr, spec_cond_factor_to_mScm)
            ])

        # Drop rows that carry no data values at all (only a timestamp).
        filedata = [row for row in filedata if any(row[1:])]

        return filedata, filename, location
Example #50
0
    def to_table(self, _data_dict):
        """
        Converts a parsed interlab4 dict into a table for w_qual_lab import

        :param _data_dict: A dict like {<lablittera>: {u'metadata': {u'metadataheader': value, ...}, <par1_name>: {u'dataheader': value, ...}}}
        :return: a list like [[u'obsid, depth, report, project, staff, date_time, anameth, reading_num, reading_txt, unit, comment'], rows with values]

        The translation from svensktvatten interlab4-keywords to w_qual_lab is from
        http://www.svensktvatten.se/globalassets/dricksvatten/riskanalys-och-provtagning/interlab-4-0.pdf

        """
        # Deep copy so popping u'metadata' below does not mutate the caller's dict.
        data_dict = copy.deepcopy(_data_dict)

        parameter_report_warning_messages = {}

        #### !!!! If a metadata-dbcolumn connection is changed, MetadataFilter.update_table.metaheader_dbcolumn_tooltips MUST be updated as well.

        def _joined_comments(mapping, keys):
            # Join 'key: value' pairs, keeping only real values (non-empty, not the
            # placeholders '-' or 'ej bedömt'). BUGFIX: the original built an eager
            # all([...]) list that called .lower() on None values and raised
            # AttributeError; this and-chain short-circuits instead.
            parts = []
            for key in keys:
                if key not in mapping:
                    continue
                value = mapping[key]
                if value and value.lower() != u'ej bedömt' and value != u'-':
                    parts.append(u': '.join([key, value]))
            return u'. '.join(parts)

        file_data = [[
            u'obsid', u'depth', u'report', u'project', u'staff', u'date_time',
            u'anameth', u'parameter', u'reading_num', u'reading_txt', u'unit',
            u'comment'
        ]]
        # BUGFIX: dict.iteritems() is Python 2 only; use items() on Python 3.
        for lablittera, lab_results in data_dict.items():
            metadata = lab_results.pop(u'metadata')

            obsid = metadata[u'obsid']
            depth = None
            report = lablittera
            project = metadata.get(u'projekt', None)
            staff = metadata.get(u'provtagare', None)

            sampledate = metadata.get(u'provtagningsdatum', None)
            if sampledate is None:
                utils.MessagebarAndLog.info(log_msg=ru(
                    QCoreApplication.translate(
                        u'Interlab4Import',
                        u'Interlab4 import: There was no sample date found (column "provtagningsdatum") for lablittera %s. Importing without it.'
                    )) % lablittera)
                date_time = None
            else:
                sampletime = metadata.get(u'provtagningstid', None)
                if sampletime is not None:
                    date_time = datetime.strftime(
                        datestring_to_date(u' '.join([sampledate,
                                                      sampletime])),
                        u'%Y-%m-%d %H:%M:%S')
                else:
                    date_time = datetime.strftime(
                        datestring_to_date(sampledate), u'%Y-%m-%d %H:%M:%S')
                    utils.MessagebarAndLog.info(log_msg=ru(
                        QCoreApplication.translate(
                            u'Interlab4Import',
                            u'Interlab4 import: There was no sample time found (column "provtagningstid") for lablittera %s. Importing without it.'
                        )) % lablittera)

            meta_comment = metadata.get(u'kommentar', None)
            additional_meta_comments = [
                u'provtagningsorsak', u'provtyp', u'provtypspecifikation',
                u'bedömning', u'kemisk bedömning', u'mikrobiologisk bedömning',
                u'provplatsid', u'provplatsnamn', u'specifik provplats'
            ]

            # Only keep the comments that really have a value.
            more_meta_comments = _joined_comments(
                metadata, additional_meta_comments) or None

            for parameter, parameter_dict in lab_results.items():
                anameth = parameter_dict.get(u'metodbeteckning', None)

                reading_num = parameter_dict.get(u'mätvärdetal', None)
                anm = parameter_dict.get(u'mätvärdetalanm', None)
                reading_txt = parameter_dict.get(u'mätvärdetext', None)

                # Derive the numeric reading from the text reading when possible.
                if reading_num is None and reading_txt is not None:
                    _reading_txt_replaced = reading_txt.replace(
                        u'<', u'').replace(u'>', u'').replace(u',', u'.')
                    try:
                        float(_reading_txt_replaced)
                    except ValueError:
                        reading_num = None
                        # Only show the bar warning once per parameter; every
                        # affected report is still logged after the main loop.
                        if parameter not in parameter_report_warning_messages:
                            utils.MessagebarAndLog.warning(
                                bar_msg=ru(
                                    QCoreApplication.translate(
                                        u'Interlab4Import',
                                        u'Import interlab4 warning, see log message panel'
                                    )),
                                log_msg=ru(
                                    QCoreApplication.translate(
                                        u'Interlab4Import',
                                        u'Could not set reading_num for parameter %s for one or more reports/lablitteras (%s etc.)'
                                    )) % (parameter, lablittera))
                        parameter_report_warning_messages.setdefault(
                            parameter, []).append(report)
                    else:
                        reading_num = _reading_txt_replaced

                if reading_txt is None and reading_num is not None:
                    reading_txt = reading_num

                # Prefix the annotation (e.g. '<'/'>') unless already present.
                if anm is not None and reading_txt is not None:
                    if not reading_txt.startswith(anm):
                        reading_txt = anm + reading_txt

                unit = parameter_dict.get(u'enhet', None)
                parameter_comment = parameter_dict.get(u'kommentar', None)
                additional_parameter_comments = [
                    u'rapporteringsgräns', u'detektionsgräns', u'mätosäkerhet',
                    u'mätvärdespår', u'parameterbedömning'
                    #u'mätvärdetalanm' This is used for creating reading_txt
                ]
                more_parameter_comments = _joined_comments(
                    parameter_dict, additional_parameter_comments)

                file_data.append([
                    obsid, depth, report, project, staff, date_time, anameth,
                    parameter, reading_num, reading_txt, unit, u'. '.join([
                        comment for comment in [
                            parameter_comment, meta_comment,
                            more_meta_comments, more_parameter_comments
                        ] if comment is not None and comment
                    ])
                ])

        for parameter, reports in sorted(
                parameter_report_warning_messages.items()):
            utils.MessagebarAndLog.info(log_msg=ru(
                QCoreApplication.translate(
                    u'Interlab4Import',
                    u'reading_num could not be set for parameter %s for reports %s'
                )) % (parameter, u', '.join(reports)))

        return file_data