def test_add_triggers_not_change_existing(self):
     """ Adding triggers should not automatically change the db """
     utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "east", "north") VALUES ('rb1', 1, 1)''')
     test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
     utils.add_triggers_to_obs_points()
     reference_string = u'(True, [(rb1, 1.0, 1.0, None)])'
     assert test_string == reference_string
    def calcall(self):
        fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
        to_d_t = self.ToDateTime.dateTime().toPyDateTime()
        
#        sanity1 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.h_toc""")[1]
        sanity1 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid""")[1]
        sanity2 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.h_toc NOT NULL""")[1]
        
        if len(sanity1) == len(sanity2): #only if h_toc exists for all objects!!
            sql1 = """UPDATE OR IGNORE w_levels SET h_toc = (SELECT obs_points.h_toc FROM obs_points WHERE w_levels.obsid = obs_points.obsid) WHERE """
            sql1 += """date_time >= '"""
            sql1 += str(fr_d_t)
            sql1 += """' AND date_time <= '"""
            sql1 += str(to_d_t)
            sql1 += """' """
            utils.sql_alter_db(sql1)
            sql2 = """UPDATE OR IGNORE w_levels SET level_masl = h_toc - meas WHERE """
            sql2 += """date_time >= '"""
            sql2 += str(fr_d_t)
            sql2 += """' AND date_time <= '"""
            sql2 += str(to_d_t)
            sql2 += """' """        
            utils.sql_alter_db(sql2)
            self.close()
        else:
            utils.pop_up_info('Calculation aborted! There seems to be NULL values in your table obs_points, column h_toc.','Error')
            self.close()
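The sanity check above relies on comparing row counts with and without a NOT NULL filter on h_toc; the two counts only match when every joined obs_points row actually has an h_toc. A minimal sketch of an equivalent check, assuming the (success, rows) return convention that utils.sql_load_fr_db shows elsewhere in these snippets, counts the offending rows directly:

# Sketch only, not the plugin's actual code: count w_levels rows whose
# obs_point has no h_toc; the calculation should only proceed when this is zero.
ok, rows = utils.sql_load_fr_db(
    """SELECT count(*) FROM w_levels
       JOIN obs_points ON w_levels.obsid = obs_points.obsid
       WHERE obs_points.h_toc IS NULL""")
missing_h_toc = rows[0][0] if ok else None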
    def delete_selected_range(self, table_name):
        """ Deletes the current selected range from the database from w_levels_logger
        :return: De
        """
        current_loaded_obsid = self.obsid
        selected_obsid = self.load_obsid_and_init()
        if current_loaded_obsid != selected_obsid:
            utils.pop_up_info("Error!\n The obsid selection has been changed but the plot has not been updated. No deletion done.\nUpdating plot.")
            self.update_plot()
            return

        fr_d_t = str((self.FromDateTime.dateTime().toPyDateTime() - datetime.datetime(1970,1,1)).total_seconds())
        to_d_t = str((self.ToDateTime.dateTime().toPyDateTime() - datetime.datetime(1970,1,1)).total_seconds())

        sql_list = []
        sql_list.append(r"""delete from "%s" """%table_name)
        sql_list.append(r"""where obsid = '%s' """%selected_obsid)
        sql_list.append(r"""AND CAST(strftime('%s', date_time) AS NUMERIC) """)
        sql_list.append(r""" >= '%s' """%fr_d_t)
        sql_list.append(r"""AND CAST(strftime('%s', date_time) AS NUMERIC) """)
        sql_list.append(r""" <= '%s' """%to_d_t)
        sql = ''.join(sql_list)

        really_delete = utils.askuser("YesNo", "Do you want to delete the period " +
                                      str(self.FromDateTime.dateTime().toPyDateTime()) + " to " +
                                      str(self.ToDateTime.dateTime().toPyDateTime()) +
                                      " for obsid " + selected_obsid + " from table " + table_name + "?").result
        if really_delete:
            utils.sql_alter_db(sql)
            self.update_plot()
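delete_selected_range builds its DELETE statement by string concatenation. A hedged sketch of the same date-range delete written with sqlite3 placeholders (assuming direct access to the database file, here reusing the TEMP_DB_PATH name from the test snippets) looks like this; only the table name still has to be interpolated, since identifiers cannot be bound:

import sqlite3

# Sketch only: same delete as above, with the obsid and epoch-second bounds
# passed as bound parameters instead of being pasted into the SQL string.
sql = ('delete from "{0}" where obsid = ? '
       "and CAST(strftime('%s', date_time) AS NUMERIC) >= ? "
       "and CAST(strftime('%s', date_time) AS NUMERIC) <= ?").format(table_name)
conn = sqlite3.connect(TEMP_DB_PATH)
conn.execute(sql, (selected_obsid, float(fr_d_t), float(to_d_t)))
conn.commit()
conn.close()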
 def do_it(self):
     """Update coordinates for all observations in self.observations"""
     
     sql = r"""UPDATE OR IGNORE obs_points SET east=X(Geometry) WHERE obsid IN """ + self.sqlpart2
     utils.sql_alter_db(sql)
     sql = r"""UPDATE OR IGNORE obs_points SET north=Y(Geometry) WHERE obsid IN """ + self.sqlpart2
     utils.sql_alter_db(sql)
    def calcselected(self):
        obsar = utils.getselectedobjectnames(self.layer)
        # turn the selected obsids into a list of python byte strings
        observations = [obs.encode('utf-8') for obs in obsar]
        fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
        to_d_t = self.ToDateTime.dateTime().toPyDateTime()

        sanity1 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.obsid IN """ + (str(observations)).encode('utf-8').replace('[','(').replace(']',')'))[1]
        sanity2 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.h_toc NOT NULL  AND obs_points.obsid IN """ + (str(observations)).encode('utf-8').replace('[','(').replace(']',')'))[1]

        if len(sanity1) == len(sanity2): #only if h_toc exists for all objects
            sql1 = """UPDATE OR IGNORE w_levels SET h_toc = (SELECT obs_points.h_toc FROM obs_points WHERE w_levels.obsid = obs_points.obsid) WHERE obsid IN """
            sql1 += str(observations)
            sql1 += """ AND date_time >= '"""
            sql1 += str(fr_d_t)
            sql1 += """' AND date_time <= '"""
            sql1 += str(to_d_t)
            sql1 += """' """   
            utils.sql_alter_db(sql1.replace("[","(").replace("]",")"))
            sql2 = """UPDATE OR IGNORE w_levels SET level_masl = h_toc - meas WHERE obsid IN """
            sql2 += str(observations)
            sql2 += """ AND date_time >= '"""
            sql2 += str(fr_d_t)
            sql2 += """' AND date_time <= '"""
            sql2 += str(to_d_t)
            sql2 += """' """        
            utils.sql_alter_db(sql2.replace("[","(").replace("]",")"))
            self.close()        
        else:
            utils.pop_up_info('Calculation aborted! There seems to be NULL values in your table obs_points, column h_toc.','Error')
            self.close()
    def test_add_east_north_from_geometry(self):
        """ Test that adding triggers and adding obsid with geometry also adds east, north
        :return:
        """
        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 1.0, POINT(1 1))])'
        assert test_string == reference_string
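The tests around add_triggers_to_obs_points never show the trigger SQL itself, only its expected effect: an inserted geometry fills east and north, inserted coordinates fill the geometry, and rows that existed before the triggers were added are left alone. As an illustration only (the real trigger bodies in add_triggers_to_obs_points may differ), one of the insert triggers could look roughly like this, using a trigger name that the setUp methods below drop and the SpatiaLite X()/Y() functions seen in do_it:

# Assumed sketch of a coords-from-geometry insert trigger; not the plugin's
# actual definition. Requires the SpatiaLite extension for X() and Y().
utils.sql_alter_db(u"""
    CREATE TRIGGER IF NOT EXISTS after_insert_obs_points_coords_fr_geom
    AFTER INSERT ON obs_points
    WHEN NEW.geometry IS NOT NULL AND (NEW.east IS NULL OR NEW.north IS NULL)
    BEGIN
        UPDATE obs_points
        SET east = X(geometry), north = Y(geometry)
        WHERE obsid = NEW.obsid;
    END;""")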
Example #7
    def test_add_triggers_not_deleting_geom_when_east_north_null(self):
        """ Adding triggers should not automatically delete geometry when east AND north is NULL """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")
        #After the first insert: u'(True, [(rb1, None, None, POINT(1 1))])'

        utils.add_triggers_to_obs_points()

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, POINT(1 1))])'
        assert test_string == reference_string
Example #8
    def test_add_triggers_add_east_north(self):
        """ Updating coordinates from NULL should create geometry. """
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "east", "north") VALUES ('rb1', NULL, NULL)''')

        utils.add_triggers_to_obs_points()

        utils.sql_alter_db(u"""update obs_points set east='1.0', north='2.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 2.0, POINT(1 2))])'
        assert test_string == reference_string
    def test_add_geometry_from_east_north(self):
        """ Test that adding triggers and adding obsid with east, north also adds geometry
        :return:
        """
        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "east", "north") VALUES ('rb1', 1, 1)''')

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 1.0, POINT(1 1))])'
        assert test_string == reference_string
Example #10
    def test_add_trigger_add_obsid_without_anything(self):
        """ Test that adding triggers and updating obsid from east, north don't set null values for previous obsid.
        :return:
        """
        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid) VALUES ('rb1')""")
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid) VALUES ('rb2')""")

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, None), (rb2, None, None, None)])'
        assert test_string == reference_string
 def do_it(self):
     """Update positions for all observations in self.observations"""
     # First find EPSG-ID for the CRS
     sql = r"""SELECT srid FROM geometry_columns where f_table_name = 'obs_points'"""
     ConnectionOK, result = utils.sql_load_fr_db(sql)
     EPSGID = str(result[0][0])
     #Then do the operation
     sql = r"""Update or ignore 'obs_points' SET Geometry=MakePoint(east, north, """
     sql += EPSGID
     sql += """) WHERE obsid IN """ + self.sqlpart2
     utils.sql_alter_db(sql)
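Both do_it variants append self.sqlpart2, an IN clause listing the selected obsids, which is never defined in these snippets. A hypothetical way to build it, mirroring the bracket-to-parenthesis replacement used in calcselected above, could be:

# Assumed helper, not the plugin's actual code: turn the selected obsids into
# an SQL IN clause such as ('P1', 'P2', 'P3').
observations = [obs.encode('utf-8') for obs in utils.getselectedobjectnames(self.layer)]
self.sqlpart2 = str(observations).replace('[', '(').replace(']', ')')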
Example #12
    def test_add_trigger_add_east_north_not_nulling_east_north(self):
        """ Test that adding triggers and adding obsid from east, north don't set null values for previous obsid.
        :return:
        """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north) VALUES ('rb1', 1, 1)""")

        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north) VALUES ('rb2', 2, 2)""")

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 1.0, None), (rb2, 2.0, 2.0, POINT(2 2))])'
        assert test_string == reference_string
Example #13
    def test_calcall(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "h_toc") VALUES ('rb1', 1)''')
        utils.sql_alter_db(u'''INSERT into w_levels ("obsid", "meas", "date_time") VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date(u'2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date(u'2010-01-01 00:00:00'))
        self.calclvl.calcall()

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, date_time, meas, h_toc, level_masl from w_levels'))
        reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
        assert test_string == reference_string
Example #14
    def test_add_trigger_add_geometry_not_nulling_geometry(self):
        """ Test that adding triggers and adding obsid don't set null values for previous obsid.
        :return:
        """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")
        #After the first insert: u'(True, [(rb1, None, None, POINT(1 1))])'

        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb2', GeomFromText('POINT(2.0 2.0)', 3006))""")
        #After the second: u'(True, [(rb1, 1.0, 1.0, POINT(1 1)), (rb2, 2.0, 2.0, POINT(2 2))])

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, POINT(1 1)), (rb2, 2.0, 2.0, POINT(2 2))])'
        assert test_string == reference_string
Example #15
    def test_plot_section_with_depth(self, mock_qgsproject_instance):
        mock_qgsproject_instance.return_value.readEntry = MIDV_DICT
        utils.sql_alter_db(u'''insert into obs_lines (obsid, geometry) values ("L1", GeomFromText('LINESTRING(633466.711659 6720684.24498, 633599.530455 6720727.016568)', 3006))''')
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry, length) values ("P1", GeomFromText('POINT(633466, 711659)', 3006), 2)''')
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry, length) values ("P2", GeomFromText('POINT(6720727, 016568)', 3006), "1")''')
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry, length) values ("P3", GeomFromText('POINT(6720727, 016568)', 3006), NULL)''')

        uri = QgsDataSourceURI()
        uri.setDatabase(TEMP_DB_PATH)
        uri.setDataSource('', 'obs_lines', 'geometry', '', 'obsid')

        self.vlayer = QgsVectorLayer(uri.uri(), 'TestLayer', 'spatialite')
        features = self.vlayer.getFeatures()
        for feature in features:
            featureid = feature.id()

        self.vlayer.setSelectedFeatures([featureid])

        @mock.patch('midvatten_utils.getselectedobjectnames', autospec=True)
        @mock.patch('qgis.utils.iface', autospec=True)
        def _test_plot_section_with_depth(self, mock_iface, mock_getselectedobjectnames):
            mock_iface.mapCanvas.return_value.currentLayer.return_value = self.vlayer
            mock_getselectedobjectnames.return_value = (u'P1', u'P2', u'P3')
            mock_mapcanvas = mock_iface.mapCanvas.return_value
            mock_mapcanvas.layerCount.return_value = 0
            self.midvatten.plot_section()
        _test_plot_section_with_depth(self)
 def calibrateandplot(self):
     obsid = unicode(self.combobox_obsid.currentText())
     if not obsid == '':
         sanity1sql = """select count(obsid) from w_levels_logger where obsid = '""" + obsid + """'"""
         sanity2sql = """select count(obsid) from w_levels_logger where head_cm not null and head_cm !='' and obsid = '""" + obsid + """'"""
         if utils.sql_load_fr_db(sanity1sql)[1] == utils.sql_load_fr_db(sanity2sql)[1]: # This must only be done if head_cm exists for all data
             fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
             to_d_t = self.ToDateTime.dateTime().toPyDateTime()
             newzref = self.LoggerPos.text()
             if len(newzref)>0:
                 sql =r"""UPDATE w_levels_logger SET level_masl = """
                 sql += str(newzref)
                 sql += """ + head_cm / 100 WHERE obsid = '"""
                 sql += obsid   
                 sql += """' AND date_time >= '"""
                 sql += str(fr_d_t)
                 sql += """' AND date_time <= '"""
                 sql += str(to_d_t)
                 sql += """' """
                 dummy = utils.sql_alter_db(sql)
             self.CalibrationPlot(obsid)
             self.getlastcalibration()
         else:
             utils.pop_up_info("Calibration aborted!!\nThere must not be empty cells or\nnull values in the 'head_cm' column!")
     else:
         self.INFO.setText("Select the observation point with logger data to be calibrated.")
Example #17
 def setUp(self):
     self.iface = DummyInterface()
     self.midvatten = midvatten.midvatten(self.iface)
     try:
         os.remove(TestObsPointsTriggers.temp_db_path)
     except OSError:
         pass
     self.midvatten.new_db()
     self.importinstance = midv_data_importer()
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_insert_obs_points_geom_fr_coords""")
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_update_obs_points_geom_fr_coords""")
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_insert_obs_points_coords_fr_geom""")
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_update_obs_points_coords_fr_geom""")
Example #18
 def setUp(self, mock_locale):
     self.iface = DummyInterface()
     self.midvatten = midvatten(self.iface)
     try:
         os.remove(TestObsPointsTriggers.temp_db_path)
     except OSError:
         pass
     mock_locale.return_value.answer = u'ok'
     mock_locale.return_value.value = u'sv_SE'
     self.midvatten.new_db()
     self.importinstance = midv_data_importer()
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_insert_obs_points_geom_fr_coords""")
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_update_obs_points_geom_fr_coords""")
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_insert_obs_points_coords_fr_geom""")
     utils.sql_alter_db(u"""DROP TRIGGER IF EXISTS after_update_obs_points_coords_fr_geom""")
Example #19
    def test_staff_not_given(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1")''')

        f = [
            u"Location;date_time;value;comment\n",
            u"Rb1202.sample;30-03-2016;15:31:30;hej2;s.comment\n",
            u"Rb1608.level;30-03-2016;15:34:40;testc;l.comment\n",
            u"Rb1615.flow;30-03-2016;15:30:09;357;f.Accvol.m3\n",
            u"Rb1615.flow;30-03-2016;15:30:09;gick bra;f.comment\n",
            u"Rb1608.level;30-03-2016;15:34:13;ergv;l.comment\n",
            u"Rb1608.level;30-03-2016;15:34:13;555;l.meas.m\n",
            u"Rb1512.sample;30-03-2016;15:31:30;899;s.turbiditet.FNU\n",
            u"Rb1505.quality;30-03-2016;15:29:26;hej;q.comment\n",
            u"Rb1505.quality;30-03-2016;15:29:26;863;q.konduktivitet.µS/cm\n",
            u"Rb1512.quality;30-03-2016;15:30:39;test;q.comment\n",
            u"Rb1512.quality;30-03-2016;15:30:39;67;q.syre.mg/L\n",
            u"Rb1512.quality;30-03-2016;15:30:39;8;q.temperatur.grC\n",
            u"Rb1512.quality;30-03-2016;15:30:40;58;q.syre.%\n",
            ]

        with utils.tempinput(''.join(f)) as filename:
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            @mock.patch('import_fieldlogger.utils.QtGui.QFileDialog.getOpenFileNames')
            @mock.patch('import_fieldlogger.utils.QtGui.QInputDialog.getText')
            @mock.patch('import_fieldlogger.utils.MessagebarAndLog')
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            def _test_staff_not_given(self, filename, mock_MessagebarAndLog, mock_charset, mock_savefilename ):
                mock_charset.return_value = ('utf-8', True)
                mock_savefilename.return_value = [filename]

                ms = MagicMock()
                ms.settingsdict = OrderedDict()
                importer = FieldloggerImport(self.iface.mainWindow(), ms)
                importer.parse_observations_and_populate_gui()

                importer.start_import(importer.observations)
                mock_MessagebarAndLog.critical.assert_called_with(bar_msg=u'Import error, staff not given')
 def update_level_masl_from_head(self, obsid, fr_d_t, to_d_t, newzref):
     """ Updates the level masl using newzref
     :param obsid: (str) The obsid
     :param fr_d_t: (datetime) start of calibration
     :param to_d_t: (datetime) end of calibration
     :param newzref: (int/float/str [m]) The correction that should be made against the head [m]
     :return: None
     """
     sql =r"""UPDATE w_levels_logger SET level_masl = """
     sql += str(newzref)
     sql += """ + head_cm / 100 WHERE obsid = '"""
     sql += obsid
     # Sqlite seems to have problems with date comparison date_time >= a_date, so they have to be converted into total seconds first.
     sql += """' AND CAST(strftime('%s', date_time) AS NUMERIC) >= """
     sql += str((fr_d_t - datetime.datetime(1970,1,1)).total_seconds())
     sql += """ AND CAST(strftime('%s', date_time) AS NUMERIC) <= """
     sql += str((to_d_t - datetime.datetime(1970,1,1)).total_seconds())
     sql += """ """
     dummy = utils.sql_alter_db(sql)
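The comment above explains why both sides of the date filter are converted to epoch seconds with strftime('%s', ...). A small self-contained sketch of that comparison, runnable against an in-memory SQLite database, is:

import sqlite3
import datetime

# Sketch: filter rows by date range using epoch seconds on both sides, as in
# update_level_masl_from_head above (newzref = 100.0, head_cm = 150.0).
conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE w_levels_logger (obsid TEXT, date_time TEXT, head_cm REAL, level_masl REAL)")
conn.execute("INSERT INTO w_levels_logger VALUES ('rb1', '2016-06-01 12:00:00', 150.0, NULL)")

epoch = datetime.datetime(1970, 1, 1)
fr_d_t = datetime.datetime(2016, 1, 1)
to_d_t = datetime.datetime(2017, 1, 1)
conn.execute(
    "UPDATE w_levels_logger SET level_masl = ? + head_cm / 100 "
    "WHERE obsid = ? "
    "AND CAST(strftime('%s', date_time) AS NUMERIC) >= ? "
    "AND CAST(strftime('%s', date_time) AS NUMERIC) <= ?",
    (100.0, 'rb1', (fr_d_t - epoch).total_seconds(), (to_d_t - epoch).total_seconds()))
print(conn.execute("SELECT level_masl FROM w_levels_logger").fetchone())  # (101.5,)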
Example #21
    def test_update_trigger_add_east_north_not_nulling_east_north(self):
        """ Test that adding triggers and updating obsid from east, north don't set null values for previous obsid.
        :return:
        """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north, geometry) VALUES ('rb1', 1, 1, GeomFromText('POINT(1.0 1.0)', 3006))""")
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north, geometry) VALUES ('rb2', 2, 2, GeomFromText('POINT(2.0 2.0)', 3006))""")

        utils.add_triggers_to_obs_points()

        utils.sql_alter_db(u'''UPDATE obs_points SET east = 3, north = 3 WHERE (obsid = 'rb1')''')

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 3.0, 3.0, POINT(3 3)), (rb2, 2.0, 2.0, POINT(2 2))])'
        assert test_string == reference_string
Example #22
    def test_plot_section(self, mock_qgsproject_instance):
        """For now, the test only initiates the plot. Check that it does not crash."""
        mock_qgsproject_instance.return_value.readEntry = MIDV_DICT

        utils.sql_alter_db(u'''insert into obs_lines (obsid, geometry) values ("L1", GeomFromText('LINESTRING(633466.711659 6720684.24498, 633599.530455 6720727.016568)', 3006))''')
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry) values ("P1", GeomFromText('POINT(633466, 711659)', 3006))''')
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry) values ("P2", GeomFromText('POINT(6720727, 016568)', 3006))''')
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry) values ("P3", GeomFromText('POINT(6720728, 016569)', 3006))''')

        uri = QgsDataSourceURI()
        uri.setDatabase(TEMP_DB_PATH)
        uri.setDataSource('', 'obs_lines', 'geometry', '', 'obsid')

        self.vlayer = QgsVectorLayer(uri.uri(), 'TestLayer', 'spatialite')
        features = self.vlayer.getFeatures()
        for feature in features:
            featureid = feature.id()

        self.vlayer.setSelectedFeatures([featureid])

        @mock.patch('midvatten_utils.getselectedobjectnames', autospec=True)
        @mock.patch('qgis.utils.iface', autospec=True)
        def _test_plot_section(self, mock_iface, mock_getselectedobjectnames):
            mock_iface.mapCanvas.return_value.currentLayer.return_value = self.vlayer
            mock_getselectedobjectnames.return_value = (u'P1', u'P2', u'P3')
            mock_mapcanvas = mock_iface.mapCanvas.return_value
            mock_mapcanvas.layerCount.return_value = 0
            self.midvatten.plot_section()
            self.myplot = self.midvatten.myplot
            self.myplot.drillstoplineEdit.setText(u"%berg%")
            self.myplot.draw_plot()
            self.selected_obsids = self.myplot.selected_obsids
        _test_plot_section(self)

        assert self.myplot.drillstoplineEdit.text() == u'%berg%'
        assert utils_for_tests.create_test_string(self.myplot.selected_obsids) == "['P1' 'P2' 'P3']"
Example #23
    def test_import_vlf_data_obsid_not_in_db(self):
        file = [u'obsid,length2,real_comp,imag_comp,comment',
                u'obsid2,500,2,10,acomment']

        utils.sql_alter_db(u'INSERT INTO obs_lines ("obsid") VALUES ("obsid1")')

        with utils.tempinput(u'\n'.join(file), u'utf-8') as filename:
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    @mock.patch('midvatten_utils.NotFoundQuestion', autospec=True)
                    @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
                    @mock.patch('import_data_to_db.utils.askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
                    def _test(self, filename, mock_filename, mock_skippopup, mock_encoding, mock_iface, mock_askuser, mock_notfound):

                        mock_filename.return_value = filename
                        mock_encoding.return_value = [u'utf-8', True]

                        mock_notfound.return_value.answer = u'ok'
                        mock_notfound.return_value.value = u'obsid1'

                        def side_effect(*args, **kwargs):
                            mock_result = mock.MagicMock()
                            if u'msg' in kwargs:
                                if kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                                    mock_result.result = 1
                                    return mock_result
                            if len(args) > 1:
                                if args[1].startswith(u'Do you want to confirm'):
                                    mock_result.result = 0
                                    return mock_result
                                    #mock_askuser.return_value.result.return_value = 0
                                elif args[1].startswith(u'Do you want to import all'):
                                    mock_result.result = 0
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nForeign keys'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nThere are'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'It is a strong recommendation'):
                                    mock_result.result = 0
                                    return mock_result
                        mock_askuser.side_effect = side_effect

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = GeneralCsvImportGui(self.iface.mainWindow(), ms)
                        importer.load_gui()

                        importer.load_files()
                        importer.table_chooser.import_method = u'vlf_data'

                        for column in importer.table_chooser.columns:
                            names = {u'obsid': u'obsid', u'length': u'length2', u'real_comp': u'real_comp', u'imag_comp': u'imag_comp', u'comment': u'comment'}
                            if column.db_column in names:
                                column.file_column_name = names[column.db_column]

                        importer.start_import()

                    _test(self, filename)
                    test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select * from vlf_data'''))
                    reference_string = u'''(True, [(obsid1, 500.0, 2.0, 10.0, acomment)])'''
                    assert test_string == reference_string
Example #24
    def test_import_w_levels_many_rows(self):
        file = [u'obsid,date_time,meas']
        base = datestring_to_date(u'1900-01-01 00:01:01')
        date_list = [base + datetime.timedelta(days=x) for x in range(0, 10000)]
        file.extend([u'rb1,' + datetime.datetime.strftime(adate, u'%Y-%m-%d %H:%M') + u',0.5' for adate in date_list])

        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("rb1")''')

        with utils.tempinput(u'\n'.join(file), u'utf-8') as filename:
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
                    @mock.patch('import_data_to_db.utils.askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
                    def _test(self, filename, mock_filename, mock_skippopup, mock_encoding, mock_iface, mock_askuser):

                        mock_filename.return_value = filename
                        mock_encoding.return_value = [u'utf-8', True]

                        def side_effect(*args, **kwargs):
                            mock_result = mock.MagicMock()
                            if u'msg' in kwargs:
                                if kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                                    mock_result.result = 1
                                    return mock_result
                            if len(args) > 1:
                                if args[1].startswith(u'Do you want to confirm'):
                                    mock_result.result = 0
                                    return mock_result
                                    #mock_askuser.return_value.result.return_value = 0
                                elif args[1].startswith(u'Do you want to import all'):
                                    mock_result.result = 0
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nForeign keys'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nThere are'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'It is a strong recommendation'):
                                    mock_result.result = 0
                                    return mock_result
                        mock_askuser.side_effect = side_effect

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = GeneralCsvImportGui(self.iface.mainWindow(), ms)
                        importer.load_gui()

                        importer.load_files()
                        importer.table_chooser.import_method = u'w_levels'

                        for column in importer.table_chooser.columns:
                            names = {u'obsid': u'obsid', u'date_time': u'date_time', u'meas': u'meas'}
                            if column.db_column in names:
                                column.file_column_name = names[column.db_column]

                        import_time = timeit.timeit(importer.start_import, number=1)
                        return import_time

                    import_time = _test(self, filename)
                    test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select count(*) from w_levels'''))
                    reference_string = ur'''(True, [(10000)])'''
                    assert import_time < 10
                    assert test_string == reference_string
    def test_export_spatialite(self, mock_iface, mock_find_layer, mock_newdbpath, mock_verify):

        mock_find_layer.return_value.crs.return_value.authid.return_value = u'EPSG:3006'

        mock_newdbpath.return_value = EXPORT_DB_PATH
        mock_verify.return_value = 0

        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry) values ("P1", GeomFromText('POINT(633466, 711659)', 3006))''')
        utils.sql_alter_db(u'''insert into zz_staff (staff) values ('s1')''')
        utils.sql_alter_db(u'''insert into comments (obsid, date_time, staff, comment) values ('P1', '2015-01-01 00:00:00', 's1', 'comment1')''')
        utils.sql_alter_db(u'''insert into w_qual_lab (obsid, parameter, report, staff) values ('P1', 'labpar1', 'report1', 's1')''')
        utils.sql_alter_db(u'''insert into zz_w_qual_field_parameters (parameter, unit, shortname) values ('par1', 'unit1', 'parshortname1')''')
        utils.sql_alter_db(u'''insert into w_qual_field (obsid, parameter, staff, date_time, unit) values ('P1', 'par1', 's1', '2015-01-01 01:00:00', 'unit1')''')
        utils.sql_alter_db(u'''insert into w_flow (obsid, instrumentid, flowtype, date_time, unit) values ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')''')
        utils.sql_alter_db(u'''insert into w_levels (obsid, date_time, meas) values ('P1', '2015-01-02 00:00:01', '2')''')
        utils.sql_alter_db(u'''insert into stratigraphy (obsid, stratid) values ('P1', 'strat1')''')
        utils.sql_alter_db(u'''insert into obs_lines (obsid) values ('L1')''')
        utils.sql_alter_db(u'''insert into seismic_data (obsid, length) values ('L1', '5')''')
        utils.sql_alter_db(u'''insert into meteo (obsid, instrumentid, parameter, date_time) values ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')''')

        self.midvatten.export_spatialite()

        sql_list = [u'''select obsid from obs_points''',
                    u'''select staff from zz_staff''',
                    u'''select obsid, date_time, staff, comment from comments''',
                    u'''select obsid, parameter, report, staff from w_qual_lab''',
                    u'''select parameter, unit, shortname from zz_w_qual_field_parameters''',
                    u'''select obsid, parameter, staff, date_time, comment from w_qual_field''',
                    u'''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
                    u'''select obsid, date_time, meas from w_levels''',
                    u'''select obsid, stratid from stratigraphy''',
                    u'''select obsid from obs_lines''',
                    u'''select obsid, length from seismic_data''',
                    u'''select obsid, instrumentid, parameter, date_time from meteo''']


        conn = sqlite.connect(EXPORT_DB_PATH, detect_types=sqlite.PARSE_DECLTYPES|sqlite.PARSE_COLNAMES)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)
        reference_string = [u'''[''',
                            u'''select obsid from obs_points''',
                            u''', [(P1)], ''',
                            u'''select staff from zz_staff''',
                            u''', [(s1)], ''',
                            u'''select obsid, date_time, staff, comment from comments''',
                            u''', [(P1, 2015-01-01 00:00:00, s1, comment1)], ''',
                            u'''select obsid, parameter, report, staff from w_qual_lab''',
                            u''', [(P1, labpar1, report1, s1)], ''',
                            u'''select parameter, unit, shortname from zz_w_qual_field_parameters''',
                            u''', [(DO, %, DO), (DO, mg/L, DO), (conductivity, µS/cm, cond), (pH, None, pH), (reduction potential, mV, redox), (temperature, grC, temp), (turbidity, FNU, turb), (par1, unit1, None)], ''',
                            u'''select obsid, parameter, staff, date_time, comment from w_qual_field''',
                            u''', [(P1, par1, s1, 2015-01-01 01:00:00, None)], ''',
                            u'''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
                            u''', [(P1, inst1, Momflow, 2015-04-13 00:00:00, l/s)], ''',
                            u'''select obsid, date_time, meas from w_levels''',
                            u''', [(P1, 2015-01-02 00:00:01, 2.0)], ''',
                            u'''select obsid, stratid from stratigraphy''',
                            u''', [(P1, strat1)], ''',
                            u'''select obsid from obs_lines''',
                            u''', [(L1)], ''',
                            u'''select obsid, length from seismic_data''',
                            u''', [(L1, 5.0)], ''',
                            u'''select obsid, instrumentid, parameter, date_time from meteo''',
                            u''', [(P1, meteoinst, precip, 2017-01-01 00:19:00)]]''']
        reference_string = u'\n'.join(reference_string)
        assert test_string == reference_string
    def test_export_csv(self, mock_iface, mock_savepath):
        mock_savepath.return_value = u'/tmp/'
        utils.sql_alter_db(u'''insert into obs_points (obsid, geometry) values ("P1", GeomFromText('POINT(633466, 711659)', 3006))''')
        utils.sql_alter_db(u'''insert into zz_staff (staff) values ('s1')''')
        utils.sql_alter_db(u'''insert into comments (obsid, date_time, staff, comment) values ('P1', '2015-01-01 00:00:00', 's1', 'comment1')''')
        utils.sql_alter_db(u'''insert into w_qual_lab (obsid, parameter, report, staff) values ('P1', 'labpar1', 'report1', 's1')''')
        utils.sql_alter_db(u'''insert into zz_w_qual_field_parameters (parameter, unit, shortname) values ('par1', 'unit1', 'parshortname1')''')
        utils.sql_alter_db(u'''insert into w_qual_field (obsid, parameter, staff, date_time) values ('P1', 'labpar1', 's1', '2015-01-01 01:00:00')''')
        utils.sql_alter_db(u'''insert into w_flow (obsid, instrumentid, flowtype, date_time, unit) values ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')''')
        utils.sql_alter_db(u'''insert into w_levels (obsid, date_time, meas) values ('P1', '2015-01-02 00:00:01', '2')''')
        utils.sql_alter_db(u'''insert into stratigraphy (obsid, stratid) values ('P1', 'strat1')''')
        utils.sql_alter_db(u'''insert into obs_lines (obsid) values ('L1')''')
        utils.sql_alter_db(u'''insert into seismic_data (obsid, length) values ('L1', '5')''')
        utils.sql_alter_db(u'''insert into meteo (obsid, instrumentid, parameter, date_time) values ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')''')

        self.midvatten.export_csv()

        file_contents = []
        for filename in TestExport.exported_csv_files_no_zz:
            with io.open(filename, 'r', encoding='utf-8') as f:
                file_contents.append(os.path.basename(filename) + '\n')
                file_contents.append([l.replace('\r', '') for l in f])
        test_string = utils_for_tests.create_test_string(file_contents)

        with io.open('/tmp/refstring.txt', 'w', encoding='utf-8') as of:
            of.write(test_string)

        reference_string = '\n'.join([
            "[obs_points.csv",
            ", [obsid;name;place;type;length;drillstop;diam;material;screen;capacity;drilldate;wmeas_yn;wlogg_yn;east;north;ne_accur;ne_source;h_toc;h_tocags;h_gs;h_accur;h_syst;h_source;source;com_onerow;com_html;geometry",
            ", P1;;;;;;;;;;;;;;;;;;;;;;;;;;",
            "], comments.csv",
            ", [obsid;date_time;comment;staff",
            ", P1;2015-01-01 00:00:00;comment1;s1",
            "], w_levels.csv",
            ", [obsid;date_time;meas;h_toc;level_masl;comment",
            ", P1;2015-01-02 00:00:01;2.0;;;",
            "], w_flow.csv",
            ", [obsid;instrumentid;flowtype;date_time;reading;unit;comment",
            ", P1;inst1;Momflow;2015-04-13 00:00:00;;l/s;",
            "], w_qual_lab.csv",
            ", [obsid;depth;report;project;staff;date_time;anameth;parameter;reading_num;reading_txt;unit;comment",
            ", P1;;report1;;s1;;;labpar1;;;;",
            "], w_qual_field.csv",
            ", [obsid;staff;date_time;instrument;parameter;reading_num;reading_txt;unit;comment",
            ", P1;s1;2015-01-01 01:00:00;;labpar1;;;;",
            "], stratigraphy.csv",
            ", [obsid;stratid;depthtop;depthbot;geology;geoshort;capacity;development;comment",
            ", P1;strat1;;;;;;;",
            "], meteo.csv",
            ", [obsid;instrumentid;parameter;date_time;reading_num;reading_txt;unit;comment",
            ", P1;meteoinst;precip;2017-01-01 00:19:00;;;;",
            "], obs_lines.csv",
            ", [obsid;name;place;type;source;geometry",
            ", L1;;;;;",
            "], seismic_data.csv",
            ", [obsid;length;ground;bedrock;gw_table;comment",
            ", L1;5.0;;;;",
            "]]"])

        assert test_string == reference_string
Example #27
    def test_import_w_levels(self):
        file = [u'obsid,date_time,meas',
                 u'rb1,2016-03-15 10:30:00,5.0']

        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("rb1")''')

        with utils.tempinput(u'\n'.join(file), u'utf-8') as filename:
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
                    @mock.patch('import_data_to_db.utils.askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
                    def _test(self, filename, mock_filename, mock_skippopup, mock_encoding, mock_iface, mock_askuser):

                        mock_filename.return_value = filename
                        mock_encoding.return_value = [u'utf-8', True]

                        def side_effect(*args, **kwargs):
                            mock_result = mock.MagicMock()
                            if u'msg' in kwargs:
                                if kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                                    mock_result.result = 1
                                    return mock_result
                            if len(args) > 1:
                                if args[1].startswith(u'Do you want to confirm'):
                                    mock_result.result = 0
                                    return mock_result
                                    #mock_askuser.return_value.result.return_value = 0
                                elif args[1].startswith(u'Do you want to import all'):
                                    mock_result.result = 0
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nForeign keys'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nThere are'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'It is a strong recommendation'):
                                    mock_result.result = 0
                                    return mock_result
                        mock_askuser.side_effect = side_effect

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = GeneralCsvImportGui(self.iface.mainWindow(), ms)
                        importer.load_gui()

                        importer.load_files()
                        importer.table_chooser.import_method = u'w_levels'

                        for column in importer.table_chooser.columns:
                            names = {u'obsid': u'obsid', u'date_time': u'date_time', u'meas': u'meas'}
                            if column.db_column in names:
                                column.file_column_name = names[column.db_column]

                        importer.start_import()

                    _test(self, filename)
                    test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select obsid, date_time, meas, h_toc, level_masl, comment from w_levels'''))
                    reference_string = ur'''(True, [(rb1, 2016-03-15 10:30:00, 5.0, None, None, None)])'''
                    assert test_string == reference_string
Example #28
    def test_full_integration_test_to_db(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1202")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1608")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1615")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1505")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1512")''')
        utils.sql_alter_db(u'''INSERT INTO zz_staff ("staff") VALUES ("teststaff")''')

        utils.sql_alter_db(u'''INSERT or ignore INTO zz_flowtype ("type") VALUES ("Accvol")''')

        f = [
            u"Location;date_time;value;comment\n",
            u"Rb1202.sample;30-03-2016;15:31:30;hej2;s.comment\n",
            u"Rb1608.level;30-03-2016;15:34:40;testc;l.comment\n",
            u"Rb1615.flow;30-03-2016;15:30:09;357;f.Accvol.m3\n",
            u"Rb1615.flow;30-03-2016;15:30:09;gick bra;f.comment\n",
            u"Rb1608.level;30-03-2016;15:34:13;ergv;l.comment\n",
            u"Rb1608.level;30-03-2016;15:34:13;555;l.meas.m\n",
            u"Rb1512.sample;30-03-2016;15:31:30;899;s.turbiditet.FNU\n",
            u"Rb1505.quality;30-03-2016;15:29:26;hej;q.comment\n",
            u"Rb1505.quality;30-03-2016;15:29:26;863;q.konduktivitet.µS/cm\n",
            u"Rb1512.quality;30-03-2016;15:30:39;test;q.comment\n",
            u"Rb1512.quality;30-03-2016;15:30:39;67;q.syre.mg/L\n",
            u"Rb1512.quality;30-03-2016;15:30:39;8;q.temperatur.grC\n",
            u"Rb1512.quality;30-03-2016;15:30:40;58;q.syre.%\n",
            ]

        with utils.tempinput(''.join(f)) as filename:
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            @mock.patch('import_fieldlogger.utils.askuser')
            @mock.patch('import_fieldlogger.utils.NotFoundQuestion')
            @mock.patch('import_fieldlogger.utils.QtGui.QFileDialog.getOpenFileNames')
            @mock.patch('import_fieldlogger.utils.QtGui.QInputDialog.getText')
            @mock.patch('import_fieldlogger.utils.MessagebarAndLog')
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            def _full_integration_test_to_db(self, filename, mock_MessagebarAndLog, mock_charset, mock_savefilename, mock_ask_instrument, mock_vacuum):
                mock_vacuum.return_value.result = 1
                mock_charset.return_value = ('utf-8', True)
                mock_savefilename.return_value = [filename]
                mock_ask_instrument.return_value.value = u'testid'

                ms = MagicMock()
                ms.settingsdict = OrderedDict()
                importer = FieldloggerImport(self.iface.mainWindow(), ms)
                importer.parse_observations_and_populate_gui()

                #Set settings:
                for setting in importer.settings:
                    if isinstance(setting, import_fieldlogger.StaffQuestion):
                        setting.staff = u'teststaff'

                stored_settings = [[u's.comment', [[u'import_method', u'comments']]],
                                   [u'l.comment', [[u'import_method', u'comments']]],
                                   [u'f.comment', [[u'import_method', u'comments']]],
                                   [u'q.comment', [[u'import_method', u'comments']]],
                                   [u'l.meas.m', [[u'import_method', u'w_levels']]],
                                   [u'f.Accvol.m3', [[u'import_method', u'w_flow'], [u'flowtype', u'Accvol'], [u'unit', u'm3']]],
                                   [u's.turbiditet.FNU', [[u'import_method', u'w_qual_field'], [u'parameter', u'turbiditet'], [u'unit', u'FNU'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.konduktivitet.µS/cm', [[u'import_method', u'w_qual_field'], [u'parameter', u'konduktivitet'], [u'unit', u'µS/cm'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.syre.mg/L', [[u'import_method', u'w_qual_field'], [u'parameter', u'syre'], [u'unit', u'mg/L'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.syre.%', [[u'import_method', u'w_qual_field'], [u'parameter', u'syre'], [u'unit', u'%'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.temperatur.grC', [[u'import_method', u'w_qual_field'], [u'parameter', u'temperatur'], [u'unit', u'grC'], [u'depth', u''], [u'instrument', u'testid']]]]
                importer.set_parameters_using_stored_settings(stored_settings, importer.parameter_imports)
                importer.start_import(importer.observations)

            _full_integration_test_to_db(self, filename)

            test_string = create_test_string(dict([(k, utils.sql_load_fr_db(u'select * from %s'%k)) for k in (u'w_levels', u'w_qual_field', u'w_flow', u'zz_staff', u'comments')]))
            reference_string = u'{comments: (True, [(Rb1202, 2016-03-30 15:31:30, hej2, teststaff), (Rb1608, 2016-03-30 15:34:40, testc, teststaff)]), w_flow: (True, [(Rb1615, testid, Accvol, 2016-03-30 15:30:09, 357.0, m3, gick bra)]), w_levels: (True, [(Rb1608, 2016-03-30 15:34:13, 555.0, None, None, ergv)]), w_qual_field: (True, [(Rb1512, teststaff, 2016-03-30 15:30:39, testid, syre, 67.0, 67, mg/L, None, test), (Rb1512, teststaff, 2016-03-30 15:31:30, testid, turbiditet, 899.0, 899, FNU, None, None), (Rb1505, teststaff, 2016-03-30 15:29:26, testid, konduktivitet, 863.0, 863, µS/cm, None, hej), (Rb1512, teststaff, 2016-03-30 15:30:40, testid, syre, 58.0, 58, %, None, None), (Rb1512, teststaff, 2016-03-30 15:30:39, testid, temperatur, 8.0, 8, grC, None, test)]), zz_staff: (True, [(teststaff, None)])}'
            assert test_string == reference_string
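The reference string above documents how each FieldLogger row maps into the database: the part of the location before the first dot becomes the obsid, the date and time fields are joined (with day-month-year reordered), and the trailing type field such as f.Accvol.m3 selects the import method, flowtype and unit. A rough per-row split consistent with that mapping (an illustration, not the importer's actual parser) is:

# Illustration only: split one FieldLogger row the way the expected
# reference string above implies.
row = u"Rb1615.flow;30-03-2016;15:30:09;357;f.Accvol.m3"
location, date, time, value, parameter = row.split(u';')
obsid = location.split(u'.')[0]                                   # u'Rb1615'
date_time = u'-'.join(reversed(date.split(u'-'))) + u' ' + time   # u'2016-03-30 15:30:09'
# (obsid, date_time, value, parameter) ->
# (u'Rb1615', u'2016-03-30 15:30:09', u'357', u'f.Accvol.m3')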
Example #29
    def test_full_into_zz_flowtype(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("2")''')
        #utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("5")''')
        f = [u'LOCATION;DATE;TIME;VALUE;TYPE\n',
            u'5.2892.level;12-12-2016;10:02:49;comment;l.comment\n',
            u'5.2892.level;12-12-2016;10:02:49;123;meas.m\n',
            u'5.2892.level;12-12-2016;10:02:57;onlycomment;l.comment\n',
            u'2.2892.flow;12-12-2016;10:03:07;123;Aveflow.m3/s\n',
            u'2.2892.flow;12-12-2016;10:03:15;onlycomment;f.comment\n',
            u'2.2892.comment;12-12-2016;10:03:24;onlycomment;comment\n']

        with utils.tempinput(''.join(f)) as filename:
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            @mock.patch('import_fieldlogger.utils.askuser')
            @mock.patch('import_fieldlogger.utils.NotFoundQuestion')
            @mock.patch('import_fieldlogger.utils.QtGui.QFileDialog.getOpenFileNames')
            @mock.patch('import_fieldlogger.utils.QtGui.QInputDialog.getText')
            @mock.patch('import_fieldlogger.utils.MessagebarAndLog')
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            def _test(self, filename, mock_MessagebarAndLog, mock_charset, mock_savefilename, mock_ask_instrument, mock_askuser):
                mock_charset.return_value = ('utf-8', True)
                mock_savefilename.return_value = [filename]
                mock_ask_instrument.return_value.value = u'testid'

                def side_effect(*args, **kwargs):
                    mock_result = mock.MagicMock()
                    if len(args) > 1:
                        if args[1].startswith(u'Do you want to confirm'):
                            mock_result.result = 0
                            return mock_result
                        elif args[1].startswith(u'Do you want to import all'):
                            mock_result.result = 0
                            return mock_result
                        elif args[1].startswith(u'Please note!\nForeign keys'):
                            mock_result.result = 1
                            return mock_result
                        elif args[1].startswith(u'Please note!\nThere are'):
                            mock_result.result = 1
                            return mock_result
                        elif args[1].startswith(u'It is a strong recommendation'):
                            mock_result.result = 0
                            return mock_result

                mock_askuser.side_effect = side_effect


                ms = MagicMock()
                ms.settingsdict = OrderedDict()
                importer = FieldloggerImport(self.iface.mainWindow(), ms)
                importer.parse_observations_and_populate_gui()

                #Set settings:
                for setting in importer.settings:
                    if isinstance(setting, import_fieldlogger.StaffQuestion):
                        setting.staff = u'teststaff'

                stored_settings = [[u'f.comment', [[u'import_method', u'comments']]],
                                   [u'Aveflow.m3/s', [[u'import_method', u'w_flow'], [u'flowtype', u'Momflow2'], [u'unit', u'aunit']]]]

                importer.set_parameters_using_stored_settings(stored_settings, importer.parameter_imports)
                importer.start_import(importer.observations)

            _test(self, filename)

            test_string = create_test_string(dict([(k, utils.sql_load_fr_db(u'select * from %s'%k)) for k in (u'w_flow', u'zz_staff', u'comments', u'zz_flowtype')]))
            reference_string = u'{comments: (True, [(2, 2016-12-12 10:03:15, onlycomment, teststaff)]), w_flow: (True, [(2, testid, Momflow2, 2016-12-12 10:03:07, 123.0, aunit, None)]), zz_flowtype: (True, [(Accvol, Accumulated volume), (Momflow, Momentary flow rate), (Aveflow, Average flow since last reading), (Momflow2, None)]), zz_staff: (True, [(teststaff, None)])}'
            assert test_string == reference_string
    def test_get_last_logger_dates(self):
        utils.sql_alter_db('''insert into obs_points (obsid) values ('rb1')''')
        utils.sql_alter_db('''insert into obs_points (obsid) values ('rb2')''')
        utils.sql_alter_db('''insert into w_levels_logger (obsid, date_time) values ('rb1', '2015-01-01 00:00')''')
        utils.sql_alter_db('''insert into w_levels_logger (obsid, date_time) values ('rb1', '2015-01-01 00:00:00')''')
        utils.sql_alter_db('''insert into w_levels_logger (obsid, date_time) values ('rb1', '2014-01-01 00:00:00')''')
        utils.sql_alter_db('''insert into w_levels_logger (obsid, date_time) values ('rb2', '2013-01-01 00:00:00')''')
        utils.sql_alter_db('''insert into w_levels_logger (obsid, date_time) values ('rb2', '2016-01-01 00:00')''')

        test_string = utils_for_tests.create_test_string(utils.get_last_logger_dates())
        reference_string = u'''{rb1: [(2015-01-01 00:00:00)], rb2: [(2016-01-01 00:00)]}'''
        assert test_string == reference_string
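The expected result keeps the latest date_time per obsid; note that for rb1 the longer string '2015-01-01 00:00:00' is returned rather than '2015-01-01 00:00', which is what a plain text MAX comparison would give. One way such a lookup could be written, reusing the (success, rows) convention of utils.sql_load_fr_db, is:

# Sketch of a per-obsid latest-date query consistent with the expected output
# above; utils.get_last_logger_dates may well be implemented differently.
ok, rows = utils.sql_load_fr_db(
    u"""SELECT obsid, MAX(date_time) FROM w_levels_logger GROUP BY obsid""")
last_dates = dict((obsid, [(date_time,)]) for obsid, date_time in rows)
# {u'rb1': [(u'2015-01-01 00:00:00',)], u'rb2': [(u'2016-01-01 00:00',)]}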