def calcall(self):
        fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
        to_d_t = self.ToDateTime.dateTime().toPyDateTime()
        
#        sanity1 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.h_toc""")[1]
        sanity1 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid""")[1]
        sanity2 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.h_toc NOT NULL""")[1]
        
        if len(sanity1) == len(sanity2): #only if h_toc exists for all objects!!
            sql1 = """UPDATE OR IGNORE w_levels SET h_toc = (SELECT obs_points.h_toc FROM obs_points WHERE w_levels.obsid = obs_points.obsid) WHERE """
            sql1 += """date_time >= '"""
            sql1 += str(fr_d_t)
            sql1 += """' AND date_time <= '"""
            sql1 += str(to_d_t)
            sql1 += """' """
            utils.sql_alter_db(sql1)
            sql2 = """UPDATE OR IGNORE w_levels SET level_masl = h_toc - meas WHERE """
            sql2 += """date_time >= '"""
            sql2 += str(fr_d_t)
            sql2 += """' AND date_time <= '"""
            sql2 += str(to_d_t)
            sql2 += """' """        
            utils.sql_alter_db(sql2)
            self.close()
        else:
            utils.pop_up_info('Calculation aborted! There seem to be NULL values in your table obs_points, column h_toc.','Error')
            self.close()
    def calcselected(self):
        obsar = utils.getselectedobjectnames(self.layer)
        #turn into a list of python utf-8 byte strings
        observations = [obs.encode('utf-8') for obs in obsar]
        fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
        to_d_t = self.ToDateTime.dateTime().toPyDateTime()

        sanity1 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.obsid IN """ + (str(observations)).encode('utf-8').replace('[','(').replace(']',')'))[1]
        sanity2 = utils.sql_load_fr_db("""SELECT obs_points.h_toc FROM obs_points LEFT JOIN w_levels WHERE w_levels.obsid = obs_points.obsid AND obs_points.h_toc NOT NULL  AND obs_points.obsid IN """ + (str(observations)).encode('utf-8').replace('[','(').replace(']',')'))[1]

        if len(sanity1) == len(sanity2): #only if h_toc exists for all objects
            sql1 = """UPDATE OR IGNORE w_levels SET h_toc = (SELECT obs_points.h_toc FROM obs_points WHERE w_levels.obsid = obs_points.obsid) WHERE obsid IN """
            sql1 += str(observations)
            sql1 += """ AND date_time >= '"""
            sql1 += str(fr_d_t)
            sql1 += """' AND date_time <= '"""
            sql1 += str(to_d_t)
            sql1 += """' """   
            utils.sql_alter_db(sql1.replace("[","(").replace("]",")"))
            sql2 = """UPDATE OR IGNORE w_levels SET level_masl = h_toc - meas WHERE obsid IN """
            sql2 += str(observations)
            sql2 += """ AND date_time >= '"""
            sql2 += str(fr_d_t)
            sql2 += """' AND date_time <= '"""
            sql2 += str(to_d_t)
            sql2 += """' """        
            utils.sql_alter_db(sql2.replace("[","(").replace("]",")"))
            self.close()        
        else:
            utils.pop_up_info('Calculation aborted! There seem to be NULL values in your table obs_points, column h_toc.','Error')
            self.close()
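A minimal sketch (not from the plugin; the obsids are made up) of the str(list) + replace trick used above to turn the selected obsids into an SQL IN clause:

# hypothetical illustration of the list-to-IN-clause conversion in calcselected
observations = ['rb1', 'rb2']
in_clause = str(observations).replace('[', '(').replace(']', ')')
print(in_clause)  # ('rb1', 'rb2')
sql = "SELECT obsid FROM w_levels WHERE obsid IN " + in_clause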
 def calibrateandplot(self):
     obsid = unicode(self.combobox_obsid.currentText())
     if not obsid=='':
         sanity1sql = """select count(obsid) from w_levels_logger where obsid = '""" +  obsid[0] + """'"""
         sanity2sql = """select count(obsid) from w_levels_logger where head_cm not null and head_cm !='' and obsid = '""" +  obsid[0] + """'"""
         if utils.sql_load_fr_db(sanity1sql)[1] == utils.sql_load_fr_db(sanity2sql)[1]: # This must only be done if head_cm exists for all data
             fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
             to_d_t = self.ToDateTime.dateTime().toPyDateTime()
             newzref = self.LoggerPos.text()
             if len(newzref)>0:
                 sql =r"""UPDATE w_levels_logger SET level_masl = """
                 sql += str(newzref)
                 sql += """ + head_cm / 100 WHERE obsid = '"""
                 sql += obsid   
                 sql += """' AND date_time >= '"""
                 sql += str(fr_d_t)
                 sql += """' AND date_time <= '"""
                 sql += str(to_d_t)
                 sql += """' """
                 dummy = utils.sql_alter_db(sql)
             self.CalibrationPlot(obsid)
             self.getlastcalibration()
         else:
             utils.pop_up_info("Calibration aborted!!\nThere must not be empty cells or\nnull values in the 'head_cm' column!")
     else:
         self.INFO.setText("Select the observation point with logger data to be calibrated.")
def GetStatistics(obsid = ''):
    Statistics_list = [0]*4

    columns = ['meas', 'level_masl']
    meas_or_level_masl= 'meas'#default value

    #number of values; also decides whether to use meas or level_masl in the report
    for column in columns:
        sql = r"""select Count(""" + column + r""") from w_levels where obsid = '"""
        sql += obsid
        sql += r"""'"""
        ConnectionOK, number_of_values = utils.sql_load_fr_db(sql)
        if number_of_values and number_of_values[0][0] > Statistics_list[2]:#this will select meas if meas >= level_masl
            meas_or_level_masl = column
            Statistics_list[2] = number_of_values[0][0]

    #min value (min meas or max level_masl, i.e. the highest water level)
    if meas_or_level_masl=='meas':
        sql = r"""select min(meas) from w_levels where obsid = '"""
    else:
        sql = r"""select max(level_masl) from w_levels where obsid = '"""
    sql += obsid
    sql += r"""'"""
    ConnectionOK, min_value = utils.sql_load_fr_db(sql)
    if min_value:
        Statistics_list[0] = min_value[0][0]

    #median value
    sql = r"""SELECT x.obsid, x.""" + meas_or_level_masl + r""" as median from (select obsid, """ + meas_or_level_masl + r""" FROM w_levels WHERE obsid = '"""
    sql += obsid
    sql += r"""' and (typeof(""" + meas_or_level_masl + r""")=typeof(0.01) or typeof(""" + meas_or_level_masl + r""")=typeof(1))) as x, (select obsid, """ + meas_or_level_masl + r""" FROM w_levels WHERE obsid = '"""
    sql += obsid
    sql += r"""' and (typeof(""" + meas_or_level_masl + r""")=typeof(0.01) or typeof(""" + meas_or_level_masl + r""")=typeof(1))) as y GROUP BY x.""" + meas_or_level_masl + r""" HAVING SUM(CASE WHEN y.""" + meas_or_level_masl + r""" <= x.""" + meas_or_level_masl + r""" THEN 1 ELSE 0 END)>=(COUNT(*)+1)/2 AND SUM(CASE WHEN y.""" + meas_or_level_masl + r""" >= x.""" + meas_or_level_masl + r""" THEN 1 ELSE 0 END)>=(COUNT(*)/2)+1"""
    ConnectionOK, median_value = utils.sql_load_fr_db(sql)
    if median_value:
        Statistics_list[1] = median_value[0][1]

    #max value (max meas or min level_masl, i.e. the lowest water level)
    if meas_or_level_masl=='meas':
        sql = r"""select max(meas) from w_levels where obsid = '"""
    else:
        sql = r"""select min(level_masl) from w_levels where obsid = '"""
    sql += obsid
    sql += r"""'"""
    ConnectionOK, max_value = utils.sql_load_fr_db(sql)
    if max_value:
        Statistics_list[3] = max_value[0][0]

    return meas_or_level_masl, Statistics_list
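A hypothetical call, assuming a populated Midvatten database and an existing obsid 'rb1':

meas_or_level_masl, stats = GetStatistics('rb1')
# stats is [min value, median, number of values, max value] for the chosen column
print(meas_or_level_masl, stats)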
 def sql_into_recarray(self, sql):
     """ Converts and runs an sql-string and turns the answer into an np.recarray and returns it""" 
     my_format = [('date_time', datetime.datetime), ('values', float)] #define a numpy dtype for the two returned columns
     recs = utils.sql_load_fr_db(sql)[1]
     table = np.array(recs, dtype=my_format)  #NDARRAY
     table2 = table.view(np.recarray)   # RECARRAY   makes the two columns accessible as attributes, i.e. write table2.values
     return table2        
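A hedged usage sketch (the instance name and obsid are assumed) showing the attribute access the comment above refers to:

sql = "select date_time, meas from w_levels where obsid = 'rb1' order by date_time"
table2 = plotter.sql_into_recarray(sql)  # 'plotter' stands for an instance of the class above
first_date, first_value = table2.date_time[0], table2.values[0]  # columns as attributes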
    def test_create_db_locale_se(self, mock_locale):
        mock_locale.return_value = [u'se_SV']

        self.midvatten.new_db(u'sv_SE')
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select * from zz_strat'))
        reference_string = ur"""(True, [(okänt, white, , white, NoBrush, not in ('berg','b','rock','ro','grovgrus','grg','coarse gravel','cgr','grus','gr','gravel','mellangrus','grm','medium gravel','mgr','fingrus','grf','fine gravel','fgr','grovsand','sag','coarse sand','csa','sand','sa','mellansand','sam','medium sand','msa','finsand','saf','fine sand','fsa','silt','si','lera','ler','le','clay','cl','morän','moran','mn','till','ti','torv','t','peat','pt','fyll','fyllning','f','made ground','mg','land fill')), (berg, red, x, red, DiagCrossPattern, in ('berg','b','rock','ro')), (grovgrus, DarkGreen, O, darkGreen, Dense7Pattern, in ('grovgrus','grg','coarse gravel','cgr')), (grus, DarkGreen, O, darkGreen, Dense7Pattern, in ('grus','gr','gravel')), (mellangrus, DarkGreen, o, darkGreen, Dense6Pattern, in ('mellangrus','grm','medium gravel','mgr')), (fingrus, DarkGreen, o, darkGreen, Dense6Pattern, in ('fingrus','grf','fine gravel','fgr')), (grovsand, green, *, green, Dense5Pattern, in ('grovsand','sag','coarse sand','csa')), (sand, green, *, green, Dense5Pattern, in ('sand','sa')), (mellansand, green, ., green, Dense4Pattern, in ('mellansand','sam','medium sand','msa')), (finsand, DarkOrange, ., orange, Dense5Pattern, in ('finsand','saf','fine sand','fsa')), (silt, yellow, \\, yellow, BDiagPattern, in ('silt','si')), (lera, yellow, -, yellow, HorPattern, in ('lera','ler','le','clay','cl')), (morän, cyan, /, yellow, CrossPattern, in ('morän','moran','mn','till','ti')), (torv, DarkGray, +, darkGray, NoBrush, in ('torv','t','peat','pt')), (fyll, white, +, white, DiagCrossPattern, in ('fyll','fyllning','f','made ground','mg','land fill'))])"""
        assert test_string == reference_string
 def test_add_triggers_not_change_existing(self):
     """ Adding triggers should not automatically change the db """
     utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "east", "north") VALUES ('rb1', 1, 1)''')
     test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
     utils.add_triggers_to_obs_points()
     reference_string = u'(True, [(rb1, 1.0, 1.0, None)])'
     assert test_string == reference_string
Example #8
 def get_piper_data(self):
     #These observations are supposed to be in mg/l and must be stored in a Midvatten database, table w_qual_lab
     sql = self.big_sql()
     print(sql)#debug
     # get data into a list: obsid, date_time, type, Cl_meqPl, HCO3_meqPl, SO4_meqPl, Na+K_meqPl, Ca_meqPl, Mg_meqPl
     obsimport = utils.sql_load_fr_db(sql)[1]
     #convert to numpy ndarray W/O format specified
     self.obsnp_nospecformat = np.array(obsimport)
     #define format
     """ some problems with string fields
     np.str_
     My_format = [('obsid', str), 
     My_format = [('obsid', unicode), 
     My_format = [('obsid', np.dtype('a35')), 
     My_format = [('obsid', np.dtype(np.str_)),
     My_format = [('obsid', np.str_),
     My_format = [('obsid', object),
     none of these works except 'a35', which limits string length to 35 characters
     the least bad option is the "object" type: everything is loaded, but all strings end up as unicode strings, which _should_ be ok
     """
     My_format = [('obsid', object), ('date_time', datetime.datetime),('obstype', object),('Cl_meqPl', float),('HCO3_meqPl', float),('SO4_meqPl', float),('NaK_meqPl', float),('Ca_meqPl', float),('Mg_meqPl', float)]
     #convert to numpy ndarray W format specified - i.e. a structured array
     self.obsnp_specified_format = np.array(obsimport, dtype=My_format)
     #convert to np recarray - takes the structured array and makes the columns into callable objects, i.e. write table2.Cl_meqPl
     self.obsrecarray=self.obsnp_specified_format.view(np.recarray)
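A small standalone sketch (illustrative data only) of the string-dtype behaviour described in the note above; 'S35' is the same fixed-width type as the 'a35' mentioned there:

import numpy as np

rows = [('a_rather_long_observation_point_name_over_35_characters', 1.0)]
# the fixed-width string dtype silently truncates the obsid to 35 characters ...
truncated = np.array(rows, dtype=[('obsid', 'S35'), ('value', float)])
# ... while the generic object dtype keeps the full string
kept = np.array(rows, dtype=[('obsid', object), ('value', float)])
print(truncated['obsid'][0])
print(kept['obsid'][0])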
Example #9
 def test_create_db_se(self, mock_locale, mock_iface):
     mock_locale.return_value.answer = u'ok'
     mock_locale.return_value.value = u'sv_SE'
     self.midvatten.new_db()
     test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select * from zz_strat'))
     reference_string = ur"""(True, [(berg, berg), (b, berg), (rock, berg), (ro, berg), (grovgrus, grovgrus), (grg, grovgrus), (coarse gravel, grovgrus), (cgr, grovgrus), (grus, grus), (gr, grus), (gravel, grus), (mellangrus, mellangrus), (grm, mellangrus), (medium gravel, mellangrus), (mgr, mellangrus), (fingrus, fingrus), (grf, fingrus), (fine gravel, fingrus), (fgr, fingrus), (grovsand, grovsand), (sag, grovsand), (coarse sand, grovsand), (csa, grovsand), (sand, sand), (sa, sand), (mellansand, mellansand), (sam, mellansand), (medium sand, mellansand), (msa, mellansand), (finsand, finsand), (saf, finsand), (fine sand, finsand), (fsa, finsand), (silt, silt), (si, silt), (lera, lera), (ler, lera), (le, lera), (clay, lera), (cl, lera), (morän, morän), (moran, morän), (mn, morän), (till, morän), (ti, morän), (torv, torv), (t, torv), (peat, torv), (pt, torv), (fyll, fyll), (fyllning, fyll), (f, fyll), (made ground, fyll), (mg, fyll), (land fill, fyll)])"""
     assert test_string == reference_string
     current_locale = utils.getcurrentlocale()[0]
     assert current_locale == u'sv_SE'
    def test_add_east_north_from_geometry(self):
        """ Test that adding triggers and adding obsid with geometry also adds east, north
        :return:
        """
        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 1.0, POINT(1 1))])'
        assert test_string == reference_string
    def test_add_geometry_from_east_north(self):
        """ Test that adding triggers and adding obsid with east, north also adds geometry
        :return:
        """
        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "east", "north") VALUES ('rb1', 1, 1)''')

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 1.0, POINT(1 1))])'
        assert test_string == reference_string
Example #12
    def test_add_triggers_not_deleting_geom_when_east_north_null(self):
        """ Adding triggers should not automatically delete geometry when east AND north is NULL """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")
        #After the first: u'(True, [(rb1, None, None, POINT(1 1))])'

        utils.add_triggers_to_obs_points()

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, POINT(1 1))])'
        assert test_string == reference_string
Example #13
    def test_add_triggers_add_east_north(self):
        """ Updating coordinates from NULL should create geometry. """
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "east", "north") VALUES ('rb1', NULL, NULL)''')

        utils.add_triggers_to_obs_points()

        utils.sql_alter_db(u"""update obs_points set east='1.0', north='2.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 2.0, POINT(1 2))])'
        assert test_string == reference_string
    def test_add_trigger_add_obsid_without_anything(self):
        """ Test that adding triggers and updating obsid from east, north don't set null values for previous obsid.
        :return:
        """
        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid) VALUES ('rb1')""")
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid) VALUES ('rb2')""")

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, None), (rb2, None, None, None)])'
        assert test_string == reference_string
 def do_it(self):
     """Update positions for all observations in self.observations"""
     # First find EPSG-ID for the CRS
     sql = r"""SELECT srid FROM geometry_columns where f_table_name = 'obs_points'"""
     ConnectionOK, result = utils.sql_load_fr_db(sql)
     EPSGID= str(result[0][0])
     #Then do the operation
     sql = r"""Update or ignore 'obs_points' SET Geometry=MakePoint(east, north, """
     sql += EPSGID
     sql += """) WHERE obsid IN """ + self.sqlpart2
     utils.sql_alter_db(sql)
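For reference, a hedged sketch of the SQL string the method assembles, with an assumed EPSG-ID and obsid selection:

EPSGID = '3006'               # hypothetical srid as fetched from geometry_columns
sqlpart2 = "('rb1', 'rb2')"   # hypothetical IN clause for the selected obsids
sql = "Update or ignore 'obs_points' SET Geometry=MakePoint(east, north, " + EPSGID + ") WHERE obsid IN " + sqlpart2
print(sql)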
    def test_add_trigger_add_east_north_not_nulling_east_north(self):
        """ Test that adding triggers and adding obsid from east, north don't set null values for previous obsid.
        :return:
        """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north) VALUES ('rb1', 1, 1)""")

        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north) VALUES ('rb2', 2, 2)""")

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, 1.0, None), (rb2, 2.0, 2.0, POINT(2 2))])'
        assert test_string == reference_string
Example #17
    def test_calcall(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid", "h_toc") VALUES ('rb1', 1)''')
        utils.sql_alter_db(u'''INSERT into w_levels ("obsid", "meas", "date_time") VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
        self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
        self.calclvl.FromDateTime.setDateTime(datestring_to_date(u'2000-01-01 00:00:00'))
        self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
        self.calclvl.ToDateTime.setDateTime(datestring_to_date(u'2010-01-01 00:00:00'))
        self.calclvl.calcall()

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, date_time, meas, h_toc, level_masl from w_levels'))
        reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
        assert test_string == reference_string
 def GetData(self, obsid = '', tablename='', debug = 'n'):            # returns all rows from the given table for one obsid
     # load all data for this obsid from the given table
     sql = r"""select * from """
     sql += tablename
     sql += r""" where obsid = '"""
     sql += obsid   
     sql += r"""'"""
     if tablename == 'stratigraphy':
         sql += r""" order by stratid"""
     if debug == 'y':
         utils.pop_up_info(sql)
     ConnectionOK, data = utils.sql_load_fr_db(sql)
     return ConnectionOK, data
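A hedged usage sketch (instance name, obsid and table are assumed):

ConnectionOK, data = report.GetData(obsid='rb1', tablename='stratigraphy')  # 'report' stands for an instance of the class above
if ConnectionOK:
    for row in data:
        print(row)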
    def test_add_trigger_add_geometry_not_nulling_geometry(self):
        """ Test that adding triggers and adding obsid don't set null values for previous obsid.
        :return:
        """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")
        #After the first: u'(True, [(rb1, None, None, POINT(1 1))])'

        utils.add_triggers_to_obs_points()
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb2', GeomFromText('POINT(2.0 2.0)', 3006))""")
        #After the second: u'(True, [(rb1, 1.0, 1.0, POINT(1 1)), (rb2, 2.0, 2.0, POINT(2 2))])'

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, POINT(1 1)), (rb2, 2.0, 2.0, POINT(2 2))])'
        assert test_string == reference_string
 def getlastcalibration(self):
     obsid = self.load_obsid_and_init()
     if not obsid=='':
         sql = """SELECT MAX(date_time), loggerpos FROM (SELECT date_time, (level_masl - (head_cm/100)) as loggerpos FROM w_levels_logger WHERE level_masl > -990 AND obsid = '"""
         sql += obsid
         sql += """')"""
         self.lastcalibr = utils.sql_load_fr_db(sql)[1]
         if self.lastcalibr[0][1] and self.lastcalibr[0][0]:
             text = """Last pos. for logger in """
             text += obsid
             text += """\nwas """ + str(self.lastcalibr[0][1]) + """ masl\nat """ +  str(self.lastcalibr[0][0])
         else:
             text = """There is no earlier known\nposition for the logger\nin """ + unicode(self.combobox_obsid.currentText())#self.obsid[0]
         self.INFO.setText(text)
    def test_update_trigger_add_east_north_not_nulling_east_north(self):
        """ Test that adding triggers and updating obsid from east, north don't set null values for previous obsid.
        :return:
        """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north, geometry) VALUES ('rb1', 1, 1, GeomFromText('POINT(1.0 1.0)', 3006))""")
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, east, north, geometry) VALUES ('rb2', 2, 2, GeomFromText('POINT(2.0 2.0)', 3006))""")

        utils.add_triggers_to_obs_points()

        utils.sql_alter_db(u'''UPDATE obs_points SET east = 3, north = 3 WHERE (obsid = 'rb1')''')

        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 3.0, 3.0, POINT(3 3)), (rb2, 2.0, 2.0, POINT(2 2))])'
        assert test_string == reference_string
    def calibrate(self):
        self.calib_help.setText("Calibrating")
        PyQt4.QtGui.QApplication.setOverrideCursor(PyQt4.QtCore.Qt.WaitCursor)
        obsid = self.load_obsid_and_init()
        if not obsid=='':        
            sanity1sql = """select count(obsid) from w_levels_logger where obsid = '""" +  obsid[0] + """'"""
            sanity2sql = """select count(obsid) from w_levels_logger where head_cm not null and head_cm !='' and obsid = '""" +  obsid[0] + """'"""
            if utils.sql_load_fr_db(sanity1sql)[1] == utils.sql_load_fr_db(sanity2sql)[1]: # This must only be done if head_cm exists for all data
                fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
                to_d_t = self.ToDateTime.dateTime().toPyDateTime()

                if self.loggerpos_masl_or_offset_state == 1:
                    self.update_level_masl_from_head(obsid, fr_d_t, to_d_t, self.LoggerPos.text())
                else:
                    self.update_level_masl_from_level_masl(obsid, fr_d_t, to_d_t, self.LoggerPos.text())

                self.getlastcalibration()
            else:
                utils.pop_up_info("Calibration aborted!!\nThere must not be empty cells or\nnull values in the 'head_cm' column!")
        else:
            self.INFO.setText("Select the observation point with logger data to be calibrated.")
        self.calib_help.setText("")
        PyQt4.QtGui.QApplication.restoreOverrideCursor()
 def __init__(self, observations=[]):#observations is supposed to be a list of unicode strings
     #turn into a new list of python utf-8 byte strings (leaves the caller's list and the mutable default untouched)
     self.observations = [obs.encode('utf-8') for obs in observations]
     #turn the list into a string and encode it to a utf-8 byte string to enable replace
     self.sqlpart2 = (str(self.observations).encode('utf-8').replace('[','(')).replace(']',')')
     """check whether there are observations without coordinates"""
     sql = r"""select obsid from obs_points where (east is null or east ='' or  north is null or north = '') and obsid in """ + self.sqlpart2
     ConnectionOK, result = utils.sql_load_fr_db(sql)
     if len(result)==0:
         self.do_it()
     else:
         utils.pop_up_info("Coordinates are missing for\n" + result[0][0] + "\nPositions (geometry) will not be updated.")
Example #24
    def test_add_triggers_not_deleting_geom_when_one_east_north_null(self):
        """ Adding triggers should not automatically delete geometry when east OR north is NULL """
        utils.sql_alter_db(u"""INSERT INTO obs_points (obsid, geometry) VALUES ('rb1', GeomFromText('POINT(1.0 1.0)', 3006))""")

        utils.add_triggers_to_obs_points()

        utils.sql_alter_db(u"""update obs_points set east=X(geometry) where east is null and geometry is not null""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 1.0, None, POINT(1 1))])'
        assert test_string == reference_string

        utils.sql_alter_db(u"""update obs_points set east='2.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 2.0, None, POINT(1 1))])'
        assert test_string == reference_string

        utils.sql_alter_db(u"""update obs_points set east=NULL""")
        utils.sql_alter_db(u"""update obs_points set north='2.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, 2.0, POINT(1 1))])'
        assert test_string == reference_string

        utils.sql_alter_db(u"""update obs_points set east='3.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 3.0, 2.0, POINT(3 2))])'
        assert test_string == reference_string

        utils.sql_alter_db(u"""update obs_points set east='4.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 4.0, 2.0, POINT(4 2))])'
        assert test_string == reference_string

        utils.sql_alter_db(u"""update obs_points set east=NULL, north=NULL""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, None, None, POINT(4 2))])'
        assert test_string == reference_string

        utils.sql_alter_db(u"""update obs_points set east='5.0', north='6.0'""")
        test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'select obsid, east, north, AsText(geometry) from obs_points'))
        reference_string = u'(True, [(rb1, 5.0, 6.0, POINT(5 6))])'
        assert test_string == reference_string
 def PopulateFilterList(self, table, QListWidgetname='', filtercolumn=None):
     sql = "select distinct " + unicode(filtercolumn) + " from " + table + " order by " + unicode(filtercolumn)
     ConnectionOK, list_data=utils.sql_load_fr_db(sql)
     for post in list_data:
         item = QtGui.QListWidgetItem(unicode(post[0]))
         getattr(self, QListWidgetname).addItem(item)
Example #26
    def test_full_into_zz_flowtype(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("2")''')
        #utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("5")''')
        f = [u'LOCATION;DATE;TIME;VALUE;TYPE\n',
            u'5.2892.level;12-12-2016;10:02:49;comment;l.comment\n',
            u'5.2892.level;12-12-2016;10:02:49;123;meas.m\n',
            u'5.2892.level;12-12-2016;10:02:57;onlycomment;l.comment\n',
            u'2.2892.flow;12-12-2016;10:03:07;123;Aveflow.m3/s\n',
            u'2.2892.flow;12-12-2016;10:03:15;onlycomment;f.comment\n',
            u'2.2892.comment;12-12-2016;10:03:24;onlycomment;comment\n']

        with utils.tempinput(''.join(f)) as filename:
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            @mock.patch('import_fieldlogger.utils.askuser')
            @mock.patch('import_fieldlogger.utils.NotFoundQuestion')
            @mock.patch('import_fieldlogger.utils.QtGui.QFileDialog.getOpenFileNames')
            @mock.patch('import_fieldlogger.utils.QtGui.QInputDialog.getText')
            @mock.patch('import_fieldlogger.utils.MessagebarAndLog')
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            def _test(self, filename, mock_MessagebarAndLog, mock_charset, mock_savefilename, mock_ask_instrument, mock_askuser):
                mock_charset.return_value = ('utf-8', True)
                mock_savefilename.return_value = [filename]
                mock_ask_instrument.return_value.value = u'testid'

                def side_effect(*args, **kwargs):
                    mock_result = mock.MagicMock()
                    if len(args) > 1:
                        if args[1].startswith(u'Do you want to confirm'):
                            mock_result.result = 0
                            return mock_result
                        elif args[1].startswith(u'Do you want to import all'):
                            mock_result.result = 0
                            return mock_result
                        elif args[1].startswith(u'Please note!\nForeign keys'):
                            mock_result.result = 1
                            return mock_result
                        elif args[1].startswith(u'Please note!\nThere are'):
                            mock_result.result = 1
                            return mock_result
                        elif args[1].startswith(u'It is a strong recommendation'):
                            mock_result.result = 0
                            return mock_result

                mock_askuser.side_effect = side_effect


                ms = MagicMock()
                ms.settingsdict = OrderedDict()
                importer = FieldloggerImport(self.iface.mainWindow(), ms)
                importer.parse_observations_and_populate_gui()

                #Set settings:
                for setting in importer.settings:
                    if isinstance(setting, import_fieldlogger.StaffQuestion):
                        setting.staff = u'teststaff'

                stored_settings = [[u'f.comment', [[u'import_method', u'comments']]],
                                   [u'Aveflow.m3/s', [[u'import_method', u'w_flow'], [u'flowtype', u'Momflow2'], [u'unit', u'aunit']]]]

                importer.set_parameters_using_stored_settings(stored_settings, importer.parameter_imports)
                importer.start_import(importer.observations)

            _test(self, filename)

            test_string = create_test_string(dict([(k, utils.sql_load_fr_db(u'select * from %s'%k)) for k in (u'w_flow', u'zz_staff', u'comments', u'zz_flowtype')]))
            reference_string = u'{comments: (True, [(2, 2016-12-12 10:03:15, onlycomment, teststaff)]), w_flow: (True, [(2, testid, Momflow2, 2016-12-12 10:03:07, 123.0, aunit, None)]), zz_flowtype: (True, [(Accvol, Accumulated volume), (Momflow, Momentary flow rate), (Aveflow, Average flow since last reading), (Momflow2, None)]), zz_staff: (True, [(teststaff, None)])}'
            assert test_string == reference_string
    def __init__(self, obsid='', settingsdict = {}):
        #open the report file for writing
        reportfolder = os.path.join(QDir.tempPath(), 'midvatten_reports')
        if not os.path.exists(reportfolder):
            os.makedirs(reportfolder)
        reportpath = os.path.join(reportfolder, "drill_report.html")
        logopath = os.path.join(os.sep,os.path.dirname(__file__),"..","about","midvatten_logga.png")
        imgpath = os.path.join(os.sep,os.path.dirname(__file__),"..","reports")
        f = codecs.open(reportpath, "wb", "utf-8")
        
        #write some initial html: meta/header, title, obsid heading and logo
        rpt = r"""<meta http-equiv="content-type" content="text/html; charset=utf-8" />""" 
        rpt += r"""<head><title>%s General report from Midvatten plugin for QGIS</title></head>"""%obsid
        rpt += r"""<html><TABLE WIDTH=100% BORDER=0 CELLPADDING=1 CELLSPACING=1><TR VALIGN=TOP><TD WIDTH=15%><h3 style="font-family:'arial';font-size:18pt; font-weight:600">"""
        rpt += obsid
        if  utils.getcurrentlocale() == 'sv_SE':
            rpt += ''.join([r'''</h3><img src="''', os.path.join(imgpath, 'for_general_report_sv.png'), r'''" /><br><img src=''', r"""'"""])
            #rpt += r"""</h3><img src="for_general_report_sv.png" /><br><img src='"""
        else:
            rpt += ''.join([r'''</h3><img src="''', os.path.join(imgpath, 'for_general_report.png'), r'''" /><br><img src=''', r"""'"""])
            #rpt += r"""</h3><img src="for_general_report.png" /><br><img src='"""
        rpt += logopath
        rpt +="""' /></TD><TD WIDTH=85%><TABLE WIDTH=100% BORDER=1 CELLPADDING=4 CELLSPACING=3><TR VALIGN=TOP><TD WIDTH=50%><P><U><B>"""
        if  utils.getcurrentlocale() == 'sv_SE':
            rpt += u'Allmän information' 
        else:
            rpt += u'General information' 
        rpt += r"""</B></U></P><TABLE style="font-family:'arial'; font-size:10pt; font-weight:400; font-style:normal;" WIDTH=100% BORDER=0 CELLPADDING=0 CELLSPACING=1><COL WIDTH=43*><COL WIDTH=43*>"""
        f.write(rpt)
        
        # GENERAL DATA UPPER LEFT QUADRANT
        ConnectionOK, GeneralData = self.GetData(obsid, 'obs_points', 'n')#MacOSX fix1
        #utils.pop_up_info(str(ConnectionOK))#debug
        if ConnectionOK==True:
            result2 = (utils.sql_load_fr_db(r"""SELECT srid FROM geometry_columns where f_table_name = 'obs_points'""")[1])[0][0]
            CRS = utils.returnunicode(result2) #1st we need crs
            result3 = (utils.sql_load_fr_db(r"""SELECT ref_sys_name FROM spatial_ref_sys where srid =""" + CRS)[1])[0][0]
            CRSname = utils.returnunicode(result3) # and crs name
            if  utils.getcurrentlocale() == 'sv_SE':
                reportdata_1 = self.rpt_upper_left_sv(GeneralData, CRS, CRSname)
            else:
                reportdata_1 = self.rpt_upper_left(GeneralData, CRS, CRSname)
            f.write(reportdata_1)

            rpt = r"""</TABLE></TD><TD WIDTH=50%><P><U><B>"""
            if  utils.getcurrentlocale() == 'sv_SE':
                rpt += u'Lagerföljd' 
            else:
                rpt += u'Stratigraphy' 
            rpt += r"""</B></U></P><TABLE style="font-family:'arial'; font-size:10pt; font-weight:400; font-style:normal;" WIDTH=100% BORDER=0 CELLPADDING=0 CELLSPACING=1><COL WIDTH=43*><COL WIDTH=43*><COL WIDTH=43*><COL WIDTH=43*><COL WIDTH=43*><COL WIDTH=43*>"""
            f.write(rpt)        

            # STRATIGRAPHY DATA UPPER RIGHT QUADRANT
            StratData = self.GetData(obsid, 'stratigraphy', 'n')[1] #MacOSX fix1
            if  utils.getcurrentlocale() == 'sv_SE':
                reportdata_2 = self.rpt_upper_right_sv(StratData)
            else:
                reportdata_2 = self.rpt_upper_right(StratData)
            f.write(reportdata_2)

            rpt = r"""</TABLE></TD></TR><TR VALIGN=TOP><TD WIDTH=50%><P><U><B>""" 
            if  utils.getcurrentlocale() == 'sv_SE':
                rpt += u'Kommentarer' 
            else:
                rpt += u'Comments' 
            rpt += r"""</B></U></P>"""
            f.write(rpt)        

            # COMMENTS LOWER LEFT QUADRANT
            reportdata_3 = self.rpt_lower_left(GeneralData)
            f.write(reportdata_3)

            rpt = r"""</TD><TD WIDTH=50%><P><U><B>""" 
            if  utils.getcurrentlocale() == 'sv_SE':
                rpt += u'Vattennivåer' 
            else:
                rpt += u'Water levels' 
            rpt += r"""</B></U></P>"""
            f.write(rpt)

            # WATER LEVEL STATISTICS LOWER RIGHT QUADRANT
            meas_or_level_masl, statistics = GetStatistics(obsid)#MacOSX fix1
            if  utils.getcurrentlocale() == 'sv_SE':
                reportdata_4 = self.rpt_lower_right_sv(statistics,meas_or_level_masl)
            else:
                reportdata_4 = self.rpt_lower_right(statistics,meas_or_level_masl)
            f.write(reportdata_4)
            
            f.write(r"""</TD></TR></TABLE></TD></TR></TABLE>""")    
            f.write("\n</p></body></html>")        
            f.close()
            #print reportpath#debug
            QDesktopServices.openUrl(QUrl.fromLocalFile(reportpath))
Example #28
    def test_full_integration_test_to_db(self):
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1202")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1608")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1615")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1505")''')
        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("Rb1512")''')
        utils.sql_alter_db(u'''INSERT INTO zz_staff ("staff") VALUES ("teststaff")''')

        utils.sql_alter_db(u'''INSERT or ignore INTO zz_flowtype ("type") VALUES ("Accvol")''')

        f = [
            u"Location;date_time;value;comment\n",
            u"Rb1202.sample;30-03-2016;15:31:30;hej2;s.comment\n",
            u"Rb1608.level;30-03-2016;15:34:40;testc;l.comment\n",
            u"Rb1615.flow;30-03-2016;15:30:09;357;f.Accvol.m3\n",
            u"Rb1615.flow;30-03-2016;15:30:09;gick bra;f.comment\n",
            u"Rb1608.level;30-03-2016;15:34:13;ergv;l.comment\n",
            u"Rb1608.level;30-03-2016;15:34:13;555;l.meas.m\n",
            u"Rb1512.sample;30-03-2016;15:31:30;899;s.turbiditet.FNU\n",
            u"Rb1505.quality;30-03-2016;15:29:26;hej;q.comment\n",
            u"Rb1505.quality;30-03-2016;15:29:26;863;q.konduktivitet.µS/cm\n",
            u"Rb1512.quality;30-03-2016;15:30:39;test;q.comment\n",
            u"Rb1512.quality;30-03-2016;15:30:39;67;q.syre.mg/L\n",
            u"Rb1512.quality;30-03-2016;15:30:39;8;q.temperatur.grC\n",
            u"Rb1512.quality;30-03-2016;15:30:40;58;q.syre.%\n",
            ]

        with utils.tempinput(''.join(f)) as filename:
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            @mock.patch('import_fieldlogger.utils.askuser')
            @mock.patch('import_fieldlogger.utils.NotFoundQuestion')
            @mock.patch('import_fieldlogger.utils.QtGui.QFileDialog.getOpenFileNames')
            @mock.patch('import_fieldlogger.utils.QtGui.QInputDialog.getText')
            @mock.patch('import_fieldlogger.utils.MessagebarAndLog')
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            def _full_integration_test_to_db(self, filename, mock_MessagebarAndLog, mock_charset, mock_savefilename, mock_ask_instrument, mock_vacuum):
                mock_vacuum.return_value.result = 1
                mock_charset.return_value = ('utf-8', True)
                mock_savefilename.return_value = [filename]
                mock_ask_instrument.return_value.value = u'testid'

                ms = MagicMock()
                ms.settingsdict = OrderedDict()
                importer = FieldloggerImport(self.iface.mainWindow(), ms)
                importer.parse_observations_and_populate_gui()

                #Set settings:
                for setting in importer.settings:
                    if isinstance(setting, import_fieldlogger.StaffQuestion):
                        setting.staff = u'teststaff'

                stored_settings = [[u's.comment', [[u'import_method', u'comments']]],
                                   [u'l.comment', [[u'import_method', u'comments']]],
                                   [u'f.comment', [[u'import_method', u'comments']]],
                                   [u'q.comment', [[u'import_method', u'comments']]],
                                   [u'l.meas.m', [[u'import_method', u'w_levels']]],
                                   [u'f.Accvol.m3', [[u'import_method', u'w_flow'], [u'flowtype', u'Accvol'], [u'unit', u'm3']]],
                                   [u's.turbiditet.FNU', [[u'import_method', u'w_qual_field'], [u'parameter', u'turbiditet'], [u'unit', u'FNU'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.konduktivitet.µS/cm', [[u'import_method', u'w_qual_field'], [u'parameter', u'konduktivitet'], [u'unit', u'µS/cm'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.syre.mg/L', [[u'import_method', u'w_qual_field'], [u'parameter', u'syre'], [u'unit', u'mg/L'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.syre.%', [[u'import_method', u'w_qual_field'], [u'parameter', u'syre'], [u'unit', u'%'], [u'depth', u''], [u'instrument', u'testid']]],
                                   [u'q.temperatur.grC', [[u'import_method', u'w_qual_field'], [u'parameter', u'temperatur'], [u'unit', u'grC'], [u'depth', u''], [u'instrument', u'testid']]]]
                importer.set_parameters_using_stored_settings(stored_settings, importer.parameter_imports)
                importer.start_import(importer.observations)

            _full_integration_test_to_db(self, filename)

            test_string = create_test_string(dict([(k, utils.sql_load_fr_db(u'select * from %s'%k)) for k in (u'w_levels', u'w_qual_field', u'w_flow', u'zz_staff', u'comments')]))
            reference_string = u'{comments: (True, [(Rb1202, 2016-03-30 15:31:30, hej2, teststaff), (Rb1608, 2016-03-30 15:34:40, testc, teststaff)]), w_flow: (True, [(Rb1615, testid, Accvol, 2016-03-30 15:30:09, 357.0, m3, gick bra)]), w_levels: (True, [(Rb1608, 2016-03-30 15:34:13, 555.0, None, None, ergv)]), w_qual_field: (True, [(Rb1512, teststaff, 2016-03-30 15:30:39, testid, syre, 67.0, 67, mg/L, None, test), (Rb1512, teststaff, 2016-03-30 15:31:30, testid, turbiditet, 899.0, 899, FNU, None, None), (Rb1505, teststaff, 2016-03-30 15:29:26, testid, konduktivitet, 863.0, 863, µS/cm, None, hej), (Rb1512, teststaff, 2016-03-30 15:30:40, testid, syre, 58.0, 58, %, None, None), (Rb1512, teststaff, 2016-03-30 15:30:39, testid, temperatur, 8.0, 8, grC, None, test)]), zz_staff: (True, [(teststaff, None)])}'
            assert test_string == reference_string
Example #29
 def get_selected_datetimes(self):
     sql1 = self.big_sql()
     sql2 = r""" select distinct date_time from (""" + sql1 + r""") order by date_time"""
     ConnOK, self.date_times = utils.sql_load_fr_db(sql2)
Example #30
 def get_selected_obstypes(self):
     sql = "select obsid, type from obs_points where obsid in " +  str(self.observations).encode('utf-8').replace('[','(').replace(']',')')
     ConnOK, types = utils.sql_load_fr_db(sql)
     self.typedict = dict(types)#make it a dictionary
     sql = "select distinct type from obs_points where obsid in " +  str(self.observations).encode('utf-8').replace('[','(').replace(']',')')
     ConnOK, self.distincttypes = utils.sql_load_fr_db(sql)
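A tiny sketch (made-up rows) of how the (obsid, type) rows returned by sql_load_fr_db become the lookup dictionary above:

types = [('rb1', 'borehole'), ('rb2', 'spring')]  # hypothetical rows as returned in position [1]
typedict = dict(types)
print(typedict['rb1'])  # borehole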