def get_statistics_for_single_obsid(obsid ='', table='w_levels', data_columns=None):
    """Return (data_column, [min, median, count, max]) for one obsid.

    The candidate column (default 'meas' then 'level_masl') holding the
    strictly highest number of non-null values is chosen for the
    min/median/max queries; on a tie the earlier candidate wins.
    """
    if data_columns is None:
        data_columns = ['meas', 'level_masl']
    # Slots: [min, median, count, max]
    stats = [0, 0, 0, 0]
    chosen_column = data_columns[0]  # fall back to the first candidate
    # Count values per candidate column; keep the best-populated one.
    for candidate in data_columns:
        sql = r"""select Count(%s) from %s where obsid = '%s'""" % (candidate, table, obsid)
        connection_ok, count_rows = db_utils.sql_load_fr_db(sql)
        if count_rows and count_rows[0][0] > stats[2]:
            chosen_column = candidate
            stats[2] = count_rows[0][0]
    # Minimum value.
    connection_ok, min_rows = db_utils.sql_load_fr_db(
        r"""select min(%s) from %s where obsid = '%s'""" % (chosen_column, table, obsid))
    if min_rows:
        stats[0] = min_rows[0][0]
    # Median value (computed by the db helper).
    median_value = db_utils.calculate_median_value(table, chosen_column, obsid)
    if median_value:
        stats[1] = median_value
    # Maximum value.
    connection_ok, max_rows = db_utils.sql_load_fr_db(
        r"""select max(%s) from %s where obsid = '%s'""" % (chosen_column, table, obsid))
    if max_rows:
        stats[3] = max_rows[0][0]
    return chosen_column, stats
def test_add_view_obs_points_obs_lines_add(self, mock_messagebar):
    """Verify utils.add_view_obs_points_obs_lines() creates both views from scratch.

    Drops view_obs_points/view_obs_lines and their registrations in
    views_geometry_columns, asserts they are gone, re-adds them, and
    asserts both the views and the registrations exist again.
    """
    # Remove the views and their spatialite registrations.
    db_utils.sql_alter_db('''DROP VIEW IF EXISTS view_obs_points;''')
    db_utils.sql_alter_db('''DROP VIEW IF EXISTS view_obs_lines;''')
    db_utils.sql_alter_db(
        '''DELETE FROM views_geometry_columns WHERE view_name IN ('view_obs_points', 'view_obs_lines');'''
    )
    # Precondition: neither view exists any longer.
    assert not any([
        db_utils.verify_table_exists('view_obs_points'),
        db_utils.verify_table_exists('view_obs_lines')
    ])
    views_geometry_columns = db_utils.sql_load_fr_db(
        '''SELECT view_name FROM views_geometry_columns WHERE view_name IN ('view_obs_points', 'view_obs_lines') ORDER BY view_name;'''
    )[1]
    print(str(views_geometry_columns))
    assert views_geometry_columns == []
    # Re-create the views.
    utils.add_view_obs_points_obs_lines()
    print(str(mock_messagebar.mock_calls))
    # The user must be told to reload the layers.
    assert call.info(
        bar_msg=
        'Views added. Please reload layers (Midvatten>Load default db-layers to qgis or "F7").'
    ) in mock_messagebar.mock_calls
    # Postcondition: both views exist and are registered again.
    assert all([
        db_utils.verify_table_exists('view_obs_points'),
        db_utils.verify_table_exists('view_obs_lines')
    ])
    views_geometry_columns = db_utils.sql_load_fr_db(
        '''SELECT view_name FROM views_geometry_columns WHERE view_name IN ('view_obs_points', 'view_obs_lines') ORDER BY view_name;'''
    )[1]
    print(str(views_geometry_columns))
    assert views_geometry_columns == [('view_obs_lines', ), ('view_obs_points', )]
def calculateaveflow(self):
    """Differentiate accumulated-volume readings into average flows.

    For every distinct (obsid, instrumentid) pair that has 'Accvol'
    readings within the user-selected date interval, consecutive
    readings are converted into average flows (l/s) and written back to
    w_flow as 'Aveflow' rows. The user is notified if any negative flow
    was encountered.
    """
    utils.start_waiting_cursor()
    date_from = self.FromDateTime.dateTime().toPyDateTime()
    date_to = self.ToDateTime.dateTime().toPyDateTime()
    # Identify distinct set of obsid and instrumentid with Accvol-data
    # and within the user-defined date_time-interval:
    sql = """SELECT DISTINCT obsid, instrumentid FROM (SELECT * FROM w_flow WHERE flowtype = 'Accvol' AND date_time >= '%s' AND date_time <= '%s' AND obsid IN (%s))"""%(date_from,date_to, utils.sql_unicode_list(self.observations))
    #utils.pop_up_info(sql)#debug
    # The unique set of obsid and instrumentid is kept in uniqueset.
    uniqueset = db_utils.sql_load_fr_db(sql)[1]
    negativeflow = False
    for pyobsid, pyinstrumentid in uniqueset:
        sql = """select date_time, reading from w_flow where flowtype = 'Accvol' and obsid='%s' and instrumentid='%s' and date_time >='%s' and date_time <='%s' order by date_time"""%(pyobsid,pyinstrumentid,date_from,date_to)
        recs = db_utils.sql_load_fr_db(sql)[1]
        """Transform data to a numpy.recarray"""
        # Define the structured dtype (date_time + reading value).
        My_format = [('date_time', datetime.datetime), ('values', float)]
        table = np.array(recs, dtype=My_format)  # structured NDARRAY
        # RECARRAY view makes the two columns into callable objects,
        # i.e. write table2.values.
        table2 = table.view(np.recarray)
        # This is where Aveflow is calculated for each obs and also
        # written to db.
        for j, row in enumerate(table2):
            # First row is the "start-value" for Accvol; there is no
            # Aveflow to be calculated for it.
            if j > 0:
                # Convert to L since Accvol is supposed to be in m3.
                Volume = (table2.values[j] - table2.values[j-1])*1000
                # datestr2num returns days as float; convert to seconds.
                DeltaTime = 24*3600*(datestr2num(table2.date_time[j]) - datestr2num(table2.date_time[j-1]))
                Aveflow = Volume/DeltaTime  # L/s
                if Aveflow < 0:
                    negativeflow = True
                sql = """insert or ignore into w_flow(obsid,instrumentid,flowtype,date_time,reading,unit) values('%s','%s','Aveflow','%s','%s','l/s')"""%(pyobsid,pyinstrumentid,table2.date_time[j],Aveflow)
                db_utils.sql_alter_db(sql)
    if negativeflow:
        utils.MessagebarAndLog.info(bar_msg=ru(QCoreApplication.translate('Calcave', "Please notice that negative flow was encountered.")))
    utils.stop_waiting_cursor()
    self.close()
def test_stratigraphy_gap(self, mock_skippopup, mock_messagebar):
    """Stratigraphy dialog fills gaps between strata with an empty stratum.

    obsid '1' has strata 0-1 m and 2-4.5 m; the expected result contains
    an inserted empty stratum covering the 1-2 m gap.

    :param mock_skippopup: mocked skip-popup helper (must stay uncalled)
    :param mock_messagebar: mocked message bar (must stay uncalled)
    :return: None
    """
    # Three obs_points with ground-surface levels and geometries.
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('1', 5, ST_GeomFromText('POINT(633466 711659)', 3006))''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('2', 10, ST_GeomFromText('POINT(6720727 016568)', 3006))''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('3', 20, ST_GeomFromText('POINT(6720728 016569)', 3006))''')
    # Two strata for obsid '1' with a gap between depth 1 and 2.
    db_utils.sql_alter_db('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geology, geoshort, capacity, development) VALUES ('1', 1, 0, 1, 'sand', 'sand', '3', 'j')''')
    db_utils.sql_alter_db('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geology, geoshort, capacity, development) VALUES ('1', 2, 2, 4.5, 'morän', 'morän', '3', 'j')''')
    self.create_and_select_vlayer()
    print(str(self.vlayer.isValid()))
    print(str(db_utils.sql_load_fr_db('select * from obs_points')))
    print(str(db_utils.sql_load_fr_db('select * from stratigraphy')))
    dlg = Stratigraphy(self.iface, self.vlayer, self.ms.settingsdict)
    print(str(mock_messagebar.mock_calls))
    print(str(mock_skippopup.mock_calls))
    dlg.showSurvey()
    # String representations of the dialog data for comparison.
    test = utils.anything_to_string_representation(dlg.data)
    test_survey = utils.anything_to_string_representation(repr(dlg.data['1']))
    test_strata = utils.anything_to_string_representation(utils.returnunicode(dlg.data['1'].strata, keep_containers=True))
    # No popups or messagebar warnings may have been produced.
    assert len(mock_skippopup.mock_calls) == 0
    assert len(mock_messagebar.mock_calls) == 0
    assert test == """{"1": SURVEY('1', 5.000000, '<QgsPointXY: POINT(633466 711659)>')}"""
    assert test_survey == '''"SURVEY('1', 5.000000, '<QgsPointXY: POINT(633466 711659)>')"'''
    # Note the inserted empty stratum 2 covering the 1-2 m gap.
    assert test_strata == '''["strata(1, '3', 'sand', 'sand', 0.000000-1.000000)", "strata(2, '', '', '', 1.000000-2.000000)", "strata(3, '3', 'morän', 'moran', 2.000000-4.500000)"]'''
def get_selected_obstypes(self):
    """Load obsid->type mapping and the distinct types for the selection.

    Fills self.typedict (obsid -> type) and self.distincttypes from
    obs_points, restricted to self.observations.
    """
    in_list = utils.sql_unicode_list(self.observations)
    connection_ok, obsid_types = db_utils.sql_load_fr_db(
        "select obsid, type from obs_points where obsid in ({})".format(in_list))
    # Turn the (obsid, type) rows into a dictionary.
    self.typedict = dict(obsid_types)
    connection_ok, self.distincttypes = db_utils.sql_load_fr_db(
        "select distinct type from obs_points where obsid in ({})".format(in_list))
def test_interlab4_connection_table_only_1(self):
    """Import an interlab4 file using the obsid assignment table.

    'Demo1 vattenverk' resolves to obsid1 via the pre-filled
    zz_interlab4_obsid_assignment row; 'Demo2 vattenverk' is unknown and
    is resolved to 'anobsid' through the mocked NotFoundQuestion, which
    should also add a new assignment row.
    """
    db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('DV')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('obsid1')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('obsid2')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('anobsid')''')
    # Pre-existing assignment: 'Demo1 vattenverk' -> obsid1.
    db_utils.sql_alter_db('''INSERT INTO zz_interlab4_obsid_assignment (specifik_provplats, provplatsnamn, obsid) VALUES ('Demo', 'Demo1 vattenverk', 'obsid1')''')
    # Minimal interlab4 fixture: two samples, three parameter rows.
    interlab4_lines = (
        '#Interlab',
        '#Version=4.0',
        '#Tecken=UTF-8',
        '#Textavgränsare=Nej',
        '#Decimaltecken=,',
        '#Provadm',
        'Lablittera;Namn;Adress;Postnr;Ort;Kommunkod;Projekt;Laboratorium;Provtyp;Provtagare;Registertyp;ProvplatsID;Provplatsnamn;Specifik provplats;Provtagningsorsak;Provtyp;Provtypspecifikation;Bedömning;Kemisk bedömning;Mikrobiologisk bedömning;Kommentar;År;Provtagningsdatum;Provtagningstid;Inlämningsdatum;Inlämningstid;',
        'DM-990908-2773;MFR;PG Vejdes väg 15;351 96;Växjö;0780;Demoproj;Demo-Laboratoriet;NSG;DV;;;Demo1 vattenverk;Demo;;Dricksvatten enligt SLVFS 2001:30;Utgående;Nej;Tjänligt;;;2010;2010-09-07;10:15;2010-09-07;14:15;',
        'DM-990908-2774;MFR;PG Vejdes väg 15;351 96;Växjö;0780;Demoproj;Demo-Laboratoriet;NSG;DV;;;Demo2 vattenverk;Demo;;Dricksvatten enligt SLVFS 2001:30;Utgående;Nej;Tjänligt;;;2010;2010-09-07;10:15;2010-09-07;14:15;',
        '#Provdat',
        'Lablittera;Metodbeteckning;Parameter;Mätvärdetext;Mätvärdetal;Mätvärdetalanm;Enhet;Rapporteringsgräns;Detektionsgräns;Mätosäkerhet;Mätvärdespår;Parameterbedömning;Kommentar;',
        'DM-990908-2773;SS-EN ISO 7887-1/4;Kalium;<2,5;2,5;;mg/l Pt;;;;;;;',
        'DM-990908-2773;SS-EN ISO 7887-1/4;Kalium;<1;1;;mg/l Pt;;;;;;;',
        'DM-990908-2774;SS-EN ISO 7887-1/4;Kalium;<15;15;;mg/l Pt;;;;;;;',
        '#S**t'
    )
    with utils.tempinput('\n'.join(interlab4_lines), 'utf-8') as filename:
        # Inner helper so the mock.patch decorators can be applied
        # while the tempfile context is alive.
        @mock.patch('midvatten_utils.NotFoundQuestion')
        @mock.patch('import_data_to_db.utils.Askuser', mocks_for_tests.mock_askuser.get_v)
        @mock.patch('qgis.utils.iface', autospec=True)
        @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
        @mock.patch('import_data_to_db.qgis.PyQt.QtWidgets.QFileDialog.getOpenFileNames')
        def _test(self, filename, mock_filenames, mock_skippopup, mock_iface, mock_not_found_question):
            # Any unresolved provplats is answered with 'anobsid'.
            mock_not_found_question.return_value.answer = 'ok'
            mock_not_found_question.return_value.value = 'anobsid'
            mock_not_found_question.return_value.reuse_column = 'obsid'
            mock_filenames.return_value = [[filename]]
            importer = Interlab4Import(self.iface.mainWindow(), self.midvatten.ms)
            importer.init_gui()
            importer.select_files_button.click()
            importer.use_obsid_assignment_table.setChecked(True)
            importer.start_import_button.click()
        _test(self, filename)
        test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('''SELECT * FROM w_qual_lab'''))
        reference_string = r'''(True, [(obsid1, None, DM-990908-2773, Demoproj, DV, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium, 2.5, <2,5, mg/l Pt, provtyp: Dricksvatten enligt SLVFS 2001:30. provtypspecifikation: Utgående. bedömning: Nej. kemisk bedömning: Tjänligt. provplatsnamn: Demo1 vattenverk. specifik provplats: Demo), (obsid1, None, DM-990908-2773, Demoproj, DV, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium (dubblett 1), 1.0, <1, mg/l Pt, provtyp: Dricksvatten enligt SLVFS 2001:30. provtypspecifikation: Utgående. bedömning: Nej. kemisk bedömning: Tjänligt. provplatsnamn: Demo1 vattenverk. specifik provplats: Demo), (anobsid, None, DM-990908-2774, Demoproj, DV, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium, 15.0, <15, mg/l Pt, provtyp: Dricksvatten enligt SLVFS 2001:30. provtypspecifikation: Utgående. bedömning: Nej. kemisk bedömning: Tjänligt. provplatsnamn: Demo2 vattenverk. specifik provplats: Demo)])'''
        print(reference_string)
        print(test_string)
        assert test_string == reference_string
        # The unresolved 'Demo2 vattenverk' must have been added to the
        # assignment table, mapped to 'anobsid'.
        test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('''SELECT * FROM zz_interlab4_obsid_assignment'''))
        reference_string = '(True, [(Demo, Demo1 vattenverk, obsid1), (Demo, Demo2 vattenverk, anobsid)])'
        assert test_string == reference_string
def get_selected_obstypes(self):
    """Load obsid->type mapping and the distinct types for the selection.

    Fills self.typedict (obsid -> type) and self.distincttypes from
    obs_points, restricted to self.observations.

    Fix: the previous implementation built the SQL IN-list with
    str(self.observations).encode('utf-8').replace('[', '(') which fails
    on Python 3 — bytes.replace() requires bytes arguments, and the
    subsequent str + bytes concatenation raises TypeError. Use
    utils.sql_unicode_list() instead, matching the other implementation
    of this method in the codebase (which also quotes values correctly).
    """
    sql = "select obsid, type from obs_points where obsid in ({})".format(
        utils.sql_unicode_list(self.observations))
    ConnOK, types = db_utils.sql_load_fr_db(sql)
    self.typedict = dict(types)  # make it a dictionary
    sql = "select distinct type from obs_points where obsid in ({})".format(
        utils.sql_unicode_list(self.observations))
    ConnOK, self.distincttypes = db_utils.sql_load_fr_db(sql)
def test_interlab4_full_test_to_db_staff_0(self):
    """Import an interlab4 file whose 'Provtagare' (staff) field is '0'.

    Verifies the rows land in w_qual_lab with staff '0' and that '0' is
    auto-added to zz_staff rather than being dropped as falsy.
    """
    db_utils.sql_alter_db(
        u'''INSERT INTO obs_points (obsid) VALUES ('anobsid')''')
    # Minimal interlab4 fixture: one sample with staff '0', two rows of
    # the same parameter (second becomes a 'dubblett').
    interlab4_lines = (
        '#Interlab',
        '#Version=4.0',
        '#Tecken=UTF-8',
        '#Textavgränsare=Nej',
        '#Decimaltecken=,',
        '#Provadm',
        'Lablittera;Namn;Adress;Postnr;Ort;Kommunkod;Projekt;Laboratorium;Provtyp;Provtagare;Registertyp;ProvplatsID;Provplatsnamn;Specifik provplats;Provtagningsorsak;Provtyp;Provtypspecifikation;Bedömning;Kemisk bedömning;Mikrobiologisk bedömning;Kommentar;År;Provtagningsdatum;Provtagningstid;Inlämningsdatum;Inlämningstid;',
        'DM-990908-2773;MFR;PG Vejdes väg 15;351 96;Växjö;0780;Demoproj;Demo-Laboratoriet;NSG;0;;Demo1 vattenverk;;Föreskriven regelbunden undersökning enligt SLVFS 2001:30;Dricksvatten enligt SLVFS 2001:30;Utgående;Nej;Tjänligt;;;;2010;2010-09-07;10:15;2010-09-07;14:15;',
        '#Provdat',
        'Lablittera;Metodbeteckning;Parameter;Mätvärdetext;Mätvärdetal;Mätvärdetalanm;Enhet;Rapporteringsgräns;Detektionsgräns;Mätosäkerhet;Mätvärdespår;Parameterbedömning;Kommentar;',
        'DM-990908-2773;SS-EN ISO 7887-1/4;Kalium;<2,5;2,5;;mg/l Pt;;;;;;;',
        'DM-990908-2773;SS-EN ISO 7887-1/4;Kalium;<1;1;;mg/l Pt;;;;;;;',
        '#S**t')
    with utils.tempinput(u'\n'.join(interlab4_lines), 'utf-8') as filename:
        # Inner helper so the mock.patch decorators can be applied
        # while the tempfile context is alive.
        @mock.patch('midvatten_utils.NotFoundQuestion')
        @mock.patch('db_utils.QgsProject.instance', utils_for_tests.MidvattenTestSpatialiteNotCreated.mock_instance_settings_database)
        @mock.patch('import_data_to_db.utils.Askuser', mocks_for_tests.mock_askuser.get_v)
        @mock.patch('qgis.utils.iface', autospec=True)
        @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
        @mock.patch(
            'import_data_to_db.qgis.PyQt.QtWidgets.QFileDialog.getOpenFileNames'
        )
        def _test(self, filename, mock_filenames, mock_skippopup, mock_iface, mock_not_found_question):
            # Any unresolved provplats is answered with 'anobsid'.
            mock_not_found_question.return_value.answer = 'ok'
            mock_not_found_question.return_value.value = 'anobsid'
            mock_not_found_question.return_value.reuse_column = 'obsid'
            mock_filenames.return_value = [[filename]]
            importer = Interlab4Import(self.iface.mainWindow(), self.ms)
            importer.parse_observations_and_populate_gui()
            importer.start_import(
                importer.all_lab_results,
                importer.metadata_filter.get_selected_lablitteras())
        _test(self, filename)
        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db(u'''SELECT * FROM w_qual_lab'''))
        reference_string = '''(True, [(anobsid, None, DM-990908-2773, Demoproj, 0, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium, 2.5, <2,5, mg/l Pt, provtagningsorsak: Dricksvatten enligt SLVFS 2001:30. provtyp: Utgående. provtypspecifikation: Nej. bedömning: Tjänligt. provplatsid: Demo1 vattenverk. specifik provplats: Föreskriven regelbunden undersökning enligt SLVFS 2001:30), (anobsid, None, DM-990908-2773, Demoproj, 0, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium (dubblett 1), 1.0, <1, mg/l Pt, provtagningsorsak: Dricksvatten enligt SLVFS 2001:30. provtyp: Utgående. provtypspecifikation: Nej. bedömning: Tjänligt. provplatsid: Demo1 vattenverk. specifik provplats: Föreskriven regelbunden undersökning enligt SLVFS 2001:30)])'''
        print(reference_string)
        print(test_string)
        assert test_string == reference_string
        # Staff '0' must have been added to zz_staff.
        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db(u'''SELECT * FROM zz_staff'''))
        reference_string = '(True, [(0, None)])'
        assert test_string == reference_string
def test_add_view_obs_points_obs_lines_readd(self, mock_messagebar):
    """Verify utils.add_view_obs_points_obs_lines() is idempotent.

    The views already exist at the start; re-adding them must neither
    fail nor duplicate the views_geometry_columns registrations.
    """
    # Precondition: both views exist and are registered.
    assert all([db_utils.verify_table_exists('view_obs_points'), db_utils.verify_table_exists('view_obs_lines')])
    views_geometry_columns = db_utils.sql_load_fr_db('''SELECT view_name FROM views_geometry_columns WHERE view_name IN ('view_obs_points', 'view_obs_lines') ORDER BY view_name;''')[1]
    print(str(views_geometry_columns))
    assert views_geometry_columns == [('view_obs_lines',), ('view_obs_points',)]
    # Re-add while already present.
    utils.add_view_obs_points_obs_lines()
    print(str(mock_messagebar.mock_calls))
    assert call.info(bar_msg='Views added. Please reload layers (Midvatten>Load default db-layers to qgis or "F7").') in mock_messagebar.mock_calls
    # Postcondition: still exactly one registration per view.
    assert all([db_utils.verify_table_exists('view_obs_points'), db_utils.verify_table_exists('view_obs_lines')])
    views_geometry_columns = db_utils.sql_load_fr_db('''SELECT view_name FROM views_geometry_columns WHERE view_name IN ('view_obs_points', 'view_obs_lines') ORDER BY view_name;''')[1]
    print(str(views_geometry_columns))
    assert views_geometry_columns == [('view_obs_lines',), ('view_obs_points',)]
def test_calc_selected_dont_overwrite_dont_skip_nulls( self, mock_selected_obsids, mock_messagebar, mock_skippopup):
    """Calculate levels for selected obsids without overwriting.

    rb1 has h_toc but rb2 has h_toc NULL; with overwrite disabled the
    existing level_masl of rb2's second row must be kept and no new
    level_masl values written (see reference_string).
    """
    mock_selected_obsids.return_value = [u'rb1', u'rb2']
    # rb1: h_toc present, one measurement without level_masl.
    db_utils.sql_alter_db(
        u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
    db_utils.sql_alter_db(
        u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')'''
    )
    # rb2: h_toc NULL, one row without and one row with level_masl.
    db_utils.sql_alter_db(
        u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', NULL)''')
    db_utils.sql_alter_db(
        u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')'''
    )
    db_utils.sql_alter_db(
        u'''INSERT into w_levels (obsid, meas, level_masl, date_time) VALUES ('rb2', 555, 667, '2005-01-02 00:00:00')'''
    )
    # Date interval covering all inserted measurements.
    self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
    self.calclvl.FromDateTime.setDateTime(
        datestring_to_date(u'2000-01-01 00:00:00'))
    self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
    self.calclvl.ToDateTime.setDateTime(
        datestring_to_date(u'2010-01-01 00:00:00'))
    self.calclvl.checkBox_overwrite_prev.setChecked(False)
    self.calclvl.calcselected()
    #self.checkBox_skipnulls
    test_string = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db(
            u'SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid, date_time'
        ))
    reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, None, None), (rb2, 2005-01-01 00:00:00, 444.0, None, None), (rb2, 2005-01-02 00:00:00, 555.0, None, 667.0)])'
    print(str(mock_messagebar.mock_calls))
    print(test_string)
    assert test_string == reference_string
def get_piper_data(self):
    """Load piper-diagram data from w_qual_lab into numpy arrays.

    Fills self.obsnp_nospecformat (untyped ndarray),
    self.obsnp_specified_format (structured array) and
    self.obsrecarray (recarray view with attribute-style columns).
    """
    # These observations are supposed to be in mg/l and must be stored
    # in a Midvatten database, table w_qual_lab.
    sql = self.big_sql()
    print(sql)  # debug
    # Row layout: obsid, date_time, type, Cl_meqPl, HCO3_meqPl,
    # SO4_meqPl, Na+K_meqPl, Ca_meqPl, Mg_meqPl
    rows = db_utils.sql_load_fr_db(sql)[1]
    # ndarray without an explicit dtype.
    self.obsnp_nospecformat = np.array(rows)
    # NOTE on string dtypes: str/unicode/np.str_ all cause problems and
    # 'a35' truncates at 35 characters; the generic "object" dtype is
    # the least bad option — every string loads as full unicode.
    My_format = [('obsid', object), ('date_time', datetime.datetime),
                 ('obstype', object), ('Cl_meqPl', float),
                 ('HCO3_meqPl', float), ('SO4_meqPl', float),
                 ('NaK_meqPl', float), ('Ca_meqPl', float),
                 ('Mg_meqPl', float)]
    # Structured array with the explicit dtype above.
    self.obsnp_specified_format = np.array(rows, dtype=My_format)
    # recarray view: columns become attributes, e.g. table2.Cl_meqPl.
    self.obsrecarray = self.obsnp_specified_format.view(np.recarray)
def test_wlvllogg_import_from_diveroffice_files_skip_missing_water_level(self):
    """Import diveroffice files, skipping rows without a water head value.

    rb1's second row has an empty head column and must not be imported
    (skip_rows checked); the pre-existing rb1 row must survive; rb2 and
    rb3 import completely (rb3 also has conductivity).
    """
    # One tuple of file lines per logger file.
    files = [('Location=rb1',
              'Date/time,Water head[cm],Temperature[°C]',
              '2016/03/15 10:30:00,1,10',
              '2016/03/15 11:00:00,,101'),
             ('Location=rb2',
              'Date/time,Water head[cm],Temperature[°C]',
              '2016/04/15 10:30:00,2,20',
              '2016/04/15 11:00:00,21,201'),
             ('Location=rb3',
              'Date/time,Water head[cm],Temperature[°C],Conductivity[mS/cm]',
              '2016/05/15 10:30:00,3,30,5',
              '2016/05/15 11:00:00,31,301,6')
             ]
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('rb1')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('rb2')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('rb3')''')
    # Pre-existing logger row for rb1; must remain after import.
    db_utils.sql_alter_db('''INSERT INTO w_levels_logger (obsid, date_time, head_cm) VALUES ('rb1', '2016-03-15 10:31', '5.0')''')
    DiverofficeImport.charsetchoosen = 'utf-8'
    with utils.tempinput('\n'.join(files[0]), DiverofficeImport.charsetchoosen) as f1:
        with utils.tempinput('\n'.join(files[1]), DiverofficeImport.charsetchoosen) as f2:
            with utils.tempinput('\n'.join(files[2]), DiverofficeImport.charsetchoosen) as f3:
                filenames = [f1, f2, f3]
                # Inner helper so the mock.patch decorators can be
                # applied while the tempfiles are alive.
                @mock.patch('import_data_to_db.utils.NotFoundQuestion')
                @mock.patch('db_utils.QgsProject.instance', utils_for_tests.MidvattenTestPostgisNotCreated.mock_instance_settings_database)
                @mock.patch('db_utils.get_postgis_connections', utils_for_tests.MidvattenTestPostgisNotCreated.mock_postgis_connections)
                @mock.patch('import_data_to_db.utils.Askuser')
                @mock.patch('qgis.utils.iface', autospec=True)
                @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
                @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                @mock.patch('import_data_to_db.utils.select_files')
                def _test_wlvllogg_import_from_diveroffice_files(self, filenames, mock_filenames, mock_skippopup, mock_encoding, mock_iface, mock_askuser, mock_notfoundquestion):
                    # Any unresolved location is answered with 'rb1'.
                    mock_notfoundquestion.return_value.answer = 'ok'
                    mock_notfoundquestion.return_value.value = 'rb1'
                    mock_notfoundquestion.return_value.reuse_column = 'location'
                    mock_filenames.return_value = filenames
                    mock_encoding.return_value = ['utf-8']
                    ms = MagicMock()
                    ms.settingsdict = OrderedDict()
                    importer = DiverofficeImport(self.iface.mainWindow(), ms)
                    importer.select_files_and_load_gui()
                    importer.import_all_data.checked = True
                    importer.confirm_names.checked = False
                    # Skip rows missing the water level value.
                    importer.skip_rows.checked = True
                    importer.start_import(importer.files, importer.skip_rows.checked, importer.confirm_names.checked, importer.import_all_data.checked)
                _test_wlvllogg_import_from_diveroffice_files(self, filenames)
                test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''))
                # rb1 2016-03-15 11:00:00 (empty head) is absent.
                reference_string = r'''(True, [(rb1, 2016-03-15 10:31, 5.0, None, None, None, None), (rb1, 2016-03-15 10:30:00, 1.0, 10.0, None, None, None), (rb2, 2016-04-15 10:30:00, 2.0, 20.0, None, None, None), (rb2, 2016-04-15 11:00:00, 21.0, 201.0, None, None, None), (rb3, 2016-05-15 10:30:00, 3.0, 30.0, 5.0, None, None), (rb3, 2016-05-15 11:00:00, 31.0, 301.0, 6.0, None, None)])'''
                assert test_string == reference_string
def get_piper_data(self):
    """Load piper-diagram data from w_qual_lab into numpy arrays.

    Fills self.obsnp_nospecformat (untyped ndarray),
    self.obsnp_specified_format (structured array) and
    self.obsrecarray (recarray view with attribute-style columns).
    """
    # These observations are supposed to be in mg/l and must be stored
    # in a Midvatten database, table w_qual_lab.
    sql = self.big_sql()
    try:
        print(sql)  # debug
    except Exception:
        # Fix: was a bare `except:` which also swallows SystemExit and
        # KeyboardInterrupt; the debug print is best-effort only (may
        # fail on console encoding), so ignore ordinary exceptions.
        pass
    # Row layout: obsid, date_time, type, Cl_meqPl, HCO3_meqPl,
    # SO4_meqPl, Na+K_meqPl, Ca_meqPl, Mg_meqPl
    obsimport = db_utils.sql_load_fr_db(sql)[1]
    # ndarray without an explicit dtype.
    self.obsnp_nospecformat = np.array(obsimport)
    # NOTE on string dtypes: str/unicode/np.str_ all cause problems and
    # 'a35' truncates at 35 characters; the generic "object" dtype is
    # the least bad option — every string loads as full unicode.
    My_format = [('obsid', object), ('date_time', datetime.datetime),
                 ('obstype', object), ('Cl_meqPl', float),
                 ('HCO3_meqPl', float), ('SO4_meqPl', float),
                 ('NaK_meqPl', float), ('Ca_meqPl', float),
                 ('Mg_meqPl', float)]
    # Structured array with the explicit dtype above.
    self.obsnp_specified_format = np.array(obsimport, dtype=My_format)
    # recarray view: columns become attributes, e.g. table2.Cl_meqPl.
    self.obsrecarray = self.obsnp_specified_format.view(np.recarray)
def test_wlvllogg_import_from_diveroffice_files_cancel(self):
    """Cancelling the obsid NotFoundQuestion must abort the whole import.

    The file's Location 'rb2' does not match the single obs_point 'Rb1';
    the mocked dialog answers 'cancel', so w_levels_logger must stay
    empty.
    """
    files = [('Location=rb2',
              'Date/time,Water head[cm],Temperature[°C]',
              '2016/03/15 10:30:00,1,10',
              '2016/03/15 11:00:00,11,101')]
    db_utils.sql_alter_db(
        '''INSERT INTO obs_points (obsid) VALUES ('Rb1')''')
    DiverofficeImport.charsetchoosen = 'utf-8'
    with utils.tempinput('\n'.join(files[0]), DiverofficeImport.charsetchoosen) as f1:
        filenames = [f1]
        # Askuser stub that answers "no" (result 0).
        utils_askuser_answer_no_obj = MockUsingReturnValue(None)
        utils_askuser_answer_no_obj.result = 0
        utils_askuser_answer_no = MockUsingReturnValue(
            utils_askuser_answer_no_obj)
        # Inner helper so the mock.patch decorators can be applied
        # while the tempfile is alive.
        @mock.patch('import_data_to_db.utils.NotFoundQuestion')
        @mock.patch('db_utils.QgsProject.instance',
                    utils_for_tests.MidvattenTestPostgisNotCreated.mock_instance_settings_database)
        @mock.patch('db_utils.get_postgis_connections',
                    utils_for_tests.MidvattenTestPostgisNotCreated.mock_postgis_connections)
        @mock.patch('import_data_to_db.utils.Askuser')
        @mock.patch('qgis.utils.iface', autospec=True)
        @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
        @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
        @mock.patch('import_data_to_db.utils.select_files')
        def _test_wlvllogg_import_from_diveroffice_files(
                self, filenames, mock_filenames, mock_skippopup,
                mock_encoding, mock_iface, mock_askuser,
                mock_notfoundquestion):
            # The user cancels the unknown-obsid dialog.
            mock_notfoundquestion.return_value.answer = 'cancel'
            mock_notfoundquestion.return_value.value = 'rb1'
            mock_notfoundquestion.return_value.reuse_column = 'location'
            mock_filenames.return_value = filenames
            mock_encoding.return_value = ['utf-8']
            ms = MagicMock()
            ms.settingsdict = OrderedDict()
            importer = DiverofficeImport(self.iface.mainWindow(), ms)
            importer.select_files_and_load_gui()
            importer.import_all_data.checked = True
            importer.confirm_names.checked = False
            answer = importer.start_import(
                importer.files, importer.skip_rows.checked,
                importer.confirm_names.checked,
                importer.import_all_data.checked)
            return answer
        answer = _test_wlvllogg_import_from_diveroffice_files(
            self,
            filenames)
        # Nothing may have been imported after the cancel.
        test_string = utils_for_tests.create_test_string(
            db_utils.sql_load_fr_db(
                '''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''
            ))
        reference_string = r'''(True, [])'''
        assert test_string == reference_string
def test_calselected(self, mock_messagebar, mock_getselectedobjectnames, mock_iface):
    """Average flow is calculated only for the selected obsid ('1').

    obsid '1' gets three Aveflow rows derived from its four Accvol
    readings; obsid '2' is not selected and must stay untouched.
    """
    mock_getselectedobjectnames.return_value = ['1']
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('1')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('2')''')
    # Accvol readings for obsid '1' (inserted out of time order on
    # purpose — the calculation orders by date_time).
    db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-02 00:00', 2.0, 'm3')''')
    db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-01 00:00', 1.0, 'm3')''')
    db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-04 00:00', 10.0, 'm3')''')
    db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('1', 'inst1', 'Accvol', '2019-02-03 00:00', 5.0, 'm3')''')
    # Accvol readings for the unselected obsid '2'.
    db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('2', 'inst2', 'Accvol', '2019-02-04 00:00', 10.0, 'm3')''')
    db_utils.sql_alter_db('''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, reading, unit) VALUES ('2', 'inst2', 'Accvol', '2019-02-03 00:00', 5.0, 'm3')''')
    widget = QtWidgets.QWidget()
    calcave = w_flow_calc_aveflow.Calcave(widget)
    calcave.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
    calcave.calcselected()
    print(str(mock_messagebar.mock_calls))
    #insert or ignore into w_flow(obsid,instrumentid,flowtype,date_time,reading,unit) values('%s','%s','Aveflow','%s','%s','l/s')
    res = db_utils.sql_load_fr_db('''SELECT obsid, instrumentid, flowtype, date_time, ROUND(reading, 4), unit FROM w_flow ORDER BY obsid, flowtype, date_time''')[1]
    test = utils.anything_to_string_representation(res)
    print(test)
    # Aveflow rows exist for obsid '1' only.
    reference = '[("1", "inst1", "Accvol", "2019-02-01 00:00", 1.0, "m3", ), ("1", "inst1", "Accvol", "2019-02-02 00:00", 2.0, "m3", ), ("1", "inst1", "Accvol", "2019-02-03 00:00", 5.0, "m3", ), ("1", "inst1", "Accvol", "2019-02-04 00:00", 10.0, "m3", ), ("1", "inst1", "Aveflow", "2019-02-02 00:00", 0.0116, "l/s", ), ("1", "inst1", "Aveflow", "2019-02-03 00:00", 0.0347, "l/s", ), ("1", "inst1", "Aveflow", "2019-02-04 00:00", 0.0579, "l/s", ), ("2", "inst2", "Accvol", "2019-02-03 00:00", 5.0, "m3", ), ("2", "inst2", "Accvol", "2019-02-04 00:00", 10.0, "m3", )]'
    #result_list = self.calcave.observations
    #reference_list = ['1', '2']
    assert test == reference
def LoadDistinctPiperParams(self):
    """Fill the seven piper parameter comboboxes with parameter names.

    Each combobox gets an empty first entry followed by the distinct
    parameters found in w_qual_lab (or, once implemented, the keys of a
    lab-parameter dict).
    """
    self.ClearPiperParams()
    comboboxes = (self.paramCl, self.paramHCO3, self.paramSO4,
                  self.paramNa, self.paramK, self.paramCa, self.paramMg)
    # Dict not implemented yet.
    lab_parameters = {}
    if lab_parameters:
        # Empty entry first, then the sorted parameter names.
        items = ['']
        items.extend(sorted(lab_parameters.keys()))
        for combobox in comboboxes:
            combobox.addItems(items)
    else:
        connection_ok, result = db_utils.sql_load_fr_db(
            r"""SELECT DISTINCT parameter FROM w_qual_lab ORDER BY parameter""")
        if connection_ok:
            # Empty entry first in every combobox.
            for combobox in comboboxes:
                combobox.addItem('')
            # Then every distinct parameter, added to all boxes.
            for row in result:
                for combobox in comboboxes:
                    combobox.addItem(row[0])
def test_calibrlogger_adjust_trend(self, mock_messagebar):
    """Trend adjustment maps logger levels onto manual measurements.

    Logger trends 100->200 while manual measurements trend 200->100;
    after adjust_trend_func() the first logger value follows the manual
    line and the second is ~0 (compared at 11 decimals to absorb float
    noise).
    """
    db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
    # Logger series: rising 100 -> 200.
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 200)")
    # Manual series: falling 200 -> 100.
    db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 200)")
    db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 100)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
    calibrlogger.update_plot()
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
    # L1/L2: logger reference points; M1/M2: manual reference points.
    calibrlogger.L1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
    calibrlogger.L2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
    calibrlogger.M1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
    calibrlogger.M2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
    calibrlogger.adjust_trend_func()
    res = db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger')
    # Normalize the second row's level_masl to a fixed-precision string
    # so the tiny float residue compares deterministically.
    l = list(res[1][1])
    l[5] = '%.11e'%Decimal(l[5])
    res[1][1] = tuple(l)
    test = utils_for_tests.create_test_string(res)
    print(mock_messagebar.mock_calls)
    print(test)
    ref = '(True, [(rb1, 2017-02-01 00:00, None, None, None, 100.0, None), (rb1, 2017-02-10 00:00, None, None, None, -2.84217094304e-14, None)])'
    assert test == ref
def test_calibrlogger_adjust_trend(self, mock_messagebar):
    """Trend adjustment with explicitly typed L/M levels.

    Same scenario as the sibling test but the L1/L2 and M1/M2 level
    fields are filled in explicitly; the explicit column list in the
    verification SELECT makes the test independent of column order.
    """
    db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
    # Logger series: rising 100 -> 200.
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 200)")
    # Manual series: falling 200 -> 100.
    db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 200)")
    db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-10 00:00', 100)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
    calibrlogger.update_plot()
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
    # L1/L2: logger reference points; M1/M2: manual reference points.
    calibrlogger.L1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
    calibrlogger.L2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
    calibrlogger.M1_date.setDateTime(date_utils.datestring_to_date('2017-02-01 00:00'))
    calibrlogger.M2_date.setDateTime(date_utils.datestring_to_date('2017-02-10 00:00'))
    calibrlogger.L1_level.setText('100')
    calibrlogger.L2_level.setText('200')
    calibrlogger.M1_level.setText('200')
    calibrlogger.M2_level.setText('100')
    calibrlogger.adjust_trend_func()
    res = db_utils.sql_load_fr_db('SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger')
    # Normalize the second row's level_masl to a fixed-precision string
    # so the tiny float residue compares deterministically.
    l = list(res[1][1])
    l[5] = '%.11e'%Decimal(l[5])
    res[1][1] = tuple(l)
    test = utils_for_tests.create_test_string(res)
    print(mock_messagebar.mock_calls)
    ref = '(True, [(rb1, 2017-02-01 00:00, None, None, None, 100.0, None), (rb1, 2017-02-10 00:00, None, None, None, -2.84217094304e-14, None)])'
    print("Ref")
    print(ref)
    print("Test")
    print(test)
    assert test == ref
def remove_layers(self):
    """Remove this plugin's layer group from the QGIS project.

    Tries the modern layer-tree API first; on older QGIS versions it falls
    back to removing the individual layers and then the legend group.
    """
    try:
        #qgis>2.6: the layer tree API can drop the whole group node at once.
        remove_group = self.root.findGroup(self.group_name)
        self.root.removeChildNode(remove_group)
    except Exception:
        # qgis < 2.4 fallback. FIX: was a bare `except:`, which also swallows
        # SystemExit/KeyboardInterrupt; narrowed to Exception.
        """ FIRST search for and try to remove old layers """
        ALL_LAYERS = self.iface.mapCanvas().layers()
        if self.group_name == 'Midvatten_OBS_DB':
            for lyr in ALL_LAYERS:
                name = lyr.name()
                if (name in self.default_layers) or (name in self.default_nonspatlayers):
                    QgsMapLayerRegistry.instance().removeMapLayers([lyr.id()])
        elif self.group_name == 'Midvatten_data_domains':
            # Data-domain layers are the zz_* tables; look them up first.
            conn_ok, dd_tables = db_utils.sql_load_fr_db("select name from sqlite_master where name like 'zz_%'")
            if not conn_ok:
                return
            d_domain_tables = [str(dd_table[0]) for dd_table in dd_tables]
            for lyr in ALL_LAYERS:
                name = lyr.name()
                if name in d_domain_tables:
                    QgsMapLayerRegistry.instance().removeMapLayers([lyr.id()])
        """ THEN remove old group """
        while self.group_name in self.legend.groups():
            group_index = self.legend.groups().index(self.group_name)
            self.legend.removeGroup(group_index)
def test_calc_selected(self, mock_selected_obsids):
    """calcselected() must compute level_masl only for the selected obsid (rb1),
    leaving the unselected rb2 untouched (h_toc/level_masl stay None in output).
    """
    mock_selected_obsids.return_value = [u'rb1']
    # Two obsids with manual measurements; only rb1 is "selected".
    db_utils.sql_alter_db(u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
    db_utils.sql_alter_db(u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
    db_utils.sql_alter_db(u'''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', 4)''')
    db_utils.sql_alter_db(u'''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')''')
    # Date window spans the measurement date.
    self.calclvl.FromDateTime = QtGui.QDateTimeEdit()
    self.calclvl.FromDateTime.setDateTime(datestring_to_date(u'2000-01-01 00:00:00'))
    self.calclvl.ToDateTime = QtGui.QDateTimeEdit()
    self.calclvl.ToDateTime.setDateTime(datestring_to_date(u'2010-01-01 00:00:00'))
    self.calclvl.calcselected()
    test_string = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db(u'SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid'))
    # rb1: level_masl = h_toc - meas = 1 - 222; rb2 untouched.
    reference_string = u'(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0), (rb2, 2005-01-01 00:00:00, 444.0, None, None)])'
    assert test_string == reference_string
def test_calibrlogger_set_log_pos(self, mock_messagebar):
    """set_logger_pos() with LoggerPos=2 on a 100 cm head reading should yield
    level_masl 3.0 (presumably pos + head_cm/100 — inferred from the asserted value).
    """
    db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
    db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, head_cm) VALUES ('rb1', '2017-02-01 00:00', 100)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    calibrlogger.update_plot()
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
    calibrlogger.LoggerPos.setText('2')
    gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
    calibrlogger.set_logger_pos()
    print(str(mock_messagebar.mock_calls))
    test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
    ref = '(True, [(rb1, 2017-02-01 00:00, 100.0, None, None, 3.0, None)])'
    print(test)
    assert test == ref
def test_calibrlogger_calc_best_fit_add_no_matches_same_to_date(self, mock_messagebar, skip_popup):
    """calc_best_fit() must leave the logger value unchanged when no manual
    measurement falls inside the search radius/date window (manual point is
    years outside the From/To range).
    """
    db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
    db_utils.sql_alter_db("INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 01:00', 50)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    calibrlogger.update_plot()
    # State 2: presumably the "offset" calibration mode — confirm against Calibrlogger.
    calibrlogger.loggerpos_masl_or_offset_state = 2
    # Window starts 2010 but ToDateTime equals the logger timestamp.
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2010-02-01 01:00'))
    calibrlogger.ToDateTime.setDateTime(date_utils.datestring_to_date('2017-02-01 01:00'))
    gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
    calibrlogger.bestFitSearchRadius.setText('2 hours')
    calibrlogger.calc_best_fit()
    test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
    # Unchanged: 50.0 survives.
    ref = '(True, [(rb1, 2017-02-01 01:00, None, None, None, 50.0, None)])'
    print(test)
    assert test == ref
def calcall(self):
    """Calculate average flow for every obsid that has 'Accvol' readings in w_flow.

    Collects the distinct obsids into self.observations and delegates to
    calculateaveflow().
    """
    # FIX: 'Accvol' was written as "Accvol" — double quotes denote identifiers
    # in SQL; it only worked through SQLite's misfeature of falling back to a
    # string literal when no such column exists, and would break on stricter
    # backends. The sibling implementation of calcall already uses 'Accvol'.
    obsar = db_utils.sql_load_fr_db("select distinct obsid from w_flow where flowtype = 'Accvol'")[1]
    # str(): we cannot send unicode as string to sql because it would include the u'
    self.observations = [str(obs[0]) for obs in obsar]
    self.calculateaveflow()
def LoadDistinctPiperParams(self):
    """Fill the seven Piper-diagram parameter comboboxes.

    Uses the (not yet implemented) lab-parameter dict when available,
    otherwise loads the distinct parameter names from w_qual_lab. Each box
    gets an empty first entry so "no parameter" is selectable.
    """
    self.ClearPiperParams()
    #Dict not implemented yet.
    lab_parameters = {}
    # All ion comboboxes receive the same item list.
    param_boxes = [self.paramCl, self.paramHCO3, self.paramSO4,
                   self.paramNa, self.paramK, self.paramCa, self.paramMg]
    if lab_parameters:
        for box in param_boxes:
            items = ['']
            items.extend(sorted(lab_parameters.keys()))
            box.addItems(items)
    else:
        connection_ok, result = db_utils.sql_load_fr_db(r"""SELECT DISTINCT parameter FROM w_qual_lab ORDER BY parameter""")
        if connection_ok:
            for box in param_boxes:
                box.addItem('')
            for row in result:
                for box in param_boxes:
                    box.addItem(row[0])
def test_interlab4_full_test_to_db_staff_0(self):
    """Full Interlab4 import round-trip: duplicate 'Kalium' rows must be suffixed
    '(dubblett 1)' in w_qual_lab, and the numeric staff id 0 must survive into
    zz_staff (regression for falsy-zero handling).
    """
    db_utils.sql_alter_db(u'''INSERT INTO obs_points (obsid) VALUES ('anobsid')''')
    # Minimal Interlab4 file: header, one #Provadm row, two #Provdat rows with
    # the same parameter (duplicate), and a junk trailing section marker.
    interlab4_lines = (
        '#Interlab',
        '#Version=4.0',
        '#Tecken=UTF-8',
        '#Textavgränsare=Nej',
        '#Decimaltecken=,',
        '#Provadm',
        'Lablittera;Namn;Adress;Postnr;Ort;Kommunkod;Projekt;Laboratorium;Provtyp;Provtagare;Registertyp;ProvplatsID;Provplatsnamn;Specifik provplats;Provtagningsorsak;Provtyp;Provtypspecifikation;Bedömning;Kemisk bedömning;Mikrobiologisk bedömning;Kommentar;År;Provtagningsdatum;Provtagningstid;Inlämningsdatum;Inlämningstid;',
        'DM-990908-2773;MFR;PG Vejdes väg 15;351 96;Växjö;0780;Demoproj;Demo-Laboratoriet;NSG;0;;Demo1 vattenverk;;Föreskriven regelbunden undersökning enligt SLVFS 2001:30;Dricksvatten enligt SLVFS 2001:30;Utgående;Nej;Tjänligt;;;;2010;2010-09-07;10:15;2010-09-07;14:15;',
        '#Provdat',
        'Lablittera;Metodbeteckning;Parameter;Mätvärdetext;Mätvärdetal;Mätvärdetalanm;Enhet;Rapporteringsgräns;Detektionsgräns;Mätosäkerhet;Mätvärdespår;Parameterbedömning;Kommentar;',
        'DM-990908-2773;SS-EN ISO 7887-1/4;Kalium;<2,5;2,5;;mg/l Pt;;;;;;;',
        'DM-990908-2773;SS-EN ISO 7887-1/4;Kalium;<1;1;;mg/l Pt;;;;;;;',
        '#S**t'
    )
    with utils.tempinput(u'\n'.join(interlab4_lines), 'utf-8') as filename:
        # Inner function so the mock.patch decorators apply only to the import run.
        @mock.patch('midvatten_utils.NotFoundQuestion')
        @mock.patch('db_utils.QgsProject.instance', utils_for_tests.MidvattenTestSpatialiteNotCreated.mock_instance_settings_database)
        @mock.patch('import_data_to_db.utils.Askuser', mocks_for_tests.mock_askuser.get_v)
        @mock.patch('qgis.utils.iface', autospec=True)
        @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
        @mock.patch('import_data_to_db.qgis.PyQt.QtWidgets.QFileDialog.getOpenFileNames')
        def _test(self, filename, mock_filenames, mock_skippopup, mock_iface, mock_not_found_question):
            # Answer the "obsid not found" dialog by reusing 'anobsid'.
            mock_not_found_question.return_value.answer = 'ok'
            mock_not_found_question.return_value.value = 'anobsid'
            mock_not_found_question.return_value.reuse_column = 'obsid'
            mock_filenames.return_value = [[filename]]
            importer = Interlab4Import(self.iface.mainWindow(), self.ms)
            importer.parse_observations_and_populate_gui()
            importer.start_import(importer.all_lab_results, importer.metadata_filter.get_selected_lablitteras())
        _test(self, filename)
        test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'''SELECT * FROM w_qual_lab'''))
        reference_string = '''(True, [(anobsid, None, DM-990908-2773, Demoproj, 0, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium, 2.5, <2,5, mg/l Pt, provtagningsorsak: Dricksvatten enligt SLVFS 2001:30. provtyp: Utgående. provtypspecifikation: Nej. bedömning: Tjänligt. provplatsid: Demo1 vattenverk. specifik provplats: Föreskriven regelbunden undersökning enligt SLVFS 2001:30), (anobsid, None, DM-990908-2773, Demoproj, 0, 2010-09-07 10:15:00, SS-EN ISO 7887-1/4, Kalium (dubblett 1), 1.0, <1, mg/l Pt, provtagningsorsak: Dricksvatten enligt SLVFS 2001:30. provtyp: Utgående. provtypspecifikation: Nej. bedömning: Tjänligt. provplatsid: Demo1 vattenverk. specifik provplats: Föreskriven regelbunden undersökning enligt SLVFS 2001:30)])'''
        print(reference_string)
        print(test_string)
        assert test_string == reference_string
        # Staff id 0 must be stored, not dropped as falsy.
        test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'''SELECT * FROM zz_staff'''))
        reference_string = '(True, [(0, None)])'
        assert test_string == reference_string
def test_prepare_qgis2threejs(self, mock_iface, mock_messagebar):
    """prepare_layers_for_qgis2threejs() must create one strat_* view per geoshort
    class plus the obs-point helper view, with computed z_coord/height per layer.
    """
    self.init_qgis()
    # One obs point with two stratigraphy layers (torv 0-1 m, fyll 1-2 m).
    dbconnection = db_utils.DbConnectionManager()
    dbconnection.execute('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('1', 1, ST_GeomFromText('POINT(1 1)', 3006)); ''')
    dbconnection.execute('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geoshort) VALUES ('1', 1, 0, 1, 'torv'); ''')
    dbconnection.execute('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geoshort) VALUES ('1', 2, 1, 2, 'fyll'); ''')
    dbconnection.commit_and_closedb()
    #print(str(db_utils.sql_load_fr_db('''SELECT * FROM stratigraphy;''')))
    canvas = MagicMock()
    mock_iface.mapCanvas.return_value = canvas
    self.midvatten.prepare_layers_for_qgis2threejs()
    # Every expected view, most of which should be empty for this dataset.
    layers = ['strat_torv', 'strat_fyll', 'strat_lera', 'strat_silt',
              'strat_finsand', 'strat_mellansand', 'strat_sand', 'strat_grovsand',
              'strat_fingrus', 'strat_mellangrus', 'strat_grus', 'strat_grovgrus',
              'strat_morn', 'strat_berg', 'strat_obs_p_for_qgsi2threejs']
    view_contents = []
    for l in layers:
        if l != 'strat_obs_p_for_qgsi2threejs':
            view_contents.append(db_utils.sql_load_fr_db('''SELECT rowid, obsid, z_coord, height, ST_AsText(geometry) FROM {};'''.format(l))[1])
    # The obs-point helper view has a different column set.
    view_contents.append(db_utils.sql_load_fr_db('''SELECT rowid, obsid, ST_AsText(geometry) FROM {};'''.format('strat_obs_p_for_qgsi2threejs'))[1])
    test = utils.anything_to_string_representation(view_contents)
    print(str(test))
    ref = '''[[(1, "1", 1.0, -1.0, "POINT(1 1)", )], [(2, "1", 0.0, -1.0, "POINT(1 1)", )], [], [], [], [], [], [], [], [], [], [], [], [], [(1, "1", "POINT(1 1)", )]]'''
    assert test == ref
    assert not mock_messagebar.mock_calls
def test_stratigraphy_with_string_obsid(self, mock_skippopup, mock_messagebar):
    """Stratigraphy dialog must build SURVEY/strata data for string obsids
    (including non-ascii geology names) without popups or messagebar errors.
    """
    # Three obs points; only P1 gets stratigraphy layers.
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('P1', 5, ST_GeomFromText('POINT(633466 711659)', 3006))''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('P2', 10, ST_GeomFromText('POINT(6720727 016568)', 3006))''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('P3', 20, ST_GeomFromText('POINT(6720728 016569)', 3006))''')
    db_utils.sql_alter_db('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geology, geoshort, capacity, development) VALUES ('P1', 1, 0, 1, 'sand', 'sand', '3', 'j')''')
    db_utils.sql_alter_db('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geology, geoshort, capacity, development) VALUES ('P1', 2, 1, 4.5, 'morän', 'morän', '3', 'j')''')
    self.create_and_select_vlayer()
    print(str(self.vlayer.isValid()))
    print(str(db_utils.sql_load_fr_db('select * from obs_points')))
    print(str(db_utils.sql_load_fr_db('select * from stratigraphy')))
    dlg = Stratigraphy(self.iface, self.vlayer, self.midvatten.ms.settingsdict)
    print(str(mock_messagebar.mock_calls))
    print(str(mock_skippopup.mock_calls))
    dlg.showSurvey()
    test = utils.anything_to_string_representation(dlg.data)
    test_survey = utils.anything_to_string_representation(repr(dlg.data['P1']))
    test_strata = utils.anything_to_string_representation(utils.returnunicode(dlg.data['P1'].strata, keep_containers=True))
    # No error popups or messagebar calls expected.
    assert len(mock_skippopup.mock_calls) == 0
    print(str(mock_messagebar.mock_calls))
    assert len(mock_messagebar.mock_calls) == 0
    assert test == """{"P1": SURVEY('P1', 5.000000, '<QgsPointXY: POINT(633466 711659)>')}"""
    assert test_survey == '''"SURVEY('P1', 5.000000, '<QgsPointXY: POINT(633466 711659)>')"'''
    # Note: 'morän' is transliterated to 'moran' in the strata repr.
    assert test_strata == '''["strata(1, '3', 'sand', 'sand', 0.000000-1.000000)", "strata(2, '3', 'morän', 'moran', 
1.000000-4.500000)"]'''
def calcall(self):
    """Calculate average flow for every obsid with 'Accvol' readings.

    Aborts with a messagebar error when no such observations exist.
    """
    ok, obsar = db_utils.sql_load_fr_db('''SELECT DISTINCT obsid FROM w_flow WHERE flowtype = 'Accvol' ''')
    # Guard clause: nothing to calculate without Accvol observations.
    if not obsar:
        utils.MessagebarAndLog.critical(
            bar_msg=ru(QCoreApplication.translate('Calcave', "No observations with Accvol found, nothing calculated!")))
        return
    self.observations = [row[0] for row in obsar]
    self.calculateaveflow()
def calcall(self):
    """Run level calculation for every obsid found in w_levels.

    Pops an error dialog and aborts when the table holds no obsids.
    """
    rows = db_utils.sql_load_fr_db("""SELECT DISTINCT obsid FROM w_levels""")[1]
    # Guard clause: abort early when there is nothing to adjust.
    if not rows:
        utils.pop_up_info(
            ru(QCoreApplication.translate('Calclvl', 'Adjustment aborted! No obsids in w_levels.')),
            ru(QCoreApplication.translate('Calclvl', 'Error')))
        return
    self.calc([row[0] for row in rows])
def load_obsid_from_db(self):
    """Reload the obsid combobox from w_levels_logger.

    Each obsid gets a calibration status per row ('uncalibrated' when
    level_masl is NULL but head_cm exists); obsids with any uncalibrated
    rows are forwarded so the combobox can show a suffix for them.
    """
    self.combobox_obsid.clear()
    rows = db_utils.sql_load_fr_db("""SELECT DISTINCT obsid, (CASE WHEN level_masl IS NULL AND head_cm IS NOT NULL THEN 'uncalibrated' ELSE 'calibrated' END) AS status FROM w_levels_logger ORDER BY obsid""")[1]
    # Group the per-row statuses by obsid.
    statuses_by_obsid = {}
    for obsid, status in rows:
        statuses_by_obsid.setdefault(obsid, []).append(status)
    self.combobox_obsid.addItems(sorted(statuses_by_obsid))
    uncalibrated = [an_obsid for an_obsid, statuses in statuses_by_obsid.items()
                    if 'uncalibrated' in statuses]
    self.update_combobox_with_calibration_info(_obsids_with_uncalibrated_data=uncalibrated)
def test_prepare_qgis2threejs(self, mock_iface, mock_messagebar):
    """Duplicate of the qgis2threejs preparation test: verifies the strat_* views
    and the obs-point helper view after prepare_layers_for_qgis2threejs().
    """
    self.init_qgis()
    # One obs point with two stratigraphy layers (torv 0-1 m, fyll 1-2 m).
    dbconnection = db_utils.DbConnectionManager()
    dbconnection.execute('''INSERT INTO obs_points (obsid, h_gs, geometry) VALUES ('1', 1, ST_GeomFromText('POINT(1 1)', 3006)); ''')
    dbconnection.execute('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geoshort) VALUES ('1', 1, 0, 1, 'torv'); ''')
    dbconnection.execute('''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot, geoshort) VALUES ('1', 2, 1, 2, 'fyll'); ''')
    dbconnection.commit_and_closedb()
    #print(str(db_utils.sql_load_fr_db('''SELECT * FROM stratigraphy;''')))
    canvas = MagicMock()
    mock_iface.mapCanvas.return_value = canvas
    self.midvatten.prepare_layers_for_qgis2threejs()
    layers = ['strat_torv', 'strat_fyll', 'strat_lera', 'strat_silt',
              'strat_finsand', 'strat_mellansand', 'strat_sand', 'strat_grovsand',
              'strat_fingrus', 'strat_mellangrus', 'strat_grus', 'strat_grovgrus',
              'strat_morn', 'strat_berg', 'strat_obs_p_for_qgsi2threejs']
    view_contents = []
    for l in layers:
        if l != 'strat_obs_p_for_qgsi2threejs':
            view_contents.append(db_utils.sql_load_fr_db('''SELECT rowid, obsid, z_coord, height, ST_AsText(geometry) FROM {};'''.format(l))[1])
    # The obs-point helper view has a different column set.
    view_contents.append(
        db_utils.sql_load_fr_db('''SELECT rowid, obsid, ST_AsText(geometry) FROM {};'''.format('strat_obs_p_for_qgsi2threejs'))[1])
    test = utils.anything_to_string_representation(view_contents)
    print(str(test))
    ref = '''[[(1, "1", 1.0, -1.0, "POINT(1 1)", )], [(2, "1", 0.0, -1.0, "POINT(1 1)", )], [], [], [], [], [], [], [], [], [], [], [], [], [(1, "1", "POINT(1 1)", )]]'''
    assert test == ref
    assert not mock_messagebar.mock_calls
def GetStatistics(obsid=''):
    """Collect w_levels statistics for one obsid.

    Returns (meas_or_level_masl, [min, median, count, max]) where the data
    column is whichever of 'meas'/'level_masl' has more non-null rows.
    """
    Statistics_list = [0] * 4
    columns = ['meas', 'level_masl']
    # default value
    meas_or_level_masl = 'meas'
    #number of values, also decide whether to use meas or level_masl in report
    for column in columns:
        sql = r"""select Count(%s) from w_levels where obsid = '%s'""" % (column, obsid)
        ConnectionOK, number_of_values = db_utils.sql_load_fr_db(sql)
        if number_of_values and number_of_values[0][0] > Statistics_list[2]:  #this will select meas if meas >= level_masl
            meas_or_level_masl = column
            Statistics_list[2] = number_of_values[0][0]
    #min value
    # NOTE(review): for level_masl the min/max queries are deliberately(?) swapped
    # relative to meas — presumably because meas is a depth (larger = lower level)
    # while level_masl rises with the water level. Confirm before "fixing".
    if meas_or_level_masl == 'meas':
        sql = r"""select min(meas) from w_levels where obsid = '%s'""" % obsid
    else:
        sql = r"""select max(level_masl) from w_levels where obsid = '%s'""" % obsid
    ConnectionOK, min_value = db_utils.sql_load_fr_db(sql)
    if min_value:
        Statistics_list[0] = min_value[0][0]
    #median value
    median_value = db_utils.calculate_median_value('w_levels', meas_or_level_masl, obsid)
    if median_value:
        Statistics_list[1] = median_value
    #max value
    if meas_or_level_masl == 'meas':
        sql = r"""select max(meas) from w_levels where obsid = '%s' """ % obsid
    else:
        sql = r"""select min(level_masl) from w_levels where obsid = '%s' """ % obsid
    ConnectionOK, max_value = db_utils.sql_load_fr_db(sql)
    if max_value:
        Statistics_list[3] = max_value[0][0]
    return meas_or_level_masl, Statistics_list
def GetStatistics(obsid = ''):
    """Collect w_levels statistics for one obsid (duplicate of GetStatistics above).

    Returns (meas_or_level_masl, [min, median, count, max]); the data column is
    whichever of 'meas'/'level_masl' has more non-null rows.
    """
    Statistics_list = [0]*4
    columns = ['meas', 'level_masl']
    # default value
    meas_or_level_masl= 'meas'
    #number of values, also decide whether to use meas or level_masl in report
    for column in columns:
        sql = r"""select Count(%s) from w_levels where obsid = '%s'"""%(column, obsid)
        ConnectionOK, number_of_values = db_utils.sql_load_fr_db(sql)
        if number_of_values and number_of_values[0][0] > Statistics_list[2]:#this will select meas if meas >= level_masl
            meas_or_level_masl = column
            Statistics_list[2] = number_of_values[0][0]
    #min value
    # NOTE(review): min/max are swapped for level_masl — presumably deliberate
    # (meas is a depth, level_masl a level); confirm before changing.
    if meas_or_level_masl=='meas':
        sql = r"""select min(meas) from w_levels where obsid = '%s'""" % obsid
    else:
        sql = r"""select max(level_masl) from w_levels where obsid = '%s'""" % obsid
    ConnectionOK, min_value = db_utils.sql_load_fr_db(sql)
    if min_value:
        Statistics_list[0] = min_value[0][0]
    #median value
    median_value = db_utils.calculate_median_value('w_levels', meas_or_level_masl, obsid)
    if median_value:
        Statistics_list[1] = median_value
    #max value
    if meas_or_level_masl=='meas':
        sql = r"""select max(meas) from w_levels where obsid = '%s' """ % obsid
    else:
        sql = r"""select min(level_masl) from w_levels where obsid = '%s' """ % obsid
    ConnectionOK, max_value = db_utils.sql_load_fr_db(sql)
    if max_value:
        Statistics_list[3] = max_value[0][0]
    return meas_or_level_masl, Statistics_list
def update_combobox_with_calibration_info(self, obsid=None, _obsids_with_uncalibrated_data=None):
    """
    Adds an " (uncalibrated)" suffix after each obsid containing NULL-values in the column level_masl
    or removes it if there is no NULL-values.
    :param obsid: If obsid is given, only that obsid is checked. If not given then all obsids are checked.
    :param _obsids_with_uncalibrated_data: A list of obsids which are uncalibrated.

    If only obsid is given, calibration status will be read from database for that obsid.
    If only _obsids_with_uncalibrated_data is given, all obsids will update status based on that list.
    If both obsid and _obsids_with_uncalibrated_data are given, only status for that obsid will be
    updated based _obsids_with_uncalibrated_data.
    If none is given, all obsids will update status based on result from database.
    :return:
    """
    uncalibrated_str = ' (uncalibrated)'
    num_entries = self.combobox_obsid.count()
    if obsid is None and _obsids_with_uncalibrated_data is None:
        obsids_with_uncalibrated_data = [row[0] for row in db_utils.sql_load_fr_db("""SELECT DISTINCT obsid FROM w_levels_logger WHERE level_masl IS NULL""")[1]]
    elif _obsids_with_uncalibrated_data is not None:
        obsids_with_uncalibrated_data = _obsids_with_uncalibrated_data
    else:
        # BUG FIX: when only `obsid` was given, obsids_with_uncalibrated_data was
        # never assigned, so the `is None` check inside the loop raised
        # UnboundLocalError. Initialize to None here; the per-obsid database
        # lookup below then runs as documented.
        obsids_with_uncalibrated_data = None
    for idx in range(num_entries):
        current_obsid = self.combobox_obsid.itemText(idx).replace(uncalibrated_str, '')
        if obsid is not None:
            #If obsid was given, only continue loop for that one:
            if current_obsid != obsid:
                continue
            if obsids_with_uncalibrated_data is None:
                # Lazy per-obsid lookup, only reached for the matching entry.
                obsids_with_uncalibrated_data = [row[0] for row in db_utils.sql_load_fr_db("""SELECT DISTINCT obsid FROM w_levels_logger WHERE obsid = '%s' AND level_masl IS NULL"""%current_obsid)[1]]
        if current_obsid in obsids_with_uncalibrated_data:
            new_text = current_obsid + uncalibrated_str
        else:
            new_text = current_obsid
        self.combobox_obsid.setItemText(idx, new_text)
def GetData(self, obsid = '', tablename='', debug = 'n'):
    """Load all rows for one obsid from the given table.

    :param obsid: observation id to filter on.
    :param tablename: table to read (stratigraphy rows are ordered by stratid).
    :param debug: 'y' shows the generated SQL in a popup.
    :return: (ConnectionOK, data) as returned by db_utils.sql_load_fr_db.
    """
    # Build the statement in one step instead of six concatenations; the
    # resulting SQL string is identical to the previous implementation.
    # NOTE(review): obsid/tablename are interpolated unescaped — fine for
    # internal ids, but unsafe if ever fed user-controlled text.
    sql = "select * from {} where obsid = '{}'".format(tablename, obsid)
    if tablename == 'stratigraphy':
        sql += r""" order by stratid"""
    if debug == 'y':
        utils.pop_up_info(sql)
    ConnectionOK, data = db_utils.sql_load_fr_db(sql)
    return ConnectionOK, data
def test_calcall(self):
    """calcall() must fill level_masl = h_toc - meas (1 - 222 = -221) for the
    single obsid inside the date window.
    """
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
    db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
    # Date window spans the measurement.
    self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
    self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
    self.calclvl.calcall()
    test_string = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels'))
    reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
    assert test_string == reference_string
def test_calcall(self, mock_messagebar):
    """Duplicate of test_calcall with a messagebar mock: level_masl must become
    h_toc - meas (1 - 222 = -221) for the obsid inside the window.
    """
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
    db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
    # Date window spans the measurement.
    self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
    self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
    self.calclvl.calcall()
    test_string = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels'))
    reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0)])'
    assert test_string == reference_string
def selection_layer_in_db_or_not(self):  #this is not used, it might be if using layer_styles stored in the db
    """Choose the layer-loading method based on whether the database stores layer styles.

    Databases created before plugin 1.1 lack the layer_styles table; the user
    is offered a one-time upgrade before falling back to the old method.
    """
    sql = r"""select name from sqlite_master where name = 'layer_styles'"""
    result = db_utils.sql_load_fr_db(sql)[1]
    if not result:  # idiomatic emptiness test (was len(result)==0): old database w/o styles
        update_db = utils.Askuser("YesNo", """Your database was created with plugin version < 1.1 when layer styles were not stored in the database. You can update this database to the new standard with layer styles (symbols, colors, labels, input forms etc) stored in the database. This will increase plugin stability and multi-user experience but it will also change the layout of all your forms for entering data into the database. Anyway, an update of the database is recommended. Do you want to add these layer styles now?""", 'Update database with layer styles?')
        if update_db.result == 1:
            # Local import avoids a circular dependency at module load time.
            from .create_db import AddLayerStyles
            AddLayerStyles()
            self.add_layers_new_method()
        else:
            self.add_layers_old_method()
    else:
        self.add_layers_new_method()
def staff_list():
    """Fetch the distinct staff members stored in zz_staff.

    :return: (True, tuple_of_staff) on success, (False, empty tuple) when the
             database query fails (a warning is shown in that case).
    """
    sql = 'SELECT distinct staff from zz_staff'
    connection_ok, result_list = db_utils.sql_load_fr_db(sql)
    if not connection_ok:
        utils.MessagebarAndLog.warning(
            bar_msg=QCoreApplication.translate('staff_list', 'Sql failed, see log message panel'),
            log_msg=ru(QCoreApplication.translate('staff_list', 'Failed to get existing staff from staff table from sql %s'))%sql)
        return False, tuple()
    return True, ru(tuple(row[0] for row in result_list), True)
def get_statistics_for_single_obsid(obsid='', table='w_levels', data_columns=None):
    """Gather basic statistics for one obsid.

    Chooses the data column with the most non-null rows (the first column wins
    ties, so 'meas' is preferred by default) and returns
    (data_column, [min, median, count, max]).
    """
    if data_columns is None:
        data_columns = ['meas', 'level_masl']
    stats = [0] * 4
    # Default to the first column; replaced below if another has more rows.
    data_column = data_columns[0]
    for candidate in data_columns:
        sql = r"""select Count(%s) from %s where obsid = '%s'""" % (candidate, table, obsid)
        connection_ok, count_result = db_utils.sql_load_fr_db(sql)
        if count_result and count_result[0][0] > stats[2]:
            data_column = candidate
            stats[2] = count_result[0][0]
    # Minimum
    connection_ok, min_result = db_utils.sql_load_fr_db(
        r"""select min(%s) from %s where obsid = '%s'""" % (data_column, table, obsid))
    if min_result:
        stats[0] = min_result[0][0]
    # Median (delegated helper)
    median_result = db_utils.calculate_median_value(table, data_column, obsid)
    if median_result:
        stats[1] = median_result
    # Maximum
    connection_ok, max_result = db_utils.sql_load_fr_db(
        r"""select max(%s) from %s where obsid = '%s'""" % (data_column, table, obsid))
    if max_result:
        stats[3] = max_result[0][0]
    return data_column, stats
def get_distinct_values(tablename, columnname):
    """Return the distinct values of one column as a list of unicode strings.

    Returns an empty list (with a messagebar error) when arguments are missing
    or the query fails.
    """
    if not (tablename and columnname):
        return []
    sql = '''SELECT DISTINCT %s FROM %s''' % (columnname, tablename)
    connection_ok, result = db_utils.sql_load_fr_db(sql)
    if connection_ok:
        return [ru(row[0]) for row in result]
    utils.MessagebarAndLog.critical(
        bar_msg=utils.sql_failed_msg(),
        log_msg=ru(QCoreApplication.translate('DistinctValuesBrowser', """Cannot get data from sql %s"""))%ru(sql))
    return []
def get_last_used_quality_instruments():
    """Return the most recently used quality instruments from w_qual_field.

    :return: nested dict {parameter: {staff: [rows]}} where each row is
             (parameter, unit, instrument, staff, max(date_time)); ordering of
             the appended rows follows the SQL ORDER BY. Returns an empty dict
             when the database query fails.
    """
    sql = '''select parameter, unit, instrument, staff, max(date_time) from w_qual_field group by parameter, unit, instrument, staff order by parameter, date_time desc, unit asc, staff'''
    connection_ok, result = db_utils.sql_load_fr_db(sql)
    # FIX: connection_ok was ignored; a failed query made the iteration below
    # crash. Return an empty dict instead.
    if not connection_ok:
        return ru({}, True)
    result_dict = {}
    # FIX: was a list comprehension used purely for side effects; a plain loop
    # expresses the grouping intent without building a throwaway list.
    for row in result:
        result_dict.setdefault(row[0], {}).setdefault(row[3], []).append(row)
    return ru(result_dict, True)
def get_distinct_values(tablename, columnname):
    """Return the distinct values of one column (ParameterBrowser variant).

    Returns an empty list (with a messagebar error) when arguments are missing
    or the query fails; values are returned as-is, without unicode conversion.
    """
    if not (tablename and columnname):
        return []
    sql = '''SELECT distinct %s FROM %s'''%(columnname, tablename)
    connection_ok, result = db_utils.sql_load_fr_db(sql)
    if connection_ok:
        return [row[0] for row in result]
    utils.MessagebarAndLog.critical(
        bar_msg=ru(QCoreApplication.translate('ParameterBrowser', "Error, sql failed, see log message panel")),
        log_msg=ru(QCoreApplication.translate('ParameterBrowser', """Cannot get data from sql %s"""))%ru(sql))
    return []
def test_wlvllogg_import_from_diveroffice_files_cancel(self):
    """Answering 'cancel' on the obsid NotFoundQuestion must abort the Diver
    Office import, leaving w_levels_logger empty.
    """
    # One logger file whose Location (rb2) does not match the existing obsid (Rb1).
    files = [('Location=rb2',
              'Date/time,Water head[cm],Temperature[°C]',
              '2016/03/15 10:30:00,1,10',
              '2016/03/15 11:00:00,11,101')]
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('Rb1')''')
    DiverofficeImport.charsetchoosen = 'utf-8'
    with utils.tempinput('\n'.join(files[0]), DiverofficeImport.charsetchoosen) as f1:
        filenames = [f1]
        utils_askuser_answer_no_obj = MockUsingReturnValue(None)
        utils_askuser_answer_no_obj.result = 0
        utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)
        # Inner function so the mock.patch decorators apply only to the import run.
        @mock.patch('import_data_to_db.utils.NotFoundQuestion')
        @mock.patch('db_utils.QgsProject.instance', utils_for_tests.MidvattenTestSpatialiteNotCreated.mock_instance_settings_database)
        @mock.patch('import_data_to_db.utils.Askuser')
        @mock.patch('qgis.utils.iface', autospec=True)
        @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
        @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
        @mock.patch('import_data_to_db.utils.select_files')
        def _test_wlvllogg_import_from_diveroffice_files(self, filenames, mock_filenames, mock_skippopup, mock_encoding, mock_iface, mock_askuser, mock_notfoundquestion):
            # The user cancels the "obsid not found" dialog.
            mock_notfoundquestion.return_value.answer = 'cancel'
            mock_notfoundquestion.return_value.value = 'rb1'
            mock_notfoundquestion.return_value.reuse_column = 'location'
            mock_filenames.return_value = filenames
            mock_encoding.return_value = ['utf-8']
            ms = MagicMock()
            ms.settingsdict = OrderedDict()
            importer = DiverofficeImport(self.iface.mainWindow(), ms)
            importer.select_files_and_load_gui()
            importer.import_all_data.checked = True
            importer.confirm_names.checked = False
            answer = importer.start_import(importer.files, importer.skip_rows.checked, importer.confirm_names.checked, importer.import_all_data.checked)
            return answer
        answer = _test_wlvllogg_import_from_diveroffice_files(self, filenames)
        # Nothing must have been imported.
        test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('''SELECT obsid, date_time, head_cm, 
temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''))
        reference_string = r'''(True, [])'''
        assert test_string == reference_string
def test_calc_selected(self, mock_selected_obsids):
    """calcselected() must compute level_masl only for the selected obsid (rb1),
    leaving rb2's h_toc/level_masl as None in the output (QtWidgets variant).
    """
    mock_selected_obsids.return_value = ['rb1']
    # Two obsids with manual measurements; only rb1 is "selected".
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''')
    db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', 4)''')
    db_utils.sql_alter_db('''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')''')
    # Date window spans the measurement date.
    self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
    self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
    self.calclvl.calcselected()
    test_string = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid'))
    # rb1: level_masl = h_toc - meas = 1 - 222; rb2 untouched.
    reference_string = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0), (rb2, 2005-01-01 00:00:00, 444.0, None, None)])'
    assert test_string == reference_string
def test_calibrlogger_add_to_level_masl(self, mock_messagebar):
    """add_to_level_masl() with '50' must raise the stored logger level
    from 100 to 150 (u-string variant of the test).
    """
    db_utils.sql_alter_db(u"INSERT INTO obs_points (obsid) VALUES ('rb1')")
    db_utils.sql_alter_db(u"INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    calibrlogger.update_plot()
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date(u'2000-01-01 00:00:00'))
    calibrlogger.Add2Levelmasl.setText(u'50')
    gui_utils.set_combobox(calibrlogger.combobox_obsid, u'rb1 (uncalibrated)')
    calibrlogger.add_to_level_masl()
    test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'SELECT * FROM w_levels_logger'))
    ref = u'(True, [(rb1, 2017-02-01 00:00, None, None, None, 150.0, None)])'
    print(test)
    assert test == ref
def get_distinct_values(tablename, columnname):
    """Return the distinct values of one column as unicode strings (u-string variant).

    Returns an empty list (with a messagebar error) when arguments are missing
    or the query fails.
    """
    if not (tablename and columnname):
        return []
    sql = '''SELECT DISTINCT %s FROM %s''' % (columnname, tablename)
    connection_ok, result = db_utils.sql_load_fr_db(sql)
    if connection_ok:
        return [ru(row[0]) for row in result]
    utils.MessagebarAndLog.critical(
        bar_msg=utils.sql_failed_msg(),
        log_msg=ru(QCoreApplication.translate(u'DistinctValuesBrowser', u"""Cannot get data from sql %s""")) % ru(sql))
    return []
def test_calibrlogger_add_to_level_masl(self, mock_messagebar):
    """add_to_level_masl() with '50' must raise the stored logger level
    from 100 to 150 (plain-string variant of the test).
    """
    db_utils.sql_alter_db("INSERT INTO obs_points (obsid) VALUES ('rb1')")
    db_utils.sql_alter_db("INSERT INTO w_levels_logger (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    calibrlogger.update_plot()
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
    calibrlogger.Add2Levelmasl.setText('50')
    gui_utils.set_combobox(calibrlogger.combobox_obsid, 'rb1 (uncalibrated)')
    calibrlogger.add_to_level_masl()
    test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
    ref = '(True, [(rb1, 2017-02-01 00:00, None, None, None, 150.0, None)])'
    print(test)
    assert test == ref
def test_calibrlogger_calc_best_fit_log_pos_out_of_radius(self, mock_messagebar, skip_popup):
    """calc_best_fit() must not calibrate when the manual measurement is a month
    away from the logger reading (outside the search radius); level_masl stays None.
    """
    db_utils.sql_alter_db(u"INSERT INTO obs_points (obsid) VALUES ('rb1')")
    db_utils.sql_alter_db(u"INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)")
    db_utils.sql_alter_db(u"INSERT INTO w_levels_logger (obsid, date_time, head_cm) VALUES ('rb1', '2017-03-01 00:00', 50)")
    calibrlogger = Calibrlogger(self.iface.mainWindow(), self.ms)
    calibrlogger.update_plot()
    # State 1: presumably the "logger position" calibration mode — confirm against Calibrlogger.
    calibrlogger.loggerpos_masl_or_offset_state = 1
    calibrlogger.FromDateTime.setDateTime(date_utils.datestring_to_date(u'2000-01-01 00:00:00'))
    gui_utils.set_combobox(calibrlogger.combobox_obsid, u'rb1 (uncalibrated)')
    calibrlogger.calc_best_fit()
    test = utils_for_tests.create_test_string(db_utils.sql_load_fr_db(u'SELECT * FROM w_levels_logger'))
    # Row unchanged: head_cm kept, level_masl still None.
    ref = u'(True, [(rb1, 2017-03-01 00:00, 50.0, None, None, None, None)])'
    print(test)
    assert test == ref
def selection_layer_in_db_or_not(self):
    """Load layers with db-stored styles when the layer_styles table exists,
    otherwise offer to upgrade the database first.
    """
    # Currently unused; kept for a possible layer_styles-in-db workflow.
    sql = r"""select name from sqlite_master where name = 'layer_styles'"""
    styles_table_rows = db_utils.sql_load_fr_db(sql)[1]

    if styles_table_rows:
        # layer_styles table is present: styled loading is available.
        self.add_layers_new_method()
        return

    # Old database (plugin < 1.1) without stored styles: ask before upgrading.
    answer = utils.Askuser(
        "YesNo",
        """Your database was created with plugin version < 1.1 when layer styles were not stored in the database. You can update this database to the new standard with layer styles (symbols, colors, labels, input forms etc) stored in the database. This will increase plugin stability and multi-user experience but it will also change the layout of all your forms for entering data into the database. Anyway, an update of the database is recommended. Do you want to add these layer styles now?""",
        'Update database with layer styles?')
    if answer.result == 1:
        from .create_db import AddLayerStyles
        AddLayerStyles()
        self.add_layers_new_method()
    else:
        self.add_layers_old_method()
def test_calibrlogger_calc_best_fit_log_pos_out_of_radius(self, mock_messagebar, skip_popup):
    """Best-fit calibration with no reference measurement inside the search
    radius should leave the logger row unchanged.
    """
    # Seed: one reference level a month before the single logger reading.
    for statement in (
            "INSERT INTO obs_points (obsid) VALUES ('rb1')",
            "INSERT INTO w_levels (obsid, date_time, level_masl) VALUES ('rb1', '2017-02-01 00:00', 100)",
            "INSERT INTO w_levels_logger (obsid, date_time, head_cm) VALUES ('rb1', '2017-03-01 00:00', 50)"):
        db_utils.sql_alter_db(statement)

    dialog = Calibrlogger(self.iface.mainWindow(), self.ms)
    dialog.update_plot()
    dialog.loggerpos_masl_or_offset_state = 1
    dialog.FromDateTime.setDateTime(date_utils.datestring_to_date('2000-01-01 00:00:00'))
    gui_utils.set_combobox(dialog.combobox_obsid, 'rb1 (uncalibrated)')
    dialog.calc_best_fit()

    result = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db('SELECT * FROM w_levels_logger'))
    # Row unchanged: head_cm still 50.0, calculated columns still None.
    expected = '(True, [(rb1, 2017-03-01 00:00, 50.0, None, None, None, None)])'
    print(result)
    assert result == expected
def calc(self, obsids):
    """Fill in w_levels.h_toc from obs_points and compute level_masl = h_toc - meas
    for the given obsids within the dialog's date range.

    Behavior is controlled by two checkboxes:
    - checkBox_stop_if_null: abort entirely if any selected obsid has a NULL
      h_toc in obs_points; otherwise those obsids are silently dropped.
    - checkBox_overwrite_prev: when unchecked, rows that already have a
      level_masl are left untouched.

    Returns None (aborts with a popup) on NULL-h_toc problems; otherwise
    closes the dialog after reporting via the message bar/log.
    """
    fr_d_t = self.FromDateTime.dateTime().toPyDateTime()
    to_d_t = self.ToDateTime.dateTime().toPyDateTime()
    # Find selected obsids whose obs_points.h_toc is NULL.
    sql = """SELECT obsid FROM obs_points WHERE obsid IN ({}) AND h_toc IS NULL""".format(', '.join(["'{}'".format(x) for x in obsids]))
    obsid_with_h_toc_null = db_utils.sql_load_fr_db(sql)[1]
    if obsid_with_h_toc_null:
        obsid_with_h_toc_null = [x[0] for x in obsid_with_h_toc_null]
        if self.checkBox_stop_if_null.isChecked():
            # Strict mode: any NULL h_toc aborts the whole calculation.
            any_nulls = [obsid for obsid in obsids if obsid in obsid_with_h_toc_null]
            if any_nulls:
                utils.pop_up_info(ru(QCoreApplication.translate('Calclvl', 'Adjustment aborted! There seems to be NULL values in your table obs_points, column h_toc.')), ru(QCoreApplication.translate('Calclvl', 'Error')))
                return None
        else:
            # Lenient mode: drop the NULL-h_toc obsids and continue with the rest.
            obsids = [obsid for obsid in obsids if obsid not in obsid_with_h_toc_null]
            if not obsids:
                utils.pop_up_info(ru(QCoreApplication.translate('Calclvl', 'Adjustment aborted! All h_tocs were NULL.')), ru(QCoreApplication.translate('Calclvl', 'Error')))
                return None
    formatted_obsids = ', '.join(["'{}'".format(x) for x in obsids])
    where_args = {'fr_dt': str(fr_d_t), 'to_dt': str(to_d_t), 'obsids': formatted_obsids}
    # Shared WHERE clause for both UPDATEs: rows with a measurement inside
    # the chosen date range, for the remaining obsids.
    where_sql = """meas IS NOT NULL AND date_time >= '{fr_dt}' AND date_time <= '{to_dt}' AND obsid IN ({obsids})""".format(**where_args)
    if not self.checkBox_overwrite_prev.isChecked():
        # Don't touch rows that already have a calculated level_masl.
        where_sql += """ AND level_masl IS NULL """
    # Step 1: copy h_toc from obs_points into the matching w_levels rows.
    sql1 = """UPDATE w_levels SET h_toc = (SELECT obs_points.h_toc FROM obs_points WHERE w_levels.obsid = obs_points.obsid) WHERE {}""".format(where_sql)
    # NOTE: log_msg is evaluated BEFORE sql1 runs, so it summarizes the rows
    # about to be updated — the call order here is significant.
    self.updated_h_tocs = self.log_msg(where_sql)
    db_utils.sql_alter_db(sql1)
    # Step 2: same rows, but only where step 1 produced a non-NULL h_toc.
    where_sql += """ AND h_toc IS NOT NULL"""
    # The explicit "h_toc IS NOT NULL AND" below duplicates the condition
    # just appended to where_sql; harmless but redundant.
    sql2 = """UPDATE w_levels SET level_masl = h_toc - meas WHERE h_toc IS NOT NULL AND {}""".format(where_sql)
    self.updated_level_masl = self.log_msg(where_sql)
    db_utils.sql_alter_db(sql2)
    utils.MessagebarAndLog.info(bar_msg=ru(QCoreApplication.translate('Calclvl', 'Calculation done, see log message panel')),
                                log_msg=ru(QCoreApplication.translate('Calclvl', 'H_toc added and level_masl calculated for\nobsid;min date;max date;calculated number of measurements: \n%s'))%(self.updated_level_masl))
    self.close()
def test_calc_selected_dont_overwrite(self, mock_selected_obsids, mock_messagebar):
    """With overwrite unchecked, rows that already have a level_masl keep it."""
    mock_selected_obsids.return_value = ['rb1', 'rb2']
    # rb1: one row to calculate. rb2: one row to calculate plus one row with
    # an existing level_masl (667) that must be left untouched.
    for statement in (
            '''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb1', 1)''',
            '''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb1', 222, '2005-01-01 00:00:00')''',
            '''INSERT INTO obs_points (obsid, h_toc) VALUES ('rb2', 4)''',
            '''INSERT into w_levels (obsid, meas, date_time) VALUES ('rb2', 444, '2005-01-01 00:00:00')''',
            '''INSERT into w_levels (obsid, meas, level_masl, date_time) VALUES ('rb2', 555, 667, '2005-01-02 00:00:00')'''):
        db_utils.sql_alter_db(statement)

    self.calclvl.FromDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.FromDateTime.setDateTime(datestring_to_date('2000-01-01 00:00:00'))
    self.calclvl.ToDateTime = QtWidgets.QDateTimeEdit()
    self.calclvl.ToDateTime.setDateTime(datestring_to_date('2010-01-01 00:00:00'))
    self.calclvl.checkBox_overwrite_prev.setChecked(False)
    self.calclvl.calcselected()

    result = utils_for_tests.create_test_string(
        db_utils.sql_load_fr_db('SELECT obsid, date_time, meas, h_toc, level_masl FROM w_levels ORDER BY obsid, date_time'))
    expected = '(True, [(rb1, 2005-01-01 00:00:00, 222.0, 1.0, -221.0), (rb2, 2005-01-01 00:00:00, 444.0, 4.0, -440.0), (rb2, 2005-01-02 00:00:00, 555.0, None, 667.0)])'
    print(str(mock_messagebar.mock_calls))
    print(result)
    assert result == expected
def test_wlvllogg_import_from_diveroffice_files_skip_obsid(self):
    """Importing three Diveroffice files where the user answers 'skip' for the
    third obsid (rb3) must import rb1 and rb2 only.
    """
    # Each tuple is the line-by-line content of one Diveroffice export file.
    files = [('Location=rb1',
              'Date/time,Water head[cm],Temperature[°C]',
              '2016/03/15 10:30:00,1,10',
              '2016/03/15 11:00:00,11,101'),
             ('Location=rb2',
              'Date/time,Water head[cm],Temperature[°C]',
              '2016/04/15 10:30:00,2,20',
              '2016/04/15 11:00:00,21,201'),
             ('Location=rb3',
              'Date/time,Water head[cm],Temperature[°C],Conductivity[mS/cm]',
              '2016/05/15 10:30:00,3,30,5',
              '2016/05/15 11:00:00,31,301,6')
             ]
    # Only rb1 and rb2 exist in the database; rb3 will trigger the
    # NotFoundQuestion dialog (mocked below to answer 'skip').
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('rb1')''')
    db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('rb2')''')
    DiverofficeImport.charsetchoosen = 'utf-8'
    with utils.tempinput('\n'.join(files[0]), DiverofficeImport.charsetchoosen) as f1:
        with utils.tempinput('\n'.join(files[1]), DiverofficeImport.charsetchoosen) as f2:
            with utils.tempinput('\n'.join(files[2]), DiverofficeImport.charsetchoosen) as f3:
                filenames = [f1, f2, f3]
                # NOTE(review): these two Askuser mocks appear unused in the
                # inner function below — possibly leftovers.
                utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                utils_askuser_answer_no_obj.result = 0
                utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                # Decorators are applied bottom-up, so the injected mock
                # arguments after *filenames* map to the patches in reverse
                # order (select_files -> pop_up_info -> getText -> iface ->
                # Askuser -> NotFoundQuestion -> MessagebarAndLog).
                @mock.patch("midvatten_utils.MessagebarAndLog")
                @mock.patch('import_data_to_db.utils.NotFoundQuestion')
                @mock.patch('db_utils.QgsProject.instance', utils_for_tests.MidvattenTestSpatialiteNotCreated.mock_instance_settings_database)
                @mock.patch('import_data_to_db.utils.Askuser')
                @mock.patch('qgis.utils.iface', autospec=True)
                @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
                @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                @mock.patch('import_data_to_db.utils.select_files')
                def _test_wlvllogg_import_from_diveroffice_files(self, filenames, mock_filenames, mock_skippopup, mock_encoding, mock_iface, mock_askuser, mock_notfoundquestion, mock_messagebarandlog):
                    # One scripted answer per imported file: accept rb1 and
                    # rb2, skip rb3.
                    mocks_notfoundquestion = []
                    for answer, value in [['ok', 'rb1'], ['ok', 'rb2'], ['skip', 'rb3']]:
                        a_mock = MagicMock()
                        a_mock.answer = answer
                        a_mock.value = value
                        a_mock.reuse_column = 'location'
                        mocks_notfoundquestion.append(a_mock)
                    mock_notfoundquestion.side_effect = mocks_notfoundquestion
                    mock_filenames.return_value = filenames
                    mock_encoding.return_value = ['utf-8']
                    ms = MagicMock()
                    ms.settingsdict = OrderedDict()
                    importer = DiverofficeImport(self.iface.mainWindow(), ms)
                    importer.select_files_and_load_gui()
                    importer.start_import(importer.files, importer.skip_rows.checked, importer.confirm_names.checked, importer.import_all_data.checked)
                    print('\n'.join([str(x) for x in mock_messagebarandlog.mock_calls]))
                _test_wlvllogg_import_from_diveroffice_files(self, filenames)
                # Only rb1 and rb2 rows may exist; rb3 was skipped.
                test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''))
                reference_string = r'''(True, [(rb1, 2016-03-15 10:30:00, 1.0, 10.0, None, None, None), (rb1, 2016-03-15 11:00:00, 11.0, 101.0, None, None, None), (rb2, 2016-04-15 10:30:00, 2.0, 20.0, None, None, None), (rb2, 2016-04-15 11:00:00, 21.0, 201.0, None, None, None)])'''
                print(test_string)
                print(reference_string)
                assert test_string == reference_string