def test_wlvllogg_import_from_diveroffice_files_cancel(self):
        """Answering 'cancel' in the obsid-not-found dialog must abort the
        import and leave w_levels_logger empty.
        """
        # One logger file for location 'rb2'; the database only contains
        # obsid 'Rb1', so the importer has to ask which obsid to use.
        files = [('Location=rb2', 'Date/time,Water head[cm],Temperature[°C]',
                  '2016/03/15 10:30:00,1,10', '2016/03/15 11:00:00,11,101')]

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid) VALUES ('Rb1')''')

        DiverofficeImport.charsetchoosen = 'utf-8'
        with utils.tempinput('\n'.join(files[0]),
                             DiverofficeImport.charsetchoosen) as f1:
            filenames = [f1]
            # Canned Askuser stub answering 'no' (result == 0).
            utils_askuser_answer_no_obj = MockUsingReturnValue(None)
            utils_askuser_answer_no_obj.result = 0
            utils_askuser_answer_no = MockUsingReturnValue(
                utils_askuser_answer_no_obj)

            # mock.patch decorators are applied bottom-up: select_files
            # supplies the first injected mock (mock_filenames) and
            # NotFoundQuestion the last; the two patches with explicit
            # replacement objects inject nothing.
            @mock.patch('import_data_to_db.utils.NotFoundQuestion')
            @mock.patch('db_utils.QgsProject.instance',
                        utils_for_tests.MidvattenTestPostgisNotCreated.
                        mock_instance_settings_database)
            @mock.patch('db_utils.get_postgis_connections',
                        utils_for_tests.MidvattenTestPostgisNotCreated.
                        mock_postgis_connections)
            @mock.patch('import_data_to_db.utils.Askuser')
            @mock.patch('qgis.utils.iface', autospec=True)
            @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
            @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
            @mock.patch('import_data_to_db.utils.select_files')
            def _test_wlvllogg_import_from_diveroffice_files(
                    self, filenames, mock_filenames, mock_skippopup,
                    mock_encoding, mock_iface, mock_askuser,
                    mock_notfoundquestion):
                # The user cancels the "obsid not found" dialog, so the
                # suggested value/reuse_column must never be applied.
                mock_notfoundquestion.return_value.answer = 'cancel'
                mock_notfoundquestion.return_value.value = 'rb1'
                mock_notfoundquestion.return_value.reuse_column = 'location'
                mock_filenames.return_value = filenames
                mock_encoding.return_value = ['utf-8']

                ms = MagicMock()
                ms.settingsdict = OrderedDict()
                importer = DiverofficeImport(self.iface.mainWindow(), ms)
                importer.select_files_and_load_gui()
                importer.import_all_data.checked = True
                importer.confirm_names.checked = False
                answer = importer.start_import(
                    importer.files, importer.skip_rows.checked,
                    importer.confirm_names.checked,
                    importer.import_all_data.checked)

                return answer

            answer = _test_wlvllogg_import_from_diveroffice_files(
                self, filenames)

            # A cancelled import must not have written any logger rows.
            test_string = utils_for_tests.create_test_string(
                db_utils.sql_load_fr_db(
                    '''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''
                ))
            reference_string = r'''(True, [])'''
            assert test_string == reference_string
# Example #2
# 0
class TestWFlowCalcAveflow(object):
    """Tests for w_flow_calc_aveflow.Calcave (average-flow calculation)."""

    # Canned return values used as replacement objects in mock.patch below.
    return_int = MockUsingReturnValue(int)
    db_all_distinct_obsids = MockUsingReturnValue([True, [u'1', u'2']])
    selected_obs = MockUsingReturnValue([u'3', u'4'])
    mocked_iface = MockQgisUtilsIface()
    # sql_load_fr_db stub: the answer is chosen by matching a substring of
    # the incoming SQL text against these keys.
    utilssql_load_fr_db = MockReturnUsingDictIn(
        {
            'select distinct obsid, instrumentid from':
            (True, [(u'1', u'inst1'), (u'2', u'inst2')]),
            'select date_time, reading from w_flow where flowtype':
            (True, [(u'2015-01-01 00:00:00', u'10'),
                    (u'2016-01-01 00:00:00', u'20')]),
            'insert or ignore into w_flow': (True, None)
        }, 0)
    return_none = MockUsingReturnValue(None)

    def setUp(self):
        """Create the Calcave dialog on a fresh widget under a dummy iface."""
        self.iface = DummyInterface()
        # NOTE(review): QWidget lives in QtWidgets under Qt5 — this relies on
        # the module's QtGui import providing it (confirm against imports).
        widget = QtGui.QWidget()
        self.calcave = w_flow_calc_aveflow.Calcave(widget)

    @mock.patch('w_flow_calc_aveflow.utils.sql_load_fr_db',
                db_all_distinct_obsids.get_v)
    @mock.patch('w_flow_calc_aveflow.Calcave.calculateaveflow',
                return_int.get_v)
    def test_calcall(self):
        """calcall() must pick up all distinct obsids from the database."""
        self.calcave.calcall()
        result_list = self.calcave.observations
        reference_list = ['1', '2']
        assert result_list == reference_list

    @mock.patch('qgis.utils.iface', mocked_iface)
    @mock.patch('w_flow_calc_aveflow.utils.getselectedobjectnames',
                selected_obs.get_v)
    @mock.patch('w_flow_calc_aveflow.Calcave.calculateaveflow',
                return_int.get_v)
    def test_calcselected(self):
        """calcselected() must use only the obsids selected in the map."""
        self.calcave.calcselected()
        result_list = self.calcave.observations
        reference_list = ['3', '4']
        assert result_list == reference_list

    @mock.patch('qgis.utils.iface', mocked_iface)
    @mock.patch('w_flow_calc_aveflow.utils.sql_alter_db', return_none.get_v)
    @mock.patch('w_flow_calc_aveflow.utils.sql_load_fr_db',
                utilssql_load_fr_db.get_v)
    def test_calculateaveflow(self):
        """Average flow over one year of accumulated volume must produce the
        expected insert statements (captured by the sql_alter_db stub).
        """
        self.calcave.observations = ['1', '2']
        self.calcave.calculateaveflow()
        # datestr2num('2015-01-01 00:00:00') == 735599.0
        # datestr2num('2016-01-01 00:00:00') == 735964.0
        # DeltaTime = 24*3600*(735964.0 - 735599.0) == 31536000.0
        #Aveflow = Volume/DeltaTime#L/s == 10000 / 31536000.0 = 0.000317097919838

        reference_list = [
            u"insert or ignore into w_flow(obsid,instrumentid,flowtype,date_time,reading,unit) values('1','inst1','Aveflow','2016-01-01 00:00:00','0.000317097919838','l/s')",
            u"insert or ignore into w_flow(obsid,instrumentid,flowtype,date_time,reading,unit) values('2','inst2','Aveflow','2016-01-01 00:00:00','0.000317097919838','l/s')"
        ]
        assert self.return_none.args_called_with == reference_list
    def test_wlvllogg_import_from_diveroffice_files(self):
        """Import three Diveroffice files, reusing obsid 'rb1' for all of
        them via the NotFoundQuestion answer with reuse_column 'location'.

        NOTE(review): this test looks pasted in from an import-test class —
        it uses DiverofficeImport/db_utils fixtures that the rest of
        TestWFlowCalcAveflow never touches; verify it belongs here.
        """
        files = [('Location=rb1',
                'Date/time,Water head[cm],Temperature[°C]',
                '2016/03/15 10:30:00,1,10',
                '2016/03/15 11:00:00,11,101'),
                ('Location=rb2',
                'Date/time,Water head[cm],Temperature[°C]',
                '2016/04/15 10:30:00,2,20',
                '2016/04/15 11:00:00,21,201'),
                ('Location=rb3',
                'Date/time,Water head[cm],Temperature[°C],Conductivity[mS/cm]',
                '2016/05/15 10:30:00,3,30,5',
                '2016/05/15 11:00:00,31,301,6')
                 ]

        db_utils.sql_alter_db('''INSERT INTO obs_points (obsid) VALUES ('rb1')''')

        DiverofficeImport.charsetchoosen = 'utf-8'
        with utils.tempinput('\n'.join(files[0]), DiverofficeImport.charsetchoosen) as f1:
            with utils.tempinput('\n'.join(files[1]), DiverofficeImport.charsetchoosen) as f2:
                with utils.tempinput('\n'.join(files[2]), DiverofficeImport.charsetchoosen) as f3:

                    filenames = [f1, f2, f3]
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    # Decorators apply bottom-up: select_files injects the
                    # first mock, NotFoundQuestion the last; patches with
                    # explicit replacement objects inject nothing.
                    @mock.patch('import_data_to_db.utils.NotFoundQuestion')
                    @mock.patch('db_utils.QgsProject.instance', utils_for_tests.MidvattenTestPostgisNotCreated.mock_instance_settings_database)
                    @mock.patch('db_utils.get_postgis_connections', utils_for_tests.MidvattenTestPostgisNotCreated.mock_postgis_connections)
                    @mock.patch('import_data_to_db.utils.Askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch('import_data_to_db.utils.select_files')
                    def _test_wlvllogg_import_from_diveroffice_files(self, filenames, mock_filenames, mock_skippopup, mock_encoding, mock_iface, mock_askuser, mock_notfoundquestion):
                        # Accept 'rb1' for every unknown location and reuse it.
                        mock_notfoundquestion.return_value.answer = 'ok'
                        mock_notfoundquestion.return_value.value = 'rb1'
                        mock_notfoundquestion.return_value.reuse_column = 'location'
                        mock_filenames.return_value = filenames
                        mock_encoding.return_value = ['utf-8']

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = DiverofficeImport(self.iface.mainWindow(), ms)
                        importer.select_files_and_load_gui()

                        importer.start_import(importer.files, importer.skip_rows.checked, importer.confirm_names.checked, importer.import_all_data.checked)


                    _test_wlvllogg_import_from_diveroffice_files(self, filenames)

                    # All six rows end up under obsid 'rb1'.
                    test_string = utils_for_tests.create_test_string(db_utils.sql_load_fr_db('''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''))
                    reference_string = r'''(True, [(rb1, 2016-03-15 10:30:00, 1.0, 10.0, None, None, None), (rb1, 2016-03-15 11:00:00, 11.0, 101.0, None, None, None), (rb1, 2016-04-15 10:30:00, 2.0, 20.0, None, None, None), (rb1, 2016-04-15 11:00:00, 21.0, 201.0, None, None, None), (rb1, 2016-05-15 10:30:00, 3.0, 30.0, 5.0, None, None), (rb1, 2016-05-15 11:00:00, 31.0, 301.0, 6.0, None, None)])'''
                    assert test_string == reference_string
# Example #4
# 0
class TestAskUser(object):
    """Tests for the utils.Askuser('DateShift') answer parsing."""

    # Dialog stubs: one returns a date-shift string, the other simulates
    # pressing cancel (empty text).
    dateshift_text = MockUsingReturnValue(['-1 hours'])
    cancelled_text = MockUsingReturnValue([''])

    @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText',
                dateshift_text.get_v)
    def test_askuser_dateshift(self):
        """'-1 hours' is split into an amount and a unit."""
        result = utils.Askuser('DateShift').result
        assert result == ['-1', 'hours']

    @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText',
                cancelled_text.get_v)
    def test_askuser_dateshift_cancel(self):
        """An empty answer is reported as 'cancel'."""
        result = utils.Askuser('DateShift').result
        assert result == 'cancel'
# Example #5
# 0
class TestExport(utils_for_tests.MidvattenTestPostgisDbSv):
    """CSV export tests run against the Swedish PostGIS test database.

    The test inserts one row into every exportable table, runs the
    midvatten csv export and compares the written files with a reference.

    NOTE(review): a second ``class TestExport`` (the spatialite variant) is
    defined later in this module and shadows this one at module scope;
    consider renaming one of the two classes.
    """
    # Canned Askuser answers: result 1 == 'yes', result 0 == 'no'.
    answer_yes_obj = MockUsingReturnValue()
    answer_yes_obj.result = 1
    answer_no_obj = MockUsingReturnValue()
    answer_no_obj.result = 0
    answer_yes = MockUsingReturnValue(answer_yes_obj)
    crs_question = MockUsingReturnValue([3006])
    # Answers keyed on a substring of the question text: decline the
    # "It is a strong..." warning, accept the "Please note!..." one.
    mock_askuser = MockReturnUsingDictIn(
        {
            u'It is a strong': answer_no_obj,
            u'Please note!\nThere are ': answer_yes_obj
        }, 1)
    skip_popup = MockUsingReturnValue('')
    # Simulated map selection, keyed on layer name.
    mock_selection = MockReturnUsingDictIn(
        {
            u'obs_points': (u'P1', ),
            u'obs_lines': (u'L1', )
        }, 0)
    # Every csv file the exporter writes; used for cleanup in tearDown().
    exported_csv_files = [
        os.path.join(TEMP_DIR, filename) for filename in [
            'obs_points.csv', 'comments.csv', 'w_levels.csv', 'w_flow.csv',
            'w_qual_lab.csv', 'w_qual_field.csv', 'stratigraphy.csv',
            'meteo.csv', 'obs_lines.csv', 'seismic_data.csv',
            'zz_flowtype.csv', 'zz_meteoparam.csv', 'zz_staff.csv',
            'zz_strat.csv', 'zz_capacity.csv'
        ]
    ]
    # Subset without the zz_ lookup tables; only these files are diffed.
    exported_csv_files_no_zz = [
        os.path.join(TEMP_DIR, filename) for filename in [
            'obs_points.csv', 'comments.csv', 'w_levels.csv', 'w_flow.csv',
            'w_qual_lab.csv', 'w_qual_field.csv', 'stratigraphy.csv',
            'meteo.csv', 'obs_lines.csv', 'seismic_data.csv'
        ]
    ]

    # Decorators apply bottom-up; only patches without an explicit
    # replacement inject a mock, giving (mock_iface, mock_savepath,
    # mock_messagebar).
    @mock.patch('midvatten_utils.MessagebarAndLog')
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_selection.get_v)
    # Fixed from 'PyQt4.QtGui.QFileDialog...': every other patch in this
    # module targets qgis.PyQt.QtWidgets, and PyQt4 is not importable in a
    # QGIS3/Python3 environment.
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getExistingDirectory')
    @mock.patch('qgis.utils.iface', autospec=True)
    @mock.patch('db_utils.QgsProject.instance', utils_for_tests.
                MidvattenTestPostgisNotCreated.mock_instance_settings_database)
    @mock.patch(
        'db_utils.get_postgis_connections',
        utils_for_tests.MidvattenTestPostgisNotCreated.mock_postgis_connections
    )
    def test_export_csv(self, mock_iface, mock_savepath, mock_messagebar):
        """Insert one row per table, export to csv and diff the files."""
        mock_savepath.return_value = u'/tmp/'
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO zz_staff (staff) VALUES ('s1')''')
        db_utils.sql_alter_db(
            u'''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'labpar1', 's1', '2015-01-01 01:00:00', 'unit1')'''
        )

        db_utils.sql_alter_db(
            u'''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')'''
        )
        db_utils.sql_alter_db(
            u'''INSERT INTO stratigraphy (obsid, stratid) VALUES ('P1', 1)''')
        db_utils.sql_alter_db(
            u'''INSERT INTO obs_lines (obsid) VALUES ('L1')''')
        db_utils.sql_alter_db(
            u'''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''')
        db_utils.sql_alter_db(
            u'''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')'''
        )

        self.midvatten.export_csv()

        file_contents = []
        # Use self.<attr> rather than TestExport.<attr>: the class name is
        # rebound by a later class definition in this module, so a
        # class-name lookup would read the other class's attribute.
        for filename in self.exported_csv_files_no_zz:
            with io.open(filename, 'r', encoding='utf-8') as f:
                file_contents.append(os.path.basename(filename) + '\n')
                if os.path.basename(filename) == u'obs_points.csv':
                    # Strip the last column of obs_points.csv before
                    # comparing (presumably the geometry text, whose
                    # representation varies — confirm).
                    file_contents.append([
                        u';'.join(l.replace('\r', '').split(u';')[:-1]) + u'\n'
                        for l in f
                    ])
                else:
                    file_contents.append([l.replace('\r', '') for l in f])
        test_string = utils_for_tests.create_test_string(file_contents)

        # Debug aid: persist the generated string for manual inspection.
        with io.open('/tmp/refstring.txt', 'w', encoding='utf-8') as of:
            of.write(test_string)

        reference_string = '\n'.join([
            "[obs_points.csv",
            ", [obsid;name;place;type;length;drillstop;diam;material;screen;capacity;drilldate;wmeas_yn;wlogg_yn;east;north;ne_accur;ne_source;h_toc;h_tocags;h_gs;h_accur;h_syst;h_source;source;com_onerow;com_html",
            ", P1;;;;;;;;;;;;;633466.0;711659.0;;;;;;;;;;;", "], comments.csv",
            ", [obsid;date_time;comment;staff",
            ", P1;2015-01-01 00:00:00;comment1;s1", "], w_levels.csv",
            ", [obsid;date_time;meas;h_toc;level_masl;comment",
            ", P1;2015-01-02 00:00:01;2.0;;;", "], w_flow.csv",
            ", [obsid;instrumentid;flowtype;date_time;reading;unit;comment",
            ", P1;inst1;Momflow;2015-04-13 00:00:00;;l/s;",
            "], w_qual_lab.csv",
            ", [obsid;depth;report;project;staff;date_time;anameth;parameter;reading_num;reading_txt;unit;comment",
            ", P1;;report1;;s1;;;labpar1;;;;", "], w_qual_field.csv",
            ", [obsid;staff;date_time;instrument;parameter;reading_num;reading_txt;unit;depth;comment",
            ", P1;s1;2015-01-01 01:00:00;;labpar1;;;unit1;;",
            "], stratigraphy.csv",
            ", [obsid;stratid;depthtop;depthbot;geology;geoshort;capacity;development;comment",
            ", P1;1;;;;;;;", "], meteo.csv",
            ", [obsid;instrumentid;parameter;date_time;reading_num;reading_txt;unit;comment",
            ", P1;meteoinst;precip;2017-01-01 00:19:00;;;;",
            "], obs_lines.csv", ", [obsid;name;place;type;source;geometry",
            ", L1;;;;;", "], seismic_data.csv",
            ", [obsid;length;ground;bedrock;gw_table;comment", ", L1;5.0;;;;",
            "]]"
        ])

        assert test_string == reference_string

    def tearDown(self):
        """Remove the exported database and csv files, then run the
        base-class teardown."""
        # Delete database
        try:
            os.remove(EXPORT_DB_PATH)
        except OSError:
            pass

        for filename in self.exported_csv_files:
            try:
                os.remove(filename)
            except OSError:
                pass

        # Zero-argument super() binds to the defining class. The original
        # super(self.__class__, self) recurses forever if this class is
        # ever subclassed, and an explicit super(TestExport, self) would
        # resolve to the *later* TestExport rebinding in this module.
        super().tearDown()
class TestExport(utils_for_tests.MidvattenTestSpatialiteDbEn):
    """CSV and spatialite export tests against the English spatialite db.

    NOTE(review): this class reuses the name ``TestExport`` from the
    PostGIS variant defined earlier in this module and shadows it at
    module scope; consider giving the two classes distinct names.
    """
    # Canned Askuser answers: result 1 == 'yes', result 0 == 'no'.
    answer_yes_obj = MockUsingReturnValue()
    answer_yes_obj.result = 1
    answer_no_obj = MockUsingReturnValue()
    answer_no_obj.result = 0
    answer_yes = MockUsingReturnValue(answer_yes_obj)
    crs_question = MockUsingReturnValue([3006])
    # Answers keyed on a substring of the question text: decline the
    # "It is a strong..." warning, accept the "Please note!..." one.
    mock_askuser = MockReturnUsingDictIn(
        {
            'It is a strong': answer_no_obj,
            'Please note!\nThere are ': answer_yes_obj
        }, 1)
    skip_popup = MockUsingReturnValue('')
    # Simulated map selections, keyed on layer name.
    mock_selection = MockReturnUsingDictIn(
        {
            'obs_points': ('P1', ),
            'obs_lines': ('L1', )
        }, 0)
    mock_no_selection = MockReturnUsingDictIn(
        {
            'obs_points': tuple(),
            'obs_lines': tuple()
        }, 0)
    # Every csv file the exporter writes; used for cleanup.
    exported_csv_files = [
        os.path.join(TEMP_DIR, filename) for filename in [
            'obs_points.csv', 'comments.csv', 'w_levels.csv', 'w_flow.csv',
            'w_qual_lab.csv', 'w_qual_field.csv', 'stratigraphy.csv',
            'meteo.csv', 'obs_lines.csv', 'seismic_data.csv',
            'zz_flowtype.csv', 'zz_meteoparam.csv', 'zz_staff.csv',
            'zz_strat.csv', 'zz_capacity.csv'
        ]
    ]
    # Subset without the zz_ lookup tables; only these files are diffed.
    exported_csv_files_no_zz = [
        os.path.join(TEMP_DIR, filename) for filename in [
            'obs_points.csv', 'comments.csv', 'w_levels.csv', 'w_flow.csv',
            'w_qual_lab.csv', 'w_qual_field.csv', 'stratigraphy.csv',
            'meteo.csv', 'obs_lines.csv', 'seismic_data.csv'
        ]
    ]

    # Decorators apply bottom-up; the get_selected patch has an explicit
    # replacement, so the injected mocks are (mock_iface, mock_savepath).
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_selection.get_v)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getExistingDirectory')
    @mock.patch('qgis.utils.iface', autospec=True)
    def test_export_csv(self, mock_iface, mock_savepath):
        """Insert one row into every exportable table, export to csv with
        'P1'/'L1' selected and diff the written files against a reference.
        """
        mock_savepath.return_value = '/tmp/'
        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))'''
        )
        db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('s1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'labpar1', 's1', '2015-01-01 01:00:00', 'unit1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot) VALUES ('P1', 1, 0, 10)'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO obs_lines (obsid) VALUES ('L1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''')
        db_utils.sql_alter_db(
            '''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')'''
        )

        self.midvatten.export_csv()
        file_contents = []
        for filename in TestExport.exported_csv_files_no_zz:
            with io.open(filename, 'r', encoding='utf-8') as f:
                file_contents.append(os.path.basename(filename) + '\n')
                if os.path.basename(filename) == 'obs_points.csv':
                    # Strip the last column of obs_points.csv before
                    # comparing (presumably the geometry text, whose
                    # representation varies — confirm).
                    file_contents.append([
                        ';'.join(l.replace('\r', '').split(';')[:-1]) + '\n'
                        for l in f
                    ])
                else:
                    file_contents.append([l.replace('\r', '') for l in f])
        test_string = utils_for_tests.create_test_string(file_contents)

        # Debug aid: persist the generated string for manual inspection.
        with io.open('/tmp/refstring.txt', 'w', encoding='utf-8') as of:
            of.write(test_string)

        reference_string = '\n'.join([
            "[obs_points.csv",
            ", [obsid;name;place;type;length;drillstop;diam;material;screen;capacity;drilldate;wmeas_yn;wlogg_yn;east;north;ne_accur;ne_source;h_toc;h_tocags;h_gs;h_accur;h_syst;h_source;source;com_onerow;com_html",
            ", P1;;;;;;;;;;;;;633466.0;711659.0;;;;;;;;;;;", "], comments.csv",
            ", [obsid;date_time;comment;staff",
            ", P1;2015-01-01 00:00:00;comment1;s1", "], w_levels.csv",
            ", [obsid;date_time;meas;h_toc;level_masl;comment",
            ", P1;2015-01-02 00:00:01;2.0;;;", "], w_flow.csv",
            ", [obsid;instrumentid;flowtype;date_time;reading;unit;comment",
            ", P1;inst1;Momflow;2015-04-13 00:00:00;;l/s;",
            "], w_qual_lab.csv",
            ", [obsid;depth;report;project;staff;date_time;anameth;parameter;reading_num;reading_txt;unit;comment",
            ", P1;;report1;;s1;;;labpar1;;;;", "], w_qual_field.csv",
            ", [obsid;staff;date_time;instrument;parameter;reading_num;reading_txt;unit;depth;comment",
            ", P1;s1;2015-01-01 01:00:00;;labpar1;;;unit1;;",
            "], stratigraphy.csv",
            ", [obsid;stratid;depthtop;depthbot;geology;geoshort;capacity;development;comment",
            ", P1;1;0.0;10.0;;;;;", "], meteo.csv",
            ", [obsid;instrumentid;parameter;date_time;reading_num;reading_txt;unit;comment",
            ", P1;meteoinst;precip;2017-01-01 00:19:00;;;;",
            "], obs_lines.csv", ", [obsid;name;place;type;source;geometry",
            ", L1;;;;;", "], seismic_data.csv",
            ", [obsid;length;ground;bedrock;gw_table;comment", ", L1;5.0;;;;",
            "]]"
        ])
        print(test_string)
        print(reference_string)
        assert test_string == reference_string

    # Same as test_export_csv but with an empty map selection: the export
    # must then include every row (both P1 and P2 in obs_points).
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_no_selection.get_v)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getExistingDirectory')
    @mock.patch('qgis.utils.iface', autospec=True)
    def test_export_csv_no_selection(self, mock_iface, mock_savepath):
        """With nothing selected, export_csv() exports all rows."""
        mock_savepath.return_value = '/tmp/'
        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P2', ST_GeomFromText('POINT(1 2)', 3006))'''
        )
        db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('s1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'labpar1', 's1', '2015-01-01 01:00:00', 'unit1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot) VALUES ('P1', 1, 0, 10)'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO obs_lines (obsid) VALUES ('L1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''')
        db_utils.sql_alter_db(
            '''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')'''
        )

        self.midvatten.export_csv()
        file_contents = []
        for filename in TestExport.exported_csv_files_no_zz:
            with io.open(filename, 'r', encoding='utf-8') as f:
                file_contents.append(os.path.basename(filename) + '\n')
                if os.path.basename(filename) == 'obs_points.csv':
                    # Strip the last column of obs_points.csv before
                    # comparing (presumably the geometry text, whose
                    # representation varies — confirm).
                    file_contents.append([
                        ';'.join(l.replace('\r', '').split(';')[:-1]) + '\n'
                        for l in f
                    ])
                else:
                    file_contents.append([l.replace('\r', '') for l in f])
        test_string = utils_for_tests.create_test_string(file_contents)

        # Debug aid: persist the generated string for manual inspection.
        with io.open('/tmp/refstring.txt', 'w', encoding='utf-8') as of:
            of.write(test_string)

        reference_string = '\n'.join([
            "[obs_points.csv",
            ", [obsid;name;place;type;length;drillstop;diam;material;screen;capacity;drilldate;wmeas_yn;wlogg_yn;east;north;ne_accur;ne_source;h_toc;h_tocags;h_gs;h_accur;h_syst;h_source;source;com_onerow;com_html",
            ", P1;;;;;;;;;;;;;633466.0;711659.0;;;;;;;;;;;",
            ", P2;;;;;;;;;;;;;1.0;2.0;;;;;;;;;;;", "], comments.csv",
            ", [obsid;date_time;comment;staff",
            ", P1;2015-01-01 00:00:00;comment1;s1", "], w_levels.csv",
            ", [obsid;date_time;meas;h_toc;level_masl;comment",
            ", P1;2015-01-02 00:00:01;2.0;;;", "], w_flow.csv",
            ", [obsid;instrumentid;flowtype;date_time;reading;unit;comment",
            ", P1;inst1;Momflow;2015-04-13 00:00:00;;l/s;",
            "], w_qual_lab.csv",
            ", [obsid;depth;report;project;staff;date_time;anameth;parameter;reading_num;reading_txt;unit;comment",
            ", P1;;report1;;s1;;;labpar1;;;;", "], w_qual_field.csv",
            ", [obsid;staff;date_time;instrument;parameter;reading_num;reading_txt;unit;depth;comment",
            ", P1;s1;2015-01-01 01:00:00;;labpar1;;;unit1;;",
            "], stratigraphy.csv",
            ", [obsid;stratid;depthtop;depthbot;geology;geoshort;capacity;development;comment",
            ", P1;1;0.0;10.0;;;;;", "], meteo.csv",
            ", [obsid;instrumentid;parameter;date_time;reading_num;reading_txt;unit;comment",
            ", P1;meteoinst;precip;2017-01-01 00:19:00;;;;",
            "], obs_lines.csv", ", [obsid;name;place;type;source;geometry",
            ", L1;;;;;", "], seismic_data.csv",
            ", [obsid;length;ground;bedrock;gw_table;comment", ", L1;5.0;;;;",
            "]]"
        ])
        print(test_string)
        print(reference_string)
        assert test_string == reference_string

    # Decorators apply bottom-up; patches with explicit replacement objects
    # (get_selected..., Askuser) inject nothing, so the injected mocks map
    # as: pop_up_info -> mock_skip_popup, iface -> mock_iface,
    # find_layer -> mock_find_layer, getSaveFileName -> mock_newdbpath,
    # verify_msettings... -> mock_verify, NotFoundQuestion -> mock_locale,
    # QInputDialog.getText -> mock_createdb_crs_question,
    # MessagebarAndLog -> mock_messagebar.
    @mock.patch('midvatten_utils.MessagebarAndLog')
    @mock.patch('midvatten_utils.QtWidgets.QInputDialog.getText')
    @mock.patch('create_db.utils.NotFoundQuestion')
    @mock.patch('midvatten_utils.Askuser', answer_yes.get_v)
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_selection.get_v)
    @mock.patch('midvatten_utils.verify_msettings_loaded_and_layer_edit_mode',
                autospec=True)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName')
    @mock.patch('midvatten_utils.find_layer', autospec=True)
    @mock.patch('qgis.utils.iface', autospec=True)
    @mock.patch('export_data.utils.pop_up_info', autospec=True)
    def test_export_spatialite(self, mock_skip_popup, mock_iface,
                               mock_find_layer, mock_newdbpath, mock_verify,
                               mock_locale, mock_createdb_crs_question,
                               mock_messagebar):
        """Populate every exportable table, export to a new spatialite db
        and verify its contents by querying the exported file directly.
        """
        mock_find_layer.return_value.crs.return_value.authid.return_value = 'EPSG:3006'
        # getText answer for the CRS question: (value, ok-pressed).
        mock_createdb_crs_question.return_value = [3006, True]
        dbconnection = db_utils.DbConnectionManager()
        mock_newdbpath.return_value = (EXPORT_DB_PATH, '')
        mock_verify.return_value = 0

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('s1')''',
                              dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'par1', 's1', '2015-01-01 01:00:00', 'unit1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot) VALUES ('P1', 1, 0, 10)''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO obs_lines (obsid) VALUES ('L1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')''',
            dbconnection=dbconnection)

        dbconnection.commit_and_closedb()

        # NotFoundQuestion answer: accept 'sv_SE' as the database locale.
        mock_locale.return_value.answer = 'ok'
        mock_locale.return_value.value = 'sv_SE'
        self.midvatten.export_spatialite()

        sql_list = [
            '''select obsid, ST_AsText(geometry) from obs_points''',
            '''select staff from zz_staff''',
            '''select obsid, date_time, staff, comment from comments''',
            '''select obsid, parameter, report, staff from w_qual_lab''',
            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            '''select obsid, date_time, meas from w_levels''',
            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
            '''select obsid from obs_lines''',
            '''select obsid, length from seismic_data''',
            '''select obsid, instrumentid, parameter, date_time from meteo'''
        ]

        # Inspect the exported database directly, bypassing db_utils.
        conn = db_utils.connect_with_spatialite_connect(EXPORT_DB_PATH)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)
        reference_string = [
            '''[''', '''select obsid, ST_AsText(geometry) from obs_points''',
            ''', [(P1, POINT(633466 711659))], ''',
            '''select staff from zz_staff''', ''', [(s1)], ''',
            '''select obsid, date_time, staff, comment from comments''',
            ''', [(P1, 2015-01-01 00:00:00, s1, comment1)], ''',
            '''select obsid, parameter, report, staff from w_qual_lab''',
            ''', [(P1, labpar1, report1, s1)], ''',
            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            ''', [(P1, par1, s1, 2015-01-01 01:00:00, None)], ''',
            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            ''', [(P1, inst1, Momflow, 2015-04-13 00:00:00, l/s)], ''',
            '''select obsid, date_time, meas from w_levels''',
            ''', [(P1, 2015-01-02 00:00:01, 2.0)], ''',
            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
            ''', [(P1, 1, 0.0, 10.0)], ''', '''select obsid from obs_lines''',
            ''', [(L1)], ''', '''select obsid, length from seismic_data''',
            ''', [(L1, 5.0)], ''',
            '''select obsid, instrumentid, parameter, date_time from meteo''',
            ''', [(P1, meteoinst, precip, 2017-01-01 00:19:00)]]'''
        ]
        reference_string = '\n'.join(reference_string)
        print("Ref:")
        print(str(reference_string))
        print("Test:")
        print(str(test_string))
        assert test_string == reference_string

    @mock.patch('midvatten_utils.MessagebarAndLog')
    @mock.patch('midvatten_utils.QtWidgets.QInputDialog.getText')
    @mock.patch('create_db.utils.NotFoundQuestion')
    @mock.patch('midvatten_utils.Askuser', answer_yes.get_v)
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_no_selection.get_v)
    @mock.patch('midvatten_utils.verify_msettings_loaded_and_layer_edit_mode',
                autospec=True)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName')
    @mock.patch('midvatten_utils.find_layer', autospec=True)
    @mock.patch('qgis.utils.iface', autospec=True)
    @mock.patch('export_data.utils.pop_up_info', autospec=True)
    def test_export_spatialite_no_selected(self, mock_skip_popup, mock_iface,
                                           mock_find_layer, mock_newdbpath,
                                           mock_verify, mock_locale,
                                           mock_createdb_crs_question,
                                           mock_messagebar):
        mock_find_layer.return_value.crs.return_value.authid.return_value = 'EPSG:3006'
        mock_createdb_crs_question.return_value = [3006, True]
        dbconnection = db_utils.DbConnectionManager()
        mock_newdbpath.return_value = (EXPORT_DB_PATH, '')
        mock_verify.return_value = 0

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P2', ST_GeomFromText('POINT(1 2)', 3006))'''
        )
        db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('s1')''',
                              dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'par1', 's1', '2015-01-01 01:00:00', 'unit1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot) VALUES ('P1', 1, 0, 10)''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO obs_lines (obsid) VALUES ('L1')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''',
            dbconnection=dbconnection)
        db_utils.sql_alter_db(
            '''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')''',
            dbconnection=dbconnection)

        dbconnection.commit_and_closedb()

        mock_locale.return_value.answer = 'ok'
        mock_locale.return_value.value = 'sv_SE'
        self.midvatten.export_spatialite()

        sql_list = [
            '''select obsid, ST_AsText(geometry) from obs_points''',
            '''select staff from zz_staff''',
            '''select obsid, date_time, staff, comment from comments''',
            '''select obsid, parameter, report, staff from w_qual_lab''',
            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            '''select obsid, date_time, meas from w_levels''',
            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
            '''select obsid from obs_lines''',
            '''select obsid, length from seismic_data''',
            '''select obsid, instrumentid, parameter, date_time from meteo'''
        ]

        conn = db_utils.connect_with_spatialite_connect(EXPORT_DB_PATH)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)
        reference_string = [
            '''[''', '''select obsid, ST_AsText(geometry) from obs_points''',
            ''', [(P1, POINT(633466 711659)), (P2, POINT(1 2))], ''',
            '''select staff from zz_staff''', ''', [(s1)], ''',
            '''select obsid, date_time, staff, comment from comments''',
            ''', [(P1, 2015-01-01 00:00:00, s1, comment1)], ''',
            '''select obsid, parameter, report, staff from w_qual_lab''',
            ''', [(P1, labpar1, report1, s1)], ''',
            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            ''', [(P1, par1, s1, 2015-01-01 01:00:00, None)], ''',
            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            ''', [(P1, inst1, Momflow, 2015-04-13 00:00:00, l/s)], ''',
            '''select obsid, date_time, meas from w_levels''',
            ''', [(P1, 2015-01-02 00:00:01, 2.0)], ''',
            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
            ''', [(P1, 1, 0.0, 10.0)], ''', '''select obsid from obs_lines''',
            ''', [(L1)], ''', '''select obsid, length from seismic_data''',
            ''', [(L1, 5.0)], ''',
            '''select obsid, instrumentid, parameter, date_time from meteo''',
            ''', [(P1, meteoinst, precip, 2017-01-01 00:19:00)]]'''
        ]
        reference_string = '\n'.join(reference_string)
        print(test_string)
        print(str(mock_messagebar.mock_calls))
        assert test_string == reference_string

    @mock.patch('midvatten_utils.MessagebarAndLog')
    @mock.patch('midvatten_utils.QtWidgets.QInputDialog.getText')
    @mock.patch('create_db.utils.NotFoundQuestion')
    @mock.patch('midvatten_utils.Askuser', answer_yes.get_v)
    @mock.patch('midvatten_utils.get_selected_features_as_tuple')
    @mock.patch('midvatten_utils.verify_msettings_loaded_and_layer_edit_mode',
                autospec=True)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName')
    @mock.patch('midvatten_utils.find_layer', autospec=True)
    @mock.patch('qgis.utils.iface', autospec=True)
    @mock.patch('export_data.utils.pop_up_info', autospec=True)
    def test_export_spatialite_with_umlauts(self, mock_skip_popup, mock_iface,
                                            mock_find_layer, mock_newdbpath,
                                            mock_verify, mock_selection,
                                            mock_locale,
                                            mock_createdb_crs_question,
                                            mock_messagebar):
        mock_selection.return_value = ('åäö', )
        mock_find_layer.return_value.crs.return_value.authid.return_value = 'EPSG:3006'
        mock_createdb_crs_question.return_value = [3006, True]

        mock_newdbpath.return_value = (EXPORT_DB_PATH, '')
        mock_verify.return_value = 0

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('åäö', ST_GeomFromText('POINT(633466 711659)', 3006))'''
        )
        db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('s1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('åäö', '2015-01-01 00:00:00', 's1', 'comment1')'''
        )

        mock_locale.return_value.answer = 'ok'
        mock_locale.return_value.value = 'sv_SE'
        self.midvatten.export_spatialite()

        sql_list = [
            '''select obsid, ST_AsText(geometry) from obs_points''',
            '''select staff from zz_staff''',
            '''select obsid, date_time, staff, comment from comments'''
        ]

        conn = db_utils.connect_with_spatialite_connect(EXPORT_DB_PATH)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)
        reference_string = [
            '''[''', '''select obsid, ST_AsText(geometry) from obs_points''',
            ''', [(åäö, POINT(633466 711659))], ''',
            '''select staff from zz_staff''', ''', [(s1)], ''',
            '''select obsid, date_time, staff, comment from comments''',
            ''', [(åäö, 2015-01-01 00:00:00, s1, comment1)]]'''
        ]
        reference_string = '\n'.join(reference_string)

        print("Ref")
        print(reference_string)
        print("Test")
        print(test_string)
        assert test_string == reference_string

    @mock.patch('midvatten_utils.MessagebarAndLog')
    @mock.patch('midvatten_utils.QtWidgets.QInputDialog.getText')
    @mock.patch('create_db.utils.NotFoundQuestion')
    @mock.patch('midvatten_utils.Askuser', answer_yes.get_v)
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_selection.get_v)
    @mock.patch('midvatten_utils.verify_msettings_loaded_and_layer_edit_mode',
                autospec=True)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName')
    @mock.patch('midvatten_utils.find_layer', autospec=True)
    @mock.patch('qgis.utils.iface', autospec=True)
    @mock.patch('export_data.utils.pop_up_info', autospec=True)
    def test_export_spatialite_transform_coordinates(
            self, mock_skip_popup, mock_iface, mock_find_layer, mock_newdbpath,
            mock_verify, mock_locale, mock_createdb_crs_question,
            mock_messagebar):
        mock_find_layer.return_value.crs.return_value.authid.return_value = 'EPSG:3006'
        mock_createdb_crs_question.return_value = [3010, True]

        mock_newdbpath.return_value = (EXPORT_DB_PATH, '')
        mock_verify.return_value = 0

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(1 1)', 3006))'''
        )
        db_utils.sql_alter_db('''INSERT INTO zz_staff (staff) VALUES ('s1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO comments (obsid, date_time, staff, comment) VALUES ('P1', '2015-01-01 00:00:00', 's1', 'comment1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_lab (obsid, parameter, report, staff) VALUES ('P1', 'labpar1', 'report1', 's1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_qual_field (obsid, parameter, staff, date_time, unit) VALUES ('P1', 'par1', 's1', '2015-01-01 01:00:00', 'unit1')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_flow (obsid, instrumentid, flowtype, date_time, unit) VALUES ('P1', 'inst1', 'Momflow', '2015-04-13 00:00:00', 'l/s')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO w_levels (obsid, date_time, meas) VALUES ('P1', '2015-01-02 00:00:01', '2')'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO stratigraphy (obsid, stratid, depthtop, depthbot) VALUES ('P1', 1, 0, 10)'''
        )
        db_utils.sql_alter_db(
            '''INSERT INTO obs_lines (obsid) VALUES ('L1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO seismic_data (obsid, length) VALUES ('L1', '5')''')
        db_utils.sql_alter_db(
            '''INSERT INTO meteo (obsid, instrumentid, parameter, date_time) VALUES ('P1', 'meteoinst', 'precip', '2017-01-01 00:19:00')'''
        )

        mock_locale.return_value.answer = 'ok'
        mock_locale.return_value.value = 'sv_SE'
        self.midvatten.export_spatialite()

        sql_list = [
            '''select obsid, ST_AsText(geometry) from obs_points''',
            '''select staff from zz_staff''',
            '''select obsid, date_time, staff, comment from comments''',
            '''select obsid, parameter, report, staff from w_qual_lab''',
            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            '''select obsid, date_time, meas from w_levels''',
            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
            '''select obsid from obs_lines''',
            '''select obsid, length from seismic_data''',
            '''select obsid, instrumentid, parameter, date_time from meteo'''
        ]

        conn = db_utils.connect_with_spatialite_connect(EXPORT_DB_PATH)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)
        """
        # The coordinates aquired from st_transform differs from Linux Mint 18.2 to Linux Mint 19
        # In Mint 18, it's -517888.383773 for both postgis and spatialite
        # In Mint 19, it's -517888.383737 for both postgis and spatialite
        #// I've made changes to the transformation so the above values no longer exists, but the previous issue probably does.
        # !!! No idea why
        
        reference_string = ['''[''',
                            '''select obsid, ST_AsText(geometry) from obs_points''',
                            ''', [(P1, POINT(-517888.392089 1.000667))], ''',
                            '''select staff from zz_staff''',
                            ''', [(s1)], ''',
                            '''select obsid, date_time, staff, comment from comments''',
                            ''', [(P1, 2015-01-01 00:00:00, s1, comment1)], ''',
                            '''select obsid, parameter, report, staff from w_qual_lab''',
                            ''', [(P1, labpar1, report1, s1)], ''',
                            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
                            ''', [(P1, par1, s1, 2015-01-01 01:00:00, None)], ''',
                            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
                            ''', [(P1, inst1, Momflow, 2015-04-13 00:00:00, l/s)], ''',
                            '''select obsid, date_time, meas from w_levels''',
                            ''', [(P1, 2015-01-02 00:00:01, 2.0)], ''',
                            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
                            ''', [(P1, 1, 0.0, 10.0)], ''',
                            '''select obsid from obs_lines''',
                            ''', [(L1)], ''',
                            '''select obsid, length from seismic_data''',
                            ''', [(L1, 5.0)], ''',
                            '''select obsid, instrumentid, parameter, date_time from meteo''',
                            ''', [(P1, meteoinst, precip, 2017-01-01 00:19:00)]]''']
        """
        reference_string = [
            '''[''', '''select obsid, ST_AsText(geometry) from obs_points''',
            ''', [(P1, POINT(-517888.383737 1.002821))], ''',
            '''select staff from zz_staff''', ''', [(s1)], ''',
            '''select obsid, date_time, staff, comment from comments''',
            ''', [(P1, 2015-01-01 00:00:00, s1, comment1)], ''',
            '''select obsid, parameter, report, staff from w_qual_lab''',
            ''', [(P1, labpar1, report1, s1)], ''',
            '''select obsid, parameter, staff, date_time, comment from w_qual_field''',
            ''', [(P1, par1, s1, 2015-01-01 01:00:00, None)], ''',
            '''select obsid, instrumentid, flowtype, date_time, unit from w_flow''',
            ''', [(P1, inst1, Momflow, 2015-04-13 00:00:00, l/s)], ''',
            '''select obsid, date_time, meas from w_levels''',
            ''', [(P1, 2015-01-02 00:00:01, 2.0)], ''',
            '''select obsid, stratid, depthtop, depthbot from stratigraphy''',
            ''', [(P1, 1, 0.0, 10.0)], ''', '''select obsid from obs_lines''',
            ''', [(L1)], ''', '''select obsid, length from seismic_data''',
            ''', [(L1, 5.0)], ''',
            '''select obsid, instrumentid, parameter, date_time from meteo''',
            ''', [(P1, meteoinst, precip, 2017-01-01 00:19:00)]]'''
        ]

        reference_string = '\n'.join(reference_string)
        print("Test\n" + test_string)
        print("Ref\n" + reference_string)
        assert test_string == reference_string

    @mock.patch('midvatten_utils.MessagebarAndLog')
    @mock.patch('midvatten_utils.QtWidgets.QInputDialog.getText')
    @mock.patch('create_db.utils.NotFoundQuestion')
    @mock.patch('midvatten_utils.Askuser', answer_yes.get_v)
    @mock.patch('midvatten_utils.get_selected_features_as_tuple',
                mock_selection.get_v)
    @mock.patch('midvatten_utils.verify_msettings_loaded_and_layer_edit_mode',
                autospec=True)
    @mock.patch('qgis.PyQt.QtWidgets.QFileDialog.getSaveFileName')
    @mock.patch('midvatten_utils.find_layer', autospec=True)
    @mock.patch('qgis.utils.iface', autospec=True)
    @mock.patch('export_data.utils.pop_up_info', autospec=True)
    def test_export_spatialite_zz_tables(self, mock_skip_popup, mock_iface,
                                         mock_find_layer, mock_newdbpath,
                                         mock_verify, mock_locale,
                                         mock_createdb_crs_question,
                                         mock_messagebar):
        mock_find_layer.return_value.crs.return_value.authid.return_value = 'EPSG:3006'
        mock_createdb_crs_question.return_value = [3006, True]
        dbconnection = db_utils.DbConnectionManager()
        mock_newdbpath.return_value = (EXPORT_DB_PATH, '')
        mock_verify.return_value = 0
        """
        insert into zz_strat(geoshort,strata) values('land fill','fyll');
        insert into zz_stratigraphy_plots (strata,color_mplot,hatch_mplot,color_qt,brush_qt) values('torv','DarkGray','+','darkGray','NoBrush');
        insert into zz_capacity (capacity,explanation) values('6 ','mycket god');
        insert into zz_capacity (capacity,explanation) values('6+','mycket god');
        insert into zz_capacity_plots (capacity,color_qt) values('', 'gray');
        """

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid, geometry) VALUES ('P1', ST_GeomFromText('POINT(633466 711659)', 3006))''',
            dbconnection=dbconnection)
        dbconnection.execute('''PRAGMA foreign_keys='off'  ''')
        dbconnection.execute(
            '''UPDATE zz_strat SET strata = 'filling' WHERE geoshort = 'land fill' '''
        )
        dbconnection.execute(
            '''INSERT INTO zz_stratigraphy_plots (strata,color_mplot,hatch_mplot,color_qt,brush_qt) values ('filling','Yellow','+','darkGray','NoBrush') '''
        )
        dbconnection.execute(
            '''UPDATE zz_stratigraphy_plots SET color_mplot = 'OrangeFIX' WHERE strata = 'made ground' '''
        )
        dbconnection.execute(
            '''UPDATE zz_capacity SET explanation = 'anexpl' WHERE capacity = '0' '''
        )
        dbconnection.execute(
            '''UPDATE zz_capacity_plots SET color_qt = 'whiteFIX' WHERE capacity = '0' '''
        )
        #print(str(dbconnection.execute_and_fetchall('select * from zz_strat')))
        dbconnection.commit_and_closedb()
        print("Before export")
        mock_locale.return_value.answer = 'ok'
        mock_locale.return_value.value = 'en_US'

        self.midvatten.export_spatialite()
        sql_list = [
            '''SELECT geoshort, strata FROM zz_strat WHERE geoshort IN ('land fill', 'rock') ''',
            '''SELECT strata, color_mplot FROM zz_stratigraphy_plots WHERE strata IN ('made ground', 'rock', 'filling') ''',
            '''SELECT capacity, explanation FROM zz_capacity WHERE capacity IN ('0', '1')''',
            '''SELECT capacity, color_qt FROM zz_capacity_plots WHERE capacity IN ('0', '1') '''
        ]

        conn = db_utils.connect_with_spatialite_connect(EXPORT_DB_PATH)
        curs = conn.cursor()

        test_list = []
        for sql in sql_list:
            test_list.append('\n' + sql + '\n')
            test_list.append(curs.execute(sql).fetchall())

        conn.commit()
        conn.close()

        test_string = utils_for_tests.create_test_string(test_list)

        reference_string = [
            '''[''',
            '''SELECT geoshort, strata FROM zz_strat WHERE geoshort IN ('land fill', 'rock') ''',
            ''', [(land fill, filling), (rock, rock)], ''',
            '''SELECT strata, color_mplot FROM zz_stratigraphy_plots WHERE strata IN ('made ground', 'rock', 'filling') ''',
            ''', [(filling, Yellow), (made ground, OrangeFIX), (rock, red)], ''',
            '''SELECT capacity, explanation FROM zz_capacity WHERE capacity IN ('0', '1')''',
            ''', [(0, anexpl), (1, above gwl)], ''',
            '''SELECT capacity, color_qt FROM zz_capacity_plots WHERE capacity IN ('0', '1') ''',
            ''', [(0, whiteFIX), (1, red)]]'''
        ]

        reference_string = '\n'.join(reference_string)
        print(str(test_string))
        print(str(reference_string))
        assert test_string == reference_string

    def tearDown(self):
        #Delete database
        try:
            os.remove(EXPORT_DB_PATH)
        except OSError:
            pass

        for filename in TestExport.exported_csv_files:
            try:
                os.remove(filename)
            except OSError:
                pass

        super(self.__class__, self).tearDown()
    def test_wlvllogg_import_from_diveroffice_files_skip_obsid(self):
        files = [
            ('Location=rb1', 'Date/time,Water head[cm],Temperature[°C]',
             '2016/03/15 10:30:00,1,10', '2016/03/15 11:00:00,11,101'),
            ('Location=rb2', 'Date/time,Water head[cm],Temperature[°C]',
             '2016/04/15 10:30:00,2,20', '2016/04/15 11:00:00,21,201'),
            ('Location=rb3',
             'Date/time,Water head[cm],Temperature[°C],Conductivity[mS/cm]',
             '2016/05/15 10:30:00,3,30,5', '2016/05/15 11:00:00,31,301,6')
        ]

        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid) VALUES ('rb1')''')
        db_utils.sql_alter_db(
            '''INSERT INTO obs_points (obsid) VALUES ('rb2')''')

        DiverofficeImport.charsetchoosen = 'utf-8'
        with utils.tempinput('\n'.join(files[0]),
                             DiverofficeImport.charsetchoosen) as f1:
            with utils.tempinput('\n'.join(files[1]),
                                 DiverofficeImport.charsetchoosen) as f2:
                with utils.tempinput('\n'.join(files[2]),
                                     DiverofficeImport.charsetchoosen) as f3:

                    filenames = [f1, f2, f3]
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(
                        utils_askuser_answer_no_obj)

                    @mock.patch("midvatten_utils.MessagebarAndLog")
                    @mock.patch('import_data_to_db.utils.NotFoundQuestion')
                    @mock.patch('db_utils.QgsProject.instance',
                                utils_for_tests.MidvattenTestPostgisNotCreated.
                                mock_instance_settings_database)
                    @mock.patch('db_utils.get_postgis_connections',
                                utils_for_tests.MidvattenTestPostgisNotCreated.
                                mock_postgis_connections)
                    @mock.patch('import_data_to_db.utils.Askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('qgis.PyQt.QtWidgets.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info',
                                autospec=True)
                    @mock.patch('import_data_to_db.utils.select_files')
                    def _test_wlvllogg_import_from_diveroffice_files(
                            self, filenames, mock_filenames, mock_skippopup,
                            mock_encoding, mock_iface, mock_askuser,
                            mock_notfoundquestion, mock_messagebarandlog):

                        mocks_notfoundquestion = []
                        for answer, value in [['ok', 'rb1'], ['ok', 'rb2'],
                                              ['skip', 'rb3']]:
                            a_mock = MagicMock()
                            a_mock.answer = answer
                            a_mock.value = value
                            a_mock.reuse_column = 'location'
                            mocks_notfoundquestion.append(a_mock)

                        mock_notfoundquestion.side_effect = mocks_notfoundquestion

                        mock_filenames.return_value = filenames
                        mock_encoding.return_value = ['utf-8']

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = DiverofficeImport(self.iface.mainWindow(),
                                                     ms)
                        importer.select_files_and_load_gui()

                        importer.start_import(importer.files,
                                              importer.skip_rows.checked,
                                              importer.confirm_names.checked,
                                              importer.import_all_data.checked)

                        print('\n'.join([
                            str(x) for x in mock_messagebarandlog.mock_calls
                        ]))

                    _test_wlvllogg_import_from_diveroffice_files(
                        self, filenames)

                    test_string = utils_for_tests.create_test_string(
                        db_utils.sql_load_fr_db(
                            '''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''
                        ))
                    reference_string = r'''(True, [(rb1, 2016-03-15 10:30:00, 1.0, 10.0, None, None, None), (rb1, 2016-03-15 11:00:00, 11.0, 101.0, None, None, None), (rb2, 2016-04-15 10:30:00, 2.0, 20.0, None, None, None), (rb2, 2016-04-15 11:00:00, 21.0, 201.0, None, None, None)])'''
                    print(test_string)
                    print(reference_string)
                    assert test_string == reference_string
Example #8
0
    def test_import_w_levels(self):
        file = [u'obsid,date_time,meas',
                 u'rb1,2016-03-15 10:30:00,5.0']

        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("rb1")''')

        with utils.tempinput(u'\n'.join(file), u'utf-8') as filename:
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
                    @mock.patch('import_data_to_db.utils.askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
                    def _test(self, filename, mock_filename, mock_skippopup, mock_encoding, mock_iface, mock_askuser):
                        # Run the csv import with all user interaction mocked
                        # out.  The mock_* parameters are injected by the
                        # decorator stack (bottom decorator -> first mock
                        # parameter); part of that stack sits above this view.

                        # File picker returns the temp file, encoding dialog
                        # answers utf-8.
                        mock_filename.return_value = filename
                        mock_encoding.return_value = [u'utf-8', True]

                        def side_effect(*args, **kwargs):
                            # Simulated user answers for the askuser dialogs,
                            # selected by the prefix of the question text.
                            # Falls through (returns None) for any question
                            # not listed here.
                            mock_result = mock.MagicMock()
                            if u'msg' in kwargs:
                                if kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                                    mock_result.result = 1
                                    return mock_result
                            if len(args) > 1:
                                if args[1].startswith(u'Do you want to confirm'):
                                    mock_result.result = 0
                                    return mock_result
                                    #mock_askuser.return_value.result.return_value = 0
                                elif args[1].startswith(u'Do you want to import all'):
                                    mock_result.result = 0
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nForeign keys'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nThere are'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'It is a strong recommendation'):
                                    mock_result.result = 0
                                    return mock_result
                        mock_askuser.side_effect = side_effect

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = GeneralCsvImportGui(self.iface.mainWindow(), ms)
                        importer.load_gui()

                        importer.load_files()
                        importer.table_chooser.import_method = u'w_levels'

                        # Map csv header names onto the w_levels db columns.
                        for column in importer.table_chooser.columns:
                            names = {u'obsid': u'obsid', u'date_time': u'date_time', u'meas': u'meas'}
                            if column.db_column in names:
                                column.file_column_name = names[column.db_column]

                        importer.start_import()

                    _test(self, filename)
                    # Compare the resulting w_levels content against the
                    # expected single imported row.
                    test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select obsid, date_time, meas, h_toc, level_masl, comment from w_levels'''))
                    reference_string = ur'''(True, [(rb1, 2016-03-15 10:30:00, 5.0, None, None, None)])'''
                    assert test_string == reference_string
    # Example #9
    # 0
    def test_import_w_levels_many_rows(self):
        file = [u'obsid,date_time,meas']
        base = datestring_to_date(u'1900-01-01 00:01:01')
        date_list = [base + datetime.timedelta(days=x) for x in range(0, 10000)]
        file.extend([u'rb1,' + datetime.datetime.strftime(adate, u'%Y%M%D %H%m') + u',0.5' for adate in date_list])

        utils.sql_alter_db(u'''INSERT INTO obs_points ("obsid") VALUES ("rb1")''')

        with utils.tempinput(u'\n'.join(file), u'utf-8') as filename:
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

                    @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
                    @mock.patch('import_data_to_db.utils.askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
                    @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
                    def _test(self, filename, mock_filename, mock_skippopup, mock_encoding, mock_iface, mock_askuser):

                        mock_filename.return_value = filename
                        mock_encoding.return_value = [u'utf-8', True]

                        def side_effect(*args, **kwargs):
                            mock_result = mock.MagicMock()
                            if u'msg' in kwargs:
                                if kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                                    mock_result.result = 1
                                    return mock_result
                            if len(args) > 1:
                                if args[1].startswith(u'Do you want to confirm'):
                                    mock_result.result = 0
                                    return mock_result
                                    #mock_askuser.return_value.result.return_value = 0
                                elif args[1].startswith(u'Do you want to import all'):
                                    mock_result.result = 0
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nForeign keys'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'Please note!\nThere are'):
                                    mock_result.result = 1
                                    return mock_result
                                elif args[1].startswith(u'It is a strong recommendation'):
                                    mock_result.result = 0
                                    return mock_result
                        mock_askuser.side_effect = side_effect

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = GeneralCsvImportGui(self.iface.mainWindow(), ms)
                        importer.load_gui()

                        importer.load_files()
                        importer.table_chooser.import_method = u'w_levels'

                        for column in importer.table_chooser.columns:
                            names = {u'obsid': u'obsid', u'date_time': u'date_time', u'meas': u'meas'}
                            if column.db_column in names:
                                column.file_column_name = names[column.db_column]

                        import_time = timeit.timeit(importer.start_import, number=1)
                        return import_time

                    import_time = _test(self, filename)
                    test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select count(*) from w_levels'''))
                    reference_string = ur'''(True, [(10000)])'''
                    assert import_time < 10
                    assert test_string == reference_string
    # Example #10
    # 0
    def test_import_vlf_data_obsid_not_in_db(self):
        """Import a vlf_data row whose obsid is missing from obs_lines.

        The mocked NotFoundQuestion answers u'ok' with value u'obsid1', so
        the row written as obsid2 in the file ends up stored under obsid1.
        """
        file = [u'obsid,length2,real_comp,imag_comp,comment',
                u'obsid2,500,2,10,acomment']

        utils.sql_alter_db(u'INSERT INTO obs_lines ("obsid") VALUES ("obsid1")')

        with utils.tempinput(u'\n'.join(file), u'utf-8') as filename:
            utils_askuser_answer_no_obj = MockUsingReturnValue(None)
            utils_askuser_answer_no_obj.result = 0
            utils_askuser_answer_no = MockUsingReturnValue(utils_askuser_answer_no_obj)

            # Decorator order matters: bottom decorator -> first mock
            # parameter; the QgsProject patch supplies its own replacement
            # and injects no parameter.
            @mock.patch('midvatten_utils.NotFoundQuestion', autospec=True)
            @mock.patch('midvatten_utils.QgsProject.instance', MOCK_DBPATH.get_v)
            @mock.patch('import_data_to_db.utils.askuser')
            @mock.patch('qgis.utils.iface', autospec=True)
            @mock.patch('PyQt4.QtGui.QInputDialog.getText')
            @mock.patch('import_data_to_db.utils.pop_up_info', autospec=True)
            @mock.patch.object(PyQt4.QtGui.QFileDialog, 'getOpenFileName')
            def _run_import(self, filename, mock_open_file, mock_popup,
                            mock_charset, mock_iface, mock_askuser,
                            mock_notfound):
                mock_open_file.return_value = filename
                mock_charset.return_value = [u'utf-8', True]

                # Resolve the unknown obsid to the one existing obs_line.
                mock_notfound.return_value.answer = u'ok'
                mock_notfound.return_value.value = u'obsid1'

                def fake_askuser(*args, **kwargs):
                    # Answer each askuser dialog based on the question's
                    # prefix; unknown questions fall through to None.
                    response = mock.MagicMock()
                    if u'msg' in kwargs and kwargs[u'msg'].startswith(u'Does the file contain a header?'):
                        response.result = 1
                        return response
                    if len(args) > 1:
                        answers = ((u'Do you want to confirm', 0),
                                   (u'Do you want to import all', 0),
                                   (u'Please note!\nForeign keys', 1),
                                   (u'Please note!\nThere are', 1),
                                   (u'It is a strong recommendation', 0))
                        for prefix, result in answers:
                            if args[1].startswith(prefix):
                                response.result = result
                                return response
                mock_askuser.side_effect = fake_askuser

                settings = MagicMock()
                settings.settingsdict = OrderedDict()
                importer = GeneralCsvImportGui(self.iface.mainWindow(), settings)
                importer.load_gui()
                importer.load_files()
                importer.table_chooser.import_method = u'vlf_data'

                # Map the csv header names onto the vlf_data db columns.
                names = {u'obsid': u'obsid', u'length': u'length2',
                         u'real_comp': u'real_comp', u'imag_comp': u'imag_comp',
                         u'comment': u'comment'}
                for column in importer.table_chooser.columns:
                    if column.db_column in names:
                        column.file_column_name = names[column.db_column]

                importer.start_import()

            _run_import(self, filename)
            test_string = utils_for_tests.create_test_string(utils.sql_load_fr_db(u'''select * from vlf_data'''))
            reference_string = u'''(True, [(obsid1, 500.0, 2.0, 10.0, acomment)])'''
            assert test_string == reference_string
    # Example #11
    # 0
    def test_wlvllogg_import_from_diveroffice_files(self):
        files = [
            (u'Location=rb1', u'Date/time,Water head[cm],Temperature[°C]',
             u'2016/03/15 10:30:00,1,10', u'2016/03/15 11:00:00,11,101'),
            (u'Location=rb2', u'Date/time,Water head[cm],Temperature[°C]',
             u'2016/04/15 10:30:00,2,20', u'2016/04/15 11:00:00,21,201'),
            (u'Location=rb3',
             u'Date/time,Water head[cm],Temperature[°C],Conductivity[mS/cm]',
             u'2016/05/15 10:30:00,3,30,5', u'2016/05/15 11:00:00,31,301,6')
        ]

        db_utils.sql_alter_db(
            u'''INSERT INTO obs_points (obsid) VALUES ('rb1')''')

        DiverofficeImport.charsetchoosen = u'utf-8'
        with utils.tempinput(u'\n'.join(files[0]),
                             DiverofficeImport.charsetchoosen) as f1:
            with utils.tempinput(u'\n'.join(files[1]),
                                 DiverofficeImport.charsetchoosen) as f2:
                with utils.tempinput(u'\n'.join(files[2]),
                                     DiverofficeImport.charsetchoosen) as f3:

                    filenames = [f1, f2, f3]
                    utils_askuser_answer_no_obj = MockUsingReturnValue(None)
                    utils_askuser_answer_no_obj.result = 0
                    utils_askuser_answer_no = MockUsingReturnValue(
                        utils_askuser_answer_no_obj)

                    @mock.patch('import_data_to_db.utils.NotFoundQuestion')
                    @mock.patch(
                        'db_utils.QgsProject.instance',
                        utils_for_tests.MidvattenTestSpatialiteNotCreated.
                        mock_instance_settings_database)
                    @mock.patch('import_data_to_db.utils.Askuser')
                    @mock.patch('qgis.utils.iface', autospec=True)
                    @mock.patch('PyQt4.QtGui.QInputDialog.getText')
                    @mock.patch('import_data_to_db.utils.pop_up_info',
                                autospec=True)
                    @mock.patch('import_data_to_db.utils.select_files')
                    def _test_wlvllogg_import_from_diveroffice_files(
                            self, filenames, mock_filenames, mock_skippopup,
                            mock_encoding, mock_iface, mock_askuser,
                            mock_notfoundquestion):
                        mock_notfoundquestion.return_value.answer = u'ok'
                        mock_notfoundquestion.return_value.value = u'rb1'
                        mock_notfoundquestion.return_value.reuse_column = u'location'
                        mock_filenames.return_value = filenames
                        mock_encoding.return_value = [u'utf-8']

                        ms = MagicMock()
                        ms.settingsdict = OrderedDict()
                        importer = DiverofficeImport(self.iface.mainWindow(),
                                                     ms)
                        importer.select_files_and_load_gui()

                        importer.start_import(importer.files,
                                              importer.skip_rows.checked,
                                              importer.confirm_names.checked,
                                              importer.import_all_data.checked)

                    _test_wlvllogg_import_from_diveroffice_files(
                        self, filenames)

                    test_string = utils_for_tests.create_test_string(
                        db_utils.sql_load_fr_db(
                            u'''SELECT obsid, date_time, head_cm, temp_degc, cond_mscm, level_masl, comment FROM w_levels_logger'''
                        ))
                    reference_string = ur'''(True, [(rb1, 2016-03-15 10:30:00, 1.0, 10.0, None, None, None), (rb1, 2016-03-15 11:00:00, 11.0, 101.0, None, None, None), (rb1, 2016-04-15 10:30:00, 2.0, 20.0, None, None, None), (rb1, 2016-04-15 11:00:00, 21.0, 201.0, None, None, None), (rb1, 2016-05-15 10:30:00, 3.0, 30.0, 5.0, None, None), (rb1, 2016-05-15 11:00:00, 31.0, 301.0, 6.0, None, None)])'''
                    assert test_string == reference_string