def add_logger_temp(reader, filename):
    """Insert logger temperature records from an uploaded file.

    Args:
        reader: iterable of raw temperature records (e.g. a csv reader row
            iterator over the uploaded file).
        filename: uploaded file name; the microsite id is the component
            before the first underscore.

    Returns:
        A ``(counts, error_message)`` tuple where ``counts`` is a dict with
        keys ``"total"``, ``"success"`` and ``"failure"``, or
        ``(None, message)`` when the microsite id is not registered yet.
    """
    # NOTE(review): this function appeared twice in the file with identical
    # behavior; the duplicate definition has been merged into this one.
    db = DbConnect(current_app.config)
    try:
        parsed_records = []
        success_counter = 0
        failure_counter = 0
        error_message = None
        # The microsite id is encoded as the leading filename component.
        microsite_id = filename.rsplit('_', 5)[0].upper()
        logger_id = db.find_microsite_id(microsite_id)
        if logger_id is None:
            error_message = filename + ": The microsite_id does not exist. \
                Please upload logger data type file first\n"
            return None, error_message
        for record in reader:
            parsed_record_dict, is_parse_error = db.parse_logger_temp(record)
            if not is_parse_error:
                parsed_records.append(parsed_record_dict)
            else:
                failure_counter += 1
        total_counter = len(parsed_records) + failure_counter
        if parsed_records:
            success_counter = db.insert_logger_temp(parsed_records, logger_id)
            # Records that parsed but were rejected on insert (e.g.
            # duplicates) count as failures as well.
            failure_counter += len(parsed_records) - success_counter
        return {"total": total_counter,
                "success": success_counter,
                "failure": failure_counter}, error_message
    finally:
        # Always release the connection — the original leaked it on the
        # early return taken when the microsite id was unknown.
        db.close()
class UploadTestEdgeCase(unittest.TestCase):
    """Upload Feature specific Test Cases will go here"""

    def setUp(self):
        """Setup test app"""
        self.app = create_app('tests.config')
        self.db = DbConnect(self.app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def cleanUpLoggerTemp(self, cursor):
        '''clean up table cnx_logger_temperature'''
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_temp_ids = [result[0] for result in results]
            for logger_temp_id in logger_temp_ids:
                # Parameterized delete (previously %-interpolated with
                # manual quotes) — keeps the whole class consistent.
                res = cursor.execute(
                    "DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=%s",
                    (logger_temp_id, ))
                self.db.connection.commit()
        self.cleanUpMetadataTable(cursor)

    def cleanUpLoggerType(self, cursor, rec):
        '''clean up logger type tables'''
        biomimic_id = self.db.fetch_existing_bio_id(cursor,
                                                    rec.get('biomimic_type'))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get('microsite_id'))
        # All deletes use driver-side parameter binding with tuple args,
        # matching the style of UploadTestCase.clean_up_logger_type.
        res = cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s",
                             (logger_id, ))
        self.db.connection.commit()
        res = cursor.execute(
            "DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s",
            (biomimic_id, ))
        self.db.connection.commit()
        res = cursor.execute(
            "DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s",
            (geo_id, ))
        self.db.connection.commit()
        res = cursor.execute(
            "DELETE FROM `cnx_logger_properties` WHERE prop_id=%s",
            (prop_id, ))
        self.db.connection.commit()

    def cleanUpMetadataTable(self, cursor):
        ''' clean up table cnx_logger_metadata'''
        cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_ids = [result[0] for result in results]
            for logger_id in logger_ids:
                # BUG FIX: the placeholder was wrapped in literal quotes
                # ('%s') while ALSO passing (logger_id,) as driver params;
                # the driver quotes the value itself, producing ''value''
                # so the DELETE never matched any row. Use a bare %s.
                res = cursor.execute(
                    "DELETE FROM `cnx_logger_metadata` WHERE logger_id=%s",
                    (logger_id, ))
                self.db.connection.commit()

    def test_logger_type_upload_MicrositeId_None(self):
        """Test that upload Logger Type file without microsite_id
        will not be inserted to database"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_MicrositeId_None.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post(
                '/upload',
                data={
                    'loggerTypeFile':
                    (open(test_filename, 'rb'),
                     'Test_New_Logger_Type_MicrositeId_None.csv')
                },
                follow_redirects=True)
            query = (
                "SELECT * from cnx_logger_biomimic_type where biomimic_type='DummyBiomimicTypeNone'"
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query)
            results = cursor.fetchall()
            results = list(results)
            # Nothing should have been inserted for the corrupt file.
            self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_duplicate(self):
        """Test that Logger Temperature file with duplicate entry
        cannot be uploaded"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql_Duplicate.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_type_filename, 'rb'),
                                   'Test_New_Logger_Type_Positive.csv')
            }, follow_redirects=True)
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename, 'rb'),
                                   'DUMMYID_2000_pgsql_Duplicate.txt')
            }, follow_redirects=True)
            record_type = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
                "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
            }
            where_condition = self.db.build_where_condition(record_type)
            query = (
                "SELECT temp.Time_GMT, temp.Temp_C "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
                "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = cursor.fetchall()
            results = list(results)
            self.cleanUpLoggerTemp(cursor)
            self.cleanUpLoggerType(cursor, record_type)
            cursor.close()
            # The duplicate entry must have been rejected: only one row.
            self.assertEqual(len(results), 1)
class QueryFormTestCase(unittest.TestCase):
    """Query Form Feature specific Test Cases will go here"""

    def setUp(self):
        """Setup test app"""
        self.app = create_app('tests.config')
        self.db = DbConnect(self.app.config)
        # Seed the test database with one logger type and its temperature
        # file so every query test below runs against known data.
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_type_filename, 'rb'),
                                   'Test_New_Logger_Type_Positive.csv')
            }, follow_redirects=True)
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename, 'rb'),
                                   'DUMMYID_2000_pgsql.txt')
            }, follow_redirects=True)
        # The record uploaded above; reused by tearDown for cleanup and by
        # the ajax tests as the session query.
        self.record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave"}

    def tearDown(self):
        """Close test database"""
        cursor = self.db.connection.cursor()
        self.clean_up_logger_temp(cursor)
        self.clean_up_logger_type(cursor, self.record_type)
        cursor.close()
        self.db.close()

    def stringToBytes(self, stringValue):
        """Convert Strings to their Bytes representation"""
        return bytes(stringValue, 'UTF-8')

    def clean_up_logger_temp(self, cursor):
        """Delete all rows from cnx_logger_temperature."""
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_temp_ids = [result[0] for result in results]
            for logger_temp_id in logger_temp_ids:
                res = cursor.execute("DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=\'%s\'" % (logger_temp_id))
                self.db.connection.commit()

    def clean_up_logger_type(self, cursor, rec):
        """Delete the logger row for *rec* plus its biomimic-type,
        geographic and property rows."""
        biomimic_id = self.db.fetch_existing_bio_id(cursor,
                                                    rec.get('biomimic_type'))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get('microsite_id'))
        res = cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s" % (logger_id,))
        self.db.connection.commit()
        res = cursor.execute("DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s", (biomimic_id,))
        self.db.connection.commit()
        res = cursor.execute("DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s", (geo_id,))
        self.db.connection.commit()
        res = cursor.execute("DELETE FROM `cnx_logger_properties` WHERE prop_id=%s", (prop_id,))
        self.db.connection.commit()

    def test_form_logger_type_automatic_fill(self):
        """Test the logger_type field is filled automatically on page load"""
        with self.app.test_client() as client:
            response = client.get('/query')
            biomimic_type_choices = self.db.fetch_biomimic_types()
            for biomimic_type in biomimic_type_choices:
                self.assertIn(self.stringToBytes(biomimic_type[0]),
                              response.data)

    def check_ajax(self, selected_type, selected_value, dbFunction):
        """Helper Function to test the ajax call functionality
        when given selected type field is selected with given value"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['query'] = self.record_type
            response = client.get('/_parse_data', query_string=dict(
                select_type=selected_type, select_value=selected_value))
            # The echoed request args must match what was sent.
            self.assertEqual(selected_type, request.args.get('select_type'))
            self.assertEqual(selected_value, request.args.get('select_value'))
            # Every choice the db would offer must appear in the response.
            choices = dbFunction(session['query'])
            for choice in choices[0]:
                self.assertIn(self.stringToBytes(choice[0]), response.data)

    def test_form_select_biomimic_type(self):
        """Test the ajax call functionality if logger_type field is selected"""
        selected_type = "biomimic_type"
        selected_value = "DummyBiomimicType"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['query'] = self.record_type
            response = client.get('/_parse_data', query_string=dict(
                select_type=selected_type, select_value=selected_value))
            self.assertEqual(selected_type, request.args.get('select_type'))
            self.assertEqual(selected_value, request.args.get('select_value'))
            choices = self.db.fetch_distinct_countries_and_zones(self.record_type)
            country_list = choices[0]["country"]
            zone_list = choices[0]["zone"]
            for country in country_list:
                self.assertIn(self.stringToBytes(country), response.data)
            for zone in zone_list:
                self.assertIn(self.stringToBytes(zone), response.data)

    def test_form_select_country_name(self):
        """Test the ajax call functionality if country field is selected"""
        self.check_ajax("country", "DummyCountry",
                        self.db.fetch_distinct_states)

    def test_form_select_state_province(self):
        """Test the ajax call functionality if state_province field is selected"""
        self.check_ajax("state_province", "DummyState",
                        self.db.fetch_distinct_locations)

    def test_form_select_zone_name(self):
        """Test the ajax call functionality if zone field is selected"""
        self.check_ajax("zone", "DummyZone",
                        self.db.fetch_distinct_sub_zones)

    def test_form_select_sub_zone_name(self):
        """Test the ajax call functionality if sub_zone_name field is selected"""
        self.check_ajax("sub_zone", "DummySubZone",
                        self.db.fetch_distinct_wave_exposures)

    def test_query_results_raw(self):
        """Test the query results functionality for Raw"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "7/1/2000",
                "end_date": "7/2/2000",
                "output_type": "Raw"}, follow_redirects=False)
            self.assertIn(b"14", response.data)
            self.assertIn(b"13.5", response.data)
            # Merging with the above test case, since we are storing the
            # query in the session variable
            """Test the download functionality"""
            response = client.get('/download')
            self.assertIn(b"14", response.data)
            self.assertIn(b"13.5", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_min_daily(self):
        """Test the query results functionality for Min Daily"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "7/1/2000",
                "end_date": "7/2/2000",
                "output_type": "Min",
                "analysis_type": "Daily"}, follow_redirects=False)
            self.assertIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_max_daily(self):
        """Test the query results functionality for Max Daily"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "7/1/2000",
                "end_date": "7/2/2000",
                "output_type": "Max",
                "analysis_type": "Daily"}, follow_redirects=False)
            self.assertIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_average_daily(self):
        """Test the query results functionality for Average Daily"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "7/1/2000",
                "end_date": "7/2/2000",
                "output_type": "Average",
                "analysis_type": "Daily"}, follow_redirects=False)
            # Average of 14 and 13.5 is 13.75; neither raw value may appear.
            self.assertIn(b"13.75", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"13.75", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_min_monthly(self):
        """Test the query results functionality for Min Monthly"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "1/1/2000",
                "end_date": "1/1/2003",
                "output_type": "Min",
                "analysis_type": "Monthly"}, follow_redirects=False)
            self.assertIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertIn(b"10", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertIn(b"10", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_max_monthly(self):
        """Test the query results functionality for Max Monthly"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "1/1/2000",
                "end_date": "1/1/2003",
                "output_type": "Max",
                "analysis_type": "Monthly"}, follow_redirects=False)
            self.assertIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertIn(b"20.0", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertIn(b"15.0", response.data)
            self.assertIn(b"7", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertIn(b"20.0", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertIn(b"15.0", response.data)
            self.assertIn(b"7", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_average_monthly(self):
        """Test the query results functionality for Average Monthly"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "1/1/2000",
                "end_date": "1/1/2003",
                "output_type": "Average",
                "analysis_type": "Monthly"}, follow_redirects=False)
            self.assertIn(b"13.75", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertIn(b"15.0", response.data)
            self.assertIn(b"7.0", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertNotIn(b"20.0", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"13.75", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertIn(b"15.0", response.data)
            self.assertIn(b"7.0", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_min_yearly(self):
        """Test the query results functionality for Min Yearly"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "1/1/2000",
                "end_date": "1/1/2003",
                "output_type": "Min",
                "analysis_type": "Yearly"}, follow_redirects=False)
            self.assertNotIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertIn(b"10", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertNotIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertIn(b"10", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_max_yearly(self):
        """Test the query results functionality for Max Yearly"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "1/1/2000",
                "end_date": "1/1/2003",
                "output_type": "Max",
                "analysis_type": "Yearly"}, follow_redirects=False)
            self.assertNotIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertIn(b"20.0", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertNotIn(b"13.5", response.data)
            self.assertNotIn(b"14", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertIn(b"20.0", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)

    def test_query_results_average_yearly(self):
        """Test the query results functionality for Average Yearly"""
        with self.app.test_client() as client:
            response = client.get('/_submit_query', query_string={
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": "1/1/2000",
                "end_date": "1/1/2003",
                "output_type": "Average",
                "analysis_type": "Yearly"}, follow_redirects=False)
            self.assertIn(b"14.375", response.data)
            self.assertNotIn(b"14.0", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            # Test the download functionality
            response = client.get('/download')
            self.assertIn(b"14.375", response.data)
            self.assertNotIn(b"13.5", response.data)
            self.assertNotIn(b"14.0", response.data)
            self.assertNotIn(b"20.0", response.data)
            self.assertNotIn(b"10", response.data)
            self.assertIn(b"15", response.data)
            self.assertIn(b"7", response.data)
            self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)
class UploadTestCase(unittest.TestCase): """Upload Feature specific Test Cases will go here""" def setUp(self): """Setup test app""" self.app = create_app('tests.config') self.db = DbConnect(self.app.config) def tearDown(self): """Close test database""" self.db.close() def clean_up_logger_temp(self, cursor): ''' clean up table cnx_logger_temperature''' cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`") results = cursor.fetchall() if results is not None: results = list(results) logger_temp_ids = [result[0] for result in results] for logger_temp_id in logger_temp_ids: res = cursor.execute( "DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=%s", (logger_temp_id, )) self.db.connection.commit() self.clean_up_metadata_table(cursor) def clean_up_logger_type(self, cursor, rec): ''' clean up logger type tables''' biomimic_id = self.db.fetch_existing_bio_id(cursor, rec.get('biomimic_type')) geo_id = self.db.fetch_existing_geo_id(cursor, rec) prop_id = self.db.fetch_existing_prop_id(cursor, rec) logger_id = self.db.find_microsite_id(rec.get('microsite_id')) res = cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s", (logger_id, )) self.db.connection.commit() res = cursor.execute( "DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s", (biomimic_id, )) self.db.connection.commit() res = cursor.execute( "DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s", (geo_id, )) self.db.connection.commit() res = cursor.execute( "DELETE FROM `cnx_logger_properties` WHERE prop_id=%s", (prop_id, )) self.db.connection.commit() def clean_up_metadata_table(self, cursor): ''' clean up table cnx_logger_metadata''' cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`") results = cursor.fetchall() if results is not None: results = list(results) logger_ids = [result[0] for result in results] for logger_id in logger_ids: res = cursor.execute( "DELETE FROM `cnx_logger_metadata` WHERE logger_id=\'%s\'", (logger_id, )) self.db.connection.commit() def 
build_type_where_condition(self, queryDict): """Builds the where_condition for the Select Query""" where = """ WHERE biotype.`biomimic_type`=\'%s\' AND geo.`country`=\'%s\' AND geo.`state_province`= \'%s\' AND geo.`location`=\'%s\'""" % \ (queryDict.get('biomimic_type'), queryDict.get('country'), \ queryDict.get('state_province'), queryDict.get('location')) if queryDict.get('zone') != "All": where += " AND prop.`zone`=\'%s\'" % (queryDict.get('zone')) if queryDict.get('sub_zone') != "All": where += " AND prop.`sub_zone`=\'%s\'" % ( queryDict.get('sub_zone')) if queryDict.get('wave_exp') != "All": if (queryDict.get('wave_exp') == 'None'): where += " and prop.wave_exp is Null" else: where += " AND prop.`wave_exp`=\'%s\' " % ( queryDict.get('wave_exp')) return where def test_uploaded_logger_type_file_extension(self): """Test that uploaded logger type file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post('/upload', data={ 'loggerTypeFile': (BytesIO(b'logger Type File'), 'correctExtensionLoggerTypeFile.csv') }, follow_redirects=True) self.assertNotIn( b'File correctExtensionLoggerTypeFile.csv should be in csv format', response.data) def test_uploaded_logger_temp_file_extension(self): """Test that uploaded logger temperature file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post('/upload', data={ 'loggerTempFile': (BytesIO(b'logger Temp File'), 'correctExtensionLoggerTempFile.csv') }, follow_redirects=True) self.assertNotIn( b'File correctExtensionLoggerTempFile.pdf should be in csv or txt format', response.data) def test_uploaded_logger_type_file_extension_negative(self): """Test that uploaded logger type file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = 
client.post('/upload', data={ 'loggerTypeFile': (BytesIO(b'logger Type File'), 'incorrectExtensionLoggerTypeFile.txt') }, follow_redirects=True) self.assertIn( b'File incorrectExtensionLoggerTypeFile.txt should be in csv format', response.data) def test_uploaded_logger_temp_file_extension_negative(self): """Test that uploaded logger temperature file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post('/upload', data={ 'loggerTempFile': (BytesIO(b'logger Temp File'), 'incorrectExtensionLoggerTypeFile.pdf') }, follow_redirects=True) self.assertIn( b'File incorrectExtensionLoggerTypeFile.pdf should be in csv or txt format', response.data) def test_uploaded_logger_type_file_missing(self): """Test that uploaded logger type file is not missing""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post( '/upload', data={'loggerTypeFile': (BytesIO(b'logger Type File'), '')}, follow_redirects=True) self.assertIn(b'Please choose a file first', response.data) def test_uploaded_logger_temp_file_missing(self): """Test that uploaded logger temp file is not missing""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post( '/upload', data={'loggerTempFile': (BytesIO(b'logger Temp File'), '')}, follow_redirects=True) self.assertIn(b'Please choose a file first', response.data) def test_logger_type_upload(self): """Test that file with valid Type uploads is inserted in DB.""" test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv' microsite_id = None with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post('/upload', data={ 'loggerTypeFile': (open(test_filename, 'rb'), 'Test_New_Logger_Type_Positive.csv') }, follow_redirects=True) 
        # Expected logger-type row for the file just uploaded; used both to
        # build the verification query and to clean the row up afterwards.
        record = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave"
        }
        where_condition = self.build_type_where_condition(record)
        query = (
            "SELECT log.microsite_id "
            "FROM `cnx_logger` log "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = log.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = log.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = log.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchone()
        # Clean up before asserting so a failed assertion cannot leak test
        # rows into subsequent tests.
        self.clean_up_logger_type(cursor, record)
        cursor.close()
        if results is not None:
            microsite_id = results[0]
        self.assertEqual(record['microsite_id'], microsite_id)
        self.assertIn(
            b"<td># Proper Records</td>\n <td>1</td>", response.data)
        self.assertIn(
            b"<td># Corrupt Records</td>\n <td>0</td>", response.data)

    def test_logger_type_upload_corrupt(self):
        """Test that Logger Type file with corrupt records cannot be uploaded"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Corrupt.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_filename, 'rb'),
                                   'Test_New_Logger_Type_Corrupt.csv')
            }, follow_redirects=True)
        # Corrupt case 1: record missing the microsite_id column entirely.
        record_corrupt_ncolumns = {
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave"
        }
        where_condition = self.build_type_where_condition(
            record_corrupt_ncolumns)
        query = (
            "SELECT logger.microsite_id "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        # Nothing from the corrupt file may have reached the DB.
        self.assertEqual(len(results), 0)
        # Corrupt case 2: latitude is not numeric ("A36...").
        record_corrupt_coordinates = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "A36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave"
        }
        where_condition = self.build_type_where_condition(
            record_corrupt_coordinates)
        query = (
            "SELECT logger.microsite_id "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.assertEqual(len(results), 0)
        cursor.close()

    def test_logger_type_upload_duplicate(self):
        """Test that Logger Type file with duplicate Microsite Id cannot be uploaded"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Duplicate.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_filename, 'rb'),
                                   'Test_New_Logger_Type_Duplicate.csv')
            }, follow_redirects=True)
        record = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave"
        }
        where_condition = self.build_type_where_condition(record)
        query = (
            "SELECT logger.microsite_id "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.clean_up_logger_type(cursor, record)
        cursor.close()
        # Exactly one row despite the duplicate microsite_id in the file.
        self.assertEqual(len(results), 1)

    def test_logger_temperature_upload(self):
        """Test that file with valid Temperature uploads is inserted in DB."""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            # The type file must be uploaded first so the temperature rows
            # have a logger to attach to.
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_type_filename, 'rb'),
                                   'Test_New_Logger_Type_Positive.csv')
            }, follow_redirects=True)
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename, 'rb'),
                                   'DUMMYID_2000_pgsql.txt')
            }, follow_redirects=True)
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
            "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
        }
        # First two readings expected back from the DB, in upload order.
        record_temp = [{
            "Time_GMT": "7/1/2000 2:01",
            "Temp_C": 14
        }, {
            "Time_GMT": "7/1/2000 2:21",
            "Temp_C": 13.5
        }]
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT DATE_FORMAT(temp.Time_GMT,'%m/%d/%Y %H:%i'), temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
        cursor.close()
        self.assertEqual(
            datetime.strptime(record_temp[0]['Time_GMT'], '%m/%d/%Y %H:%M'),
            datetime.strptime(results[0][0], '%m/%d/%Y %H:%M'))
        self.assertEqual(record_temp[0]['Temp_C'], results[0][1])
        self.assertEqual(
            datetime.strptime(record_temp[1]['Time_GMT'], '%m/%d/%Y %H:%M'),
            datetime.strptime(results[1][0], '%m/%d/%Y %H:%M'))
        self.assertEqual(record_temp[1]['Temp_C'], results[1][1])
        self.assertIn(
            b"<td># Proper Records</td>\n <td>6</td>", response.data)
        self.assertIn(
            b"<td># Corrupt Records</td>\n <td>0</td>", response.data)
        # Verify the per-logger aggregate metadata written alongside the
        # temperature rows.
        query = (
            """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date)
            FROM `cnx_logger_metadata` meta
            INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id`
            WHERE log.`microsite_id`=%s""")
        cursor = self.db.connection.cursor()
        cursor.execute(query, (record_type["microsite_id"], ))
        results = cursor.fetchone()
        if results is not None:
            count = results[0]
            min_date = results[1]
            max_date = results[2]
        self.clean_up_logger_temp(cursor)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        self.assertEqual(count, 6)
        self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1))
        self.assertEqual(max_date, datetime(2002, 8, 16, 9, 41))

    def test_logger_temperature_upload_corrupt(self):
        """Test that Logger Temperature file with corrupt records cannot be uploaded"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_corrupt.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_type_filename, 'rb'),
                                   'Test_New_Logger_Type_Positive.csv')
            }, follow_redirects=True)
            # NOTE(review): the file on disk is .csv but is uploaded under a
            # .txt name — presumably intentional (temp files accept txt);
            # confirm.
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename, 'rb'),
                                   'DUMMYID_2000_corrupt.txt')
            }, follow_redirects=True)
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
            "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
        }
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT temp.Time_GMT, temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        # No temperature rows may have been inserted from the corrupt file.
        self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_missing(self):
        """Test that Logger Temperature file with missing Microsite Id cannot be uploaded"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID2_2000_Missing_Type.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_type_filename, 'rb'),
                                   'Test_New_Logger_Type_Positive.csv')
            }, follow_redirects=True)
            # DUMMYID2 has no registered logger type, so the upload must fail.
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename, 'rb'),
                                   'DUMMYID2_2000_Missing_Type.txt')
            }, follow_redirects=True)
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
            "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
        }
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT temp.Time_GMT, temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        self.assertEqual(len(results), 0)

    def test_logger_metadata_update(self):
        """Test that metadata table gets updated with subsequent inserts in DB."""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt'
        test_temp_filename2 = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2001_pgsql.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (open(test_type_filename, 'rb'),
                                   'Test_New_Logger_Type_Positive.csv')
            }, follow_redirects=True)
            # Two temperature uploads for the same logger: the metadata
            # aggregates must accumulate across both.
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename, 'rb'),
                                   'DUMMYID_2000_pgsql.txt')
            }, follow_redirects=True)
            response = client.post('/upload', data={
                'loggerTempFile': (open(test_temp_filename2, 'rb'),
                                   'DUMMYID_2001_pgsql.txt')
            }, follow_redirects=True)
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
            "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
        }
        query = (
            """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date)
            FROM `cnx_logger_metadata` meta
            INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id`
            WHERE log.`microsite_id`=%s""")
        cursor = self.db.connection.cursor()
        cursor.execute(query, (record_type["microsite_id"], ))
        results = cursor.fetchone()
        if results is not None:
            count = results[0]
            min_date = results[1]
            max_date = results[2]
        self.clean_up_logger_temp(cursor)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        # 6 rows per file, two files.
        self.assertEqual(count, 12)
        self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1))
        self.assertEqual(max_date, datetime(2006, 8, 16, 9, 41))
class UploadTestCase(unittest.TestCase):
    """Upload Feature specific Test Cases will go here.

    Each test drives the Flask ``/upload`` endpoint through the test client,
    then verifies (and cleans up) the rows written to the database via a
    direct connection.
    """

    def setUp(self):
        """Setup test app and a direct DB connection for verification."""
        self.app = create_app("tests.config")
        self.db = DbConnect(self.app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def clean_up_logger_temp(self, cursor):
        """Clean up table cnx_logger_temperature, then its metadata rows."""
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_temp_ids = [result[0] for result in results]
            for logger_temp_id in logger_temp_ids:
                cursor.execute(
                    "DELETE FROM `cnx_logger_temperature` "
                    "WHERE logger_temp_id=%s",
                    (logger_temp_id,))
                self.db.connection.commit()
        self.clean_up_metadata_table(cursor)

    def clean_up_logger_type(self, cursor, rec):
        """Clean up logger type tables for the record ``rec``.

        Deletes the logger row first, then the biomimic-type, geographic and
        property dimension rows it referenced.
        """
        biomimic_id = self.db.fetch_existing_bio_id(cursor, rec.get("biomimic_type"))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get("microsite_id"))
        cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s", (logger_id,))
        self.db.connection.commit()
        cursor.execute("DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s", (biomimic_id,))
        self.db.connection.commit()
        cursor.execute("DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s", (geo_id,))
        self.db.connection.commit()
        cursor.execute("DELETE FROM `cnx_logger_properties` WHERE prop_id=%s", (prop_id,))
        self.db.connection.commit()

    def clean_up_metadata_table(self, cursor):
        """Clean up table cnx_logger_metadata."""
        cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_ids = [result[0] for result in results]
            for logger_id in logger_ids:
                # BUG FIX: the placeholder was written as '%s' (quoted).
                # With a parameterized execute the driver quotes the value
                # itself, so the old form compared against a doubly-quoted
                # string and the DELETE matched nothing, leaking metadata
                # rows between tests.
                cursor.execute(
                    "DELETE FROM `cnx_logger_metadata` WHERE logger_id=%s",
                    (logger_id,))
                self.db.connection.commit()

    def build_type_where_condition(self, queryDict):
        """Builds the where_condition for the Select Query"""
        # NOTE: values are interpolated directly into the SQL string. That is
        # injection-prone in general; tolerated here only because the values
        # are fixed test constants.
        where = """ WHERE biotype.`biomimic_type`=\'%s\' AND geo.`country`=\'%s\' AND geo.`state_province`= \'%s\' AND geo.`location`=\'%s\'""" % (
            queryDict.get("biomimic_type"),
            queryDict.get("country"),
            queryDict.get("state_province"),
            queryDict.get("location"),
        )
        # "All" means "do not filter on this column".
        if queryDict.get("zone") != "All":
            where += " AND prop.`zone`='%s'" % (queryDict.get("zone"))
        if queryDict.get("sub_zone") != "All":
            where += " AND prop.`sub_zone`='%s'" % (queryDict.get("sub_zone"))
        if queryDict.get("wave_exp") != "All":
            if queryDict.get("wave_exp") == "None":
                where += " and prop.wave_exp is Null"
            else:
                where += " AND prop.`wave_exp`='%s' " % (queryDict.get("wave_exp"))
        return where

    def test_uploaded_logger_type_file_extension(self):
        """Test that uploaded logger type file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (BytesIO(b"logger Type File"),
                                         "correctExtensionLoggerTypeFile.csv")},
                follow_redirects=True,
            )
        # A .csv name is accepted, so no format complaint may be rendered.
        self.assertNotIn(
            b"File correctExtensionLoggerTypeFile.csv should be in csv format",
            response.data)

    def test_uploaded_logger_temp_file_extension(self):
        """Test that uploaded logger temperature file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTempFile": (BytesIO(b"logger Temp File"),
                                         "correctExtensionLoggerTempFile.csv")},
                follow_redirects=True,
            )
        # BUG FIX: the assertion previously checked for a message about
        # "...LoggerTempFile.pdf", a filename never uploaded here, so it was
        # vacuously true. Check for the filename actually uploaded.
        self.assertNotIn(
            b"File correctExtensionLoggerTempFile.csv should be in csv or txt format",
            response.data)

    def test_uploaded_logger_type_file_extension_negative(self):
        """Test that uploaded logger type file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (BytesIO(b"logger Type File"),
                                         "incorrectExtensionLoggerTypeFile.txt")},
                follow_redirects=True,
            )
        self.assertIn(
            b"File incorrectExtensionLoggerTypeFile.txt should be in csv format",
            response.data)

    def test_uploaded_logger_temp_file_extension_negative(self):
        """Test that uploaded logger temperature file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTempFile": (BytesIO(b"logger Temp File"),
                                         "incorrectExtensionLoggerTypeFile.pdf")},
                follow_redirects=True,
            )
        self.assertIn(
            b"File incorrectExtensionLoggerTypeFile.pdf should be in csv or txt format",
            response.data)

    def test_uploaded_logger_type_file_missing(self):
        """Test that uploaded logger type file is not missing"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            # An empty filename means "no file chosen".
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (BytesIO(b"logger Type File"), "")},
                follow_redirects=True
            )
        self.assertIn(b"Please choose a file first", response.data)

    def test_uploaded_logger_temp_file_missing(self):
        """Test that uploaded logger temp file is not missing"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTempFile": (BytesIO(b"logger Temp File"), "")},
                follow_redirects=True
            )
        self.assertIn(b"Please choose a file first", response.data)

    def test_logger_type_upload(self):
        """Test that file with valid Type uploads is inserted in DB."""
        test_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv"
        microsite_id = None
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_filename, "rb"),
                                         "Test_New_Logger_Type_Positive.csv")},
                follow_redirects=True,
            )
        # Expected logger-type row; also used to clean up afterwards.
        record = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
        }
        where_condition = self.build_type_where_condition(record)
        query = (
            "SELECT log.microsite_id "
            "FROM `cnx_logger` log "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = log.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = log.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = log.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchone()
        # Clean up before asserting so a failure cannot leak test rows.
        self.clean_up_logger_type(cursor, record)
        cursor.close()
        if results is not None:
            microsite_id = results[0]
        self.assertEqual(record["microsite_id"], microsite_id)
        self.assertIn(b"<td># Proper Records</td>\n <td>1</td>", response.data)
        self.assertIn(b"<td># Corrupt Records</td>\n <td>0</td>", response.data)

    def test_logger_type_upload_corrupt(self):
        """Test that Logger Type file with corrupt records cannot be uploaded"""
        test_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Corrupt.csv"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_filename, "rb"),
                                         "Test_New_Logger_Type_Corrupt.csv")},
                follow_redirects=True,
            )
        # Corrupt case 1: record missing the microsite_id column entirely.
        record_corrupt_ncolumns = {
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
        }
        where_condition = self.build_type_where_condition(record_corrupt_ncolumns)
        query = (
            "SELECT logger.microsite_id "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.assertEqual(len(results), 0)
        # Corrupt case 2: latitude is not numeric.
        record_corrupt_coordinates = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "A36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
        }
        where_condition = self.build_type_where_condition(record_corrupt_coordinates)
        query = (
            "SELECT logger.microsite_id "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.assertEqual(len(results), 0)
        cursor.close()

    def test_logger_type_upload_duplicate(self):
        """Test that Logger Type file with duplicate Microsite Id cannot be uploaded"""
        test_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Duplicate.csv"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_filename, "rb"),
                                         "Test_New_Logger_Type_Duplicate.csv")},
                follow_redirects=True,
            )
        record = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
        }
        where_condition = self.build_type_where_condition(record)
        query = (
            "SELECT logger.microsite_id "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.clean_up_logger_type(cursor, record)
        cursor.close()
        # Exactly one row despite the duplicate microsite_id in the file.
        self.assertEqual(len(results), 1)

    def test_logger_temperature_upload(self):
        """Test that file with valid Temperature uploads is inserted in DB."""
        test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv"
        test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            # The type file must go first so temperatures have a logger row.
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_type_filename, "rb"),
                                         "Test_New_Logger_Type_Positive.csv")},
                follow_redirects=True,
            )
            response = client.post(
                "/upload",
                data={"loggerTempFile": (open(test_temp_filename, "rb"),
                                         "DUMMYID_2000_pgsql.txt")},
                follow_redirects=True,
            )
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()),
            "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()),
        }
        # First two readings expected back from the DB, in upload order.
        record_temp = [{"Time_GMT": "7/1/2000 2:01", "Temp_C": 14},
                       {"Time_GMT": "7/1/2000 2:21", "Temp_C": 13.5}]
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT DATE_FORMAT(temp.Time_GMT,'%m/%d/%Y %H:%i'), temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
        cursor.close()
        self.assertEqual(
            datetime.strptime(record_temp[0]["Time_GMT"], "%m/%d/%Y %H:%M"),
            datetime.strptime(results[0][0], "%m/%d/%Y %H:%M"),
        )
        self.assertEqual(record_temp[0]["Temp_C"], results[0][1])
        self.assertEqual(
            datetime.strptime(record_temp[1]["Time_GMT"], "%m/%d/%Y %H:%M"),
            datetime.strptime(results[1][0], "%m/%d/%Y %H:%M"),
        )
        self.assertEqual(record_temp[1]["Temp_C"], results[1][1])
        self.assertIn(b"<td># Proper Records</td>\n <td>6</td>", response.data)
        self.assertIn(b"<td># Corrupt Records</td>\n <td>0</td>", response.data)
        # Verify the per-logger aggregate metadata written with the rows.
        query = """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date)
            FROM `cnx_logger_metadata` meta
            INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id`
            WHERE log.`microsite_id`=%s"""
        cursor = self.db.connection.cursor()
        cursor.execute(query, (record_type["microsite_id"],))
        results = cursor.fetchone()
        if results is not None:
            count = results[0]
            min_date = results[1]
            max_date = results[2]
        self.clean_up_logger_temp(cursor)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        self.assertEqual(count, 6)
        self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1))
        self.assertEqual(max_date, datetime(2002, 8, 16, 9, 41))

    def test_logger_temperature_upload_corrupt(self):
        """Test that Logger Temperature file with corrupt records cannot be uploaded"""
        test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv"
        test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID_2000_corrupt.csv"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_type_filename, "rb"),
                                         "Test_New_Logger_Type_Positive.csv")},
                follow_redirects=True,
            )
            # NOTE(review): file on disk is .csv but is uploaded under a .txt
            # name — presumably intentional (temp files accept txt); confirm.
            response = client.post(
                "/upload",
                data={"loggerTempFile": (open(test_temp_filename, "rb"),
                                         "DUMMYID_2000_corrupt.txt")},
                follow_redirects=True,
            )
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()),
            "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()),
        }
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT temp.Time_GMT, temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        # No temperature rows may have been inserted from the corrupt file.
        self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_missing(self):
        """Test that Logger Temperature file with missing Microsite Id cannot be uploaded"""
        test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv"
        test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID2_2000_Missing_Type.txt"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_type_filename, "rb"),
                                         "Test_New_Logger_Type_Positive.csv")},
                follow_redirects=True,
            )
            # DUMMYID2 has no registered logger type, so this upload fails.
            response = client.post(
                "/upload",
                data={"loggerTempFile": (open(test_temp_filename, "rb"),
                                         "DUMMYID2_2000_Missing_Type.txt")},
                follow_redirects=True,
            )
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()),
            "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()),
        }
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT temp.Time_GMT, temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
        )
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = cursor.fetchall()
        results = list(results)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        self.assertEqual(len(results), 0)

    def test_logger_metadata_update(self):
        """Test that metadata table gets updated with subsequent inserts in DB."""
        test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv"
        test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt"
        test_temp_filename2 = "server/tests/test_data_files/Test/temp_files/DUMMYID_2001_pgsql.txt"
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess["logged_in"] = True
            response = client.post(
                "/upload",
                data={"loggerTypeFile": (open(test_type_filename, "rb"),
                                         "Test_New_Logger_Type_Positive.csv")},
                follow_redirects=True,
            )
            # Two temperature uploads for the same logger: the metadata
            # aggregates must accumulate across both.
            response = client.post(
                "/upload",
                data={"loggerTempFile": (open(test_temp_filename, "rb"),
                                         "DUMMYID_2000_pgsql.txt")},
                follow_redirects=True,
            )
            response = client.post(
                "/upload",
                data={"loggerTempFile": (open(test_temp_filename2, "rb"),
                                         "DUMMYID_2001_pgsql.txt")},
                follow_redirects=True,
            )
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()),
            "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()),
        }
        query = """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date)
            FROM `cnx_logger_metadata` meta
            INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id`
            WHERE log.`microsite_id`=%s"""
        cursor = self.db.connection.cursor()
        cursor.execute(query, (record_type["microsite_id"],))
        results = cursor.fetchone()
        if results is not None:
            count = results[0]
            min_date = results[1]
            max_date = results[2]
        self.clean_up_logger_temp(cursor)
        self.clean_up_logger_type(cursor, record_type)
        cursor.close()
        # 6 rows per file, two files uploaded.
        self.assertEqual(count, 12)
        self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1))
        self.assertEqual(max_date, datetime(2006, 8, 16, 9, 41))
class UploadTestEdgeCase(unittest.TestCase):
    """Upload Feature specific Test Cases will go here"""

    def setUp(self):
        """Setup test app and open a database connection."""
        self.app = create_app('tests.config')
        self.db = DbConnect(self.app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def cleanUpLoggerTemp(self, cursor):
        """Clean up table cnx_logger_temperature, then the metadata table.

        Args:
            cursor: an open DB-API cursor on the test database.
        """
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            logger_temp_ids = [result[0] for result in results]
            for logger_temp_id in logger_temp_ids:
                # Parameterized delete: the driver escapes and quotes the id
                # (the original interpolated it into the SQL with '%s' %-formatting).
                cursor.execute(
                    "DELETE FROM `cnx_logger_temperature` "
                    "WHERE logger_temp_id=%s",
                    (logger_temp_id,))
                self.db.connection.commit()
        self.cleanUpMetadataTable(cursor)

    def cleanUpLoggerType(self, cursor, rec):
        """Clean up logger type tables.

        Deletes the cnx_logger row for rec's microsite_id plus its
        biomimic-type, geographic and property rows.

        Args:
            cursor: an open DB-API cursor on the test database.
            rec: dict describing the logger type record to remove.
        """
        biomimic_id = self.db.fetch_existing_bio_id(cursor,
                                                    rec.get('biomimic_type'))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get('microsite_id'))
        # All four deletes are parameterized with tuple args. If a lookup
        # returned None the driver renders SQL NULL, which matches no row,
        # instead of the invalid literal the original interpolation produced.
        cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s",
                       (logger_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s",
            (biomimic_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s",
            (geo_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_properties` WHERE prop_id=%s",
            (prop_id,))
        self.db.connection.commit()

    def cleanUpMetadataTable(self, cursor):
        """Clean up table cnx_logger_metadata."""
        cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`")
        results = cursor.fetchall()
        if results is not None:
            logger_ids = [result[0] for result in results]
            for logger_id in logger_ids:
                # No quotes around %s: the driver supplies its own quoting
                # (the original wrote '%s', which double-quotes string ids).
                cursor.execute(
                    "DELETE FROM `cnx_logger_metadata` WHERE logger_id=%s",
                    (logger_id,))
                self.db.connection.commit()

    def test_logger_type_upload_MicrositeId_None(self):
        """Test that upload Logger Type file without microsite_id will not be
        inserted to database"""
        test_filename = ('server/tests/test_data_files/Test/'
                         'Test_New_Logger_Type_MicrositeId_None.csv')
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            client.post(
                '/upload',
                data={'loggerTypeFile': (
                    open(test_filename, 'rb'),
                    'Test_New_Logger_Type_MicrositeId_None.csv')},
                follow_redirects=True)
        query = ("SELECT * from cnx_logger_biomimic_type "
                 "where biomimic_type='DummyBiomimicTypeNone'")
        cursor = self.db.connection.cursor()
        cursor.execute(query)
        results = list(cursor.fetchall())
        # Close the cursor (the original leaked it).
        cursor.close()
        self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_duplicate(self):
        """Test that Logger Temperature file with duplicate entry cannot be
        uploaded"""
        test_type_filename = ('server/tests/test_data_files/Test/'
                              'Test_New_Logger_Type_Positive.csv')
        test_temp_filename = ('server/tests/test_data_files/Test/temp_files/'
                              'DUMMYID_2000_pgsql_Duplicate.txt')
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            client.post(
                '/upload',
                data={'loggerTypeFile': (
                    open(test_type_filename, 'rb'),
                    'Test_New_Logger_Type_Positive.csv')},
                follow_redirects=True)
            client.post(
                '/upload',
                data={'loggerTempFile': (
                    open(test_temp_filename, 'rb'),
                    'DUMMYID_2000_pgsql_Duplicate.txt')},
                follow_redirects=True)
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000",
                                                '%m/%d/%Y').date()),
            "end_date": str(datetime.strptime("7/2/2000",
                                              '%m/%d/%Y').date()),
        }
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT temp.Time_GMT, temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype "
            "ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo "
            "ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop "
            "ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp "
            "ON temp.`logger_id` = logger.`logger_id` ")
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = list(cursor.fetchall())
        self.cleanUpLoggerTemp(cursor)
        self.cleanUpLoggerType(cursor, record_type)
        cursor.close()
        # The duplicate row must have been rejected, leaving one record.
        self.assertEqual(len(results), 1)