def add_logger_type(reader):
    """Insert logger type records parsed from an uploaded file.

    Args:
        reader: iterable of raw logger-type records (e.g. csv rows).

    Returns:
        tuple: ({"total", "success", "failure"} counters, error_message).
        error_message is always None here; it exists to mirror the
        (result, error) contract of the sibling upload helpers.
    """
    db = DbConnect(current_app.config)
    parsed_records = []
    success_counter = 0
    failure_counter = 0
    error_message = None
    try:
        for record in reader:
            parsed_record_dict, is_parse_error = db.parse_logger_type(record)
            if not is_parse_error:
                parsed_records.append(parsed_record_dict)
            else:
                failure_counter += 1
        total_counter = len(parsed_records) + failure_counter
        if parsed_records:
            success_counter = db.insert_logger_type(parsed_records)
            # Records that parsed but failed to insert count as failures too.
            failure_counter += len(parsed_records) - success_counter
    finally:
        # BUG FIX: the original never closed the connection if parsing or
        # the insert raised; always release it.
        db.close()
    return {"total": total_counter,
            "success": success_counter,
            "failure": failure_counter}, error_message
def add_logger_temp(reader, filename):
    '''Insert logger temperature readings parsed from an uploaded file.

    Args:
        reader: iterable of raw temperature records.
        filename: upload filename; the leading segment before the trailing
            underscore-separated parts is taken as the microsite_id
            (assumes that naming convention — TODO confirm with callers).

    Returns:
        tuple: ({"total", "success", "failure"} counters, error_message),
        or (None, error_message) when the microsite_id is unknown.
    '''
    db = DbConnect(current_app.config)
    parsed_records = []
    success_counter = 0
    failure_counter = 0
    error_message = None
    microsite_id = filename.rsplit('_', 5)[0].upper()
    logger_id = db.find_microsite_id(microsite_id)
    if logger_id is None:
        # BUG FIX: the original returned here without closing the DB
        # connection, leaking it on every unknown-microsite upload.
        db.close()
        error_message = (filename + ": The microsite_id does not exist. "
                         "Please upload logger data type file first\n")
        return None, error_message
    try:
        for record in reader:
            parsed_record_dict, is_parse_error = db.parse_logger_temp(record)
            if not is_parse_error:
                parsed_records.append(parsed_record_dict)
            else:
                failure_counter += 1
        total_counter = len(parsed_records) + failure_counter
        if parsed_records:
            success_counter = db.insert_logger_temp(parsed_records, logger_id)
            # Parsed records that failed to insert are counted as failures.
            failure_counter += len(parsed_records) - success_counter
    finally:
        # Always release the connection, even if parsing/insert raises.
        db.close()
    return {"total": total_counter,
            "success": success_counter,
            "failure": failure_counter}, error_message
def submit_query():
    '''Collect the query form values and return preview results as JSON.'''
    form = dict(request.args)
    query = {}
    # Werkzeug multi-dict values arrive as lists; take the first entry.
    query["biomimic_type"] = form.get("biomimic_type")[0]
    query["country"] = form.get("country")[0]
    for field in ("state_province", "location", "zone", "sub_zone",
                  "wave_exp", "output_type"):
        query[field] = form[field][0]
    if form.get('analysis_type') is not None:
        query["analysis_type"] = form['analysis_type'][0]
    start_raw = form.get('start_date')[0]
    end_raw = form.get('end_date')[0]
    # Only filter by date when both endpoints were supplied.
    if start_raw != '' and end_raw != '':
        query["start_date"] = str(
            datetime.datetime.strptime(start_raw, '%m/%d/%Y').date())
        query["end_date"] = str(
            datetime.datetime.strptime(end_raw, '%m/%d/%Y').date())
    session['query'] = query
    db_connection = DbConnect(current_app.config)
    preview_results, db_query = db_connection.get_query_results_preview(query)
    session['db_query'] = db_query
    db_connection.close()
    return jsonify(list_of_results=preview_results)
def queryDb(value_type, value):
    """Query Database to get options for each drop-down menu."""
    result, countRecords, minDate, maxDate = None, None, None, None
    db_connection = DbConnect(current_app.config)
    field_order = ['biomimic_type', 'country', 'state_province', 'location',
                   'zone', 'sub_zone', 'wave_exp', 'start_date', 'end_date',
                   'output_type', 'analysis_type']
    # Invalidate every downstream selection: drop all keys that come after
    # the field the user just changed, then record the new selection.
    for key in field_order[field_order.index(value_type) + 1:]:
        session['query'].pop(key, None)
    session['query'][value_type] = value
    # Each field maps to the fetcher supplying the next drop-down's options.
    fetchers = {
        "biomimic_type": db_connection.fetch_distinct_countries_and_zones,
        "country": db_connection.fetch_distinct_states,
        "state_province": db_connection.fetch_distinct_locations,
        "zone": db_connection.fetch_distinct_sub_zones,
        "sub_zone": db_connection.fetch_distinct_wave_exposures,
    }
    if value_type == "location":
        # location has no dependent drop-down; it only refreshes metadata.
        countRecords, minDate, maxDate = \
            db_connection.fetch_metadata(session['query'])
    elif value_type in fetchers:
        result, countRecords, minDate, maxDate = \
            fetchers[value_type](session['query'])
    db_connection.close()
    return result, countRecords, minDate, maxDate
def add_logger_temp(reader, filename):
    '''Insert logger temperature readings parsed from an uploaded file.

    Args:
        reader: iterable of raw temperature records.
        filename: upload filename; the leading segment is taken as the
            microsite_id (assumes the project naming convention — TODO
            confirm with callers).

    Returns:
        tuple: ({"total", "success", "failure"} counters, error_message),
        or (None, error_message) when the microsite_id is unknown.
    '''
    db = DbConnect(current_app.config)
    parsed_records = []
    success_counter = 0
    failure_counter = 0
    error_message = None
    microsite_id = filename.rsplit('_', 5)[0].upper()
    logger_id = db.find_microsite_id(microsite_id)
    if logger_id is None:
        # BUG FIX: the original returned here without closing the DB
        # connection, leaking it on every unknown-microsite upload.
        db.close()
        error_message = (filename + ": The microsite_id does not exist. "
                         "Please upload logger data type file first\n")
        return None, error_message
    try:
        for record in reader:
            parsed_record_dict, is_parse_error = db.parse_logger_temp(record)
            if not is_parse_error:
                parsed_records.append(parsed_record_dict)
            else:
                failure_counter += 1
        total_counter = len(parsed_records) + failure_counter
        if parsed_records:
            success_counter = db.insert_logger_temp(parsed_records, logger_id)
            # Parsed records that failed to insert are counted as failures.
            failure_counter += len(parsed_records) - success_counter
    finally:
        # Always release the connection, even if parsing/insert raises.
        db.close()
    return {"total": total_counter,
            "success": success_counter,
            "failure": failure_counter}, error_message
def download():
    """Build a CSV export of the last query's raw results and send it."""
    db_connection = DbConnect(current_app.config)
    query = session['query']
    # Map the analysis granularity to its CSV time-column title; a raw
    # (no-analysis) export uses a full timestamp column.
    titles_by_analysis = {"Daily": "Date",
                          "Monthly": "Month, Year",
                          "Yearly": "Year"}
    time_title = titles_by_analysis.get(query.get("analysis_type"),
                                        "Timestamp")
    header = [
        ("Output type", "District", "Site", "Reef", "Reef Location", "Depth"),
        [query["output_type"], query["biomimic_type"], query["country"],
         query["state_province"], query["location"], query["zone"]],
        (time_title, "Temperature"),
    ]
    rows = db_connection.get_query_raw_results(session['db_query'])
    db_connection.close()
    return excel.make_response_from_array(header + rows, "csv",
                                          file_name="export_data")
def submit_query():
    '''Collect the query form values and return preview results as JSON.'''
    args = dict(request.args)
    query = {}
    # Multi-dict values arrive as one-element lists; unwrap the first.
    query["biomimic_type"] = args.get("biomimic_type")[0]
    query["country"] = args.get("country")[0]
    for name in ("state_province", "location", "zone", "sub_zone",
                 "wave_exp", "output_type"):
        query[name] = args[name][0]
    if args.get('analysis_type') is not None:
        query["analysis_type"] = args['analysis_type'][0]
    date_from = args.get('start_date')[0]
    date_to = args.get('end_date')[0]
    # A date filter is applied only when both endpoints are non-empty.
    if date_from != '' and date_to != '':
        query["start_date"] = str(
            datetime.datetime.strptime(date_from, '%m/%d/%Y').date())
        query["end_date"] = str(
            datetime.datetime.strptime(date_to, '%m/%d/%Y').date())
    session['query'] = query
    db_connection = DbConnect(current_app.config)
    preview_results, db_query = db_connection.get_query_results_preview(query)
    session['db_query'] = db_query
    db_connection.close()
    return jsonify(list_of_results=preview_results)
def query():
    """Render the query form with biomimic-type choices pre-loaded."""
    try:
        error = None
        db_connection = DbConnect(current_app.config)
        choices = db_connection.fetch_biomimic_types()
        db_connection.close()
        form = QueryForm(request.form)
        form.biomimic_type.choices = choices
        # Start every visit with a clean per-session query state.
        session['query'] = {}
        if request.method == 'GET':
            form.process()
        return render_template('query.html', title='Query',
                               form=form, error=error)
    except TemplateNotFound:
        abort(404)
def query():
    """Render the query form (GET) or run the submitted query (POST).

    On POST, builds a query dict from the form fields, runs it against the
    database, and flashes the results back to the template.
    """
    error = None
    db = DbConnect(app.config)
    logger_type_choices = db.getLoggerTypes()
    db.close()
    form = QueryForm(request.form)
    form.logger_type.choices = logger_type_choices
    if request.method == 'GET':
        form.process()
    else:
        query = {}
        # BUG FIX: the original ended most of these assignments with a
        # trailing comma, which silently wrapped every value in a
        # one-element tuple (inconsistently — end_date had none). Store
        # plain values for all fields.
        query["logger_type"] = form.logger_type.data
        query["country_name"] = form.country_name.data
        if form.state_name.data is not None:
            query["state_name"] = form.state_name.data
        if form.location_name.data is not None:
            query["location_name"] = form.location_name.data
        if form.zone_name.data is not None:
            query["zone_name"] = form.zone_name.data
        if form.sub_zone_name.data is not None:
            query["sub_zone_name"] = form.sub_zone_name.data
        if form.wave_exp_name.data is not None:
            query["wave_exp"] = form.wave_exp_name.data
        query["start_date"] = form.date_pick_from.data.strftime('%m/%d/%Y')
        query["end_date"] = form.date_pick_to.data.strftime('%m/%d/%Y')
        db = DbConnect(app.config)
        query_results = db.getQueryResults(query)
        db.close()
        flash(query_results)
    return render_template('query.html', title='Query', form=form,
                           error=error)
def test_db_connection_positive(self):
    """Test that the MySQL connection can read every core table."""
    data = True
    cursor = None
    db = None
    try:
        with self.app.app_context():
            db = DbConnect(self.app.config)
            cursor = db.connection.cursor()
            # Touch every table the application depends on; any missing
            # table or dead connection raises and flips `data` to None.
            for table in ('cnx_logger_biomimic_type',
                          'cnx_logger_properties',
                          'cnx_logger_geographics',
                          'cnx_logger',
                          'cnx_logger_temperature'):
                cursor.execute('SELECT * from %s LIMIT 2' % table)
    except (MySQLdb.OperationalError, MySQLdb.ProgrammingError):
        data = None
    finally:
        # BUG FIX: the original closed cursor/db unconditionally in finally,
        # raising UnboundLocalError when DbConnect or cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if db is not None:
            db.close()
    self.assertNotEqual(data, None)
def download():
    """Build a CSV export of the last query's raw results and send it."""
    db_connection = DbConnect(current_app.config)
    query = session['query']
    # Map the analysis granularity to its CSV time-column title; a raw
    # (no-analysis) export uses a full timestamp column.
    analysis_titles = {"Daily": "Date",
                       "Monthly": "Month, Year",
                       "Yearly": "Year"}
    time_title = analysis_titles.get(query.get("analysis_type"), "Timestamp")
    header = [["%s:%s" % (key, value) for key, value in query.items()],
              (time_title, "Temperature")]
    rows = db_connection.get_query_raw_results(session['db_query'])
    db_connection.close()
    return excel.make_response_from_array(header + rows, "csv",
                                          file_name="export_data")
def download():
    """Export the stored query's raw results as a downloadable CSV."""
    db_connection = DbConnect(current_app.config)
    query = session['query']
    # Choose the time-column title from the analysis granularity;
    # default to a full timestamp for raw exports.
    title_map = {"Daily": "Date", "Monthly": "Month, Year", "Yearly": "Year"}
    time_title = title_map.get(query.get("analysis_type"), "Timestamp")
    summary_row = ["%s:%s" % (key, value) for key, value in query.items()]
    header = [summary_row, (time_title, "Temperature")]
    query_results = header + db_connection.get_query_raw_results(
        session['db_query'])
    db_connection.close()
    return excel.make_response_from_array(query_results, "csv",
                                          file_name="export_data")
def add_logger_type(reader):
    """Insert logger type records parsed from an uploaded file.

    Args:
        reader: iterable of raw logger-type records (e.g. csv rows).

    Returns:
        tuple: ({"total", "success", "failure"} counters, error_message).
        error_message is always None here; it mirrors the (result, error)
        contract of the sibling upload helpers.
    """
    db = DbConnect(current_app.config)
    parsed_records = []
    success_counter = 0
    failure_counter = 0
    error_message = None
    try:
        for record in reader:
            parsed_record_dict, is_parse_error = db.parse_logger_type(record)
            if not is_parse_error:
                parsed_records.append(parsed_record_dict)
            else:
                failure_counter += 1
        total_counter = len(parsed_records) + failure_counter
        if parsed_records:
            success_counter = db.insert_logger_type(parsed_records)
            # Records that parsed but failed to insert count as failures too.
            failure_counter += len(parsed_records) - success_counter
    finally:
        # BUG FIX: the original never closed the connection if parsing or
        # the insert raised; always release it.
        db.close()
    return {"total": total_counter,
            "success": success_counter,
            "failure": failure_counter}, error_message
def test_db_connection_positive(self):
    """Test that the MySQL connection can read every core table."""
    data = True
    cursor = None
    db = None
    try:
        with self.app.app_context():
            db = DbConnect(self.app.config)
            cursor = db.connection.cursor()
            # Touch every table the application depends on; any missing
            # table or dead connection raises and flips `data` to None.
            for table in ('cnx_logger_biomimic_type',
                          'cnx_logger_properties',
                          'cnx_logger_geographics',
                          'cnx_logger',
                          'cnx_logger_temperature'):
                cursor.execute('SELECT * from %s LIMIT 2' % table)
    except (MySQLdb.OperationalError, MySQLdb.ProgrammingError):
        data = None
    finally:
        # BUG FIX: the original closed cursor/db unconditionally in finally,
        # raising UnboundLocalError when DbConnect or cursor() itself failed.
        if cursor is not None:
            cursor.close()
        if db is not None:
            db.close()
    self.assertNotEqual(data, None)
class UploadTestEdgeCase(unittest.TestCase):
    """Upload Feature specific Test Cases will go here"""

    def setUp(self):
        """Setup test app"""
        self.app = create_app('tests.config')
        self.db = DbConnect(self.app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def cleanUpLoggerTemp(self, cursor):
        '''clean up table cnx_logger_temperature'''
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_temp_ids = [result[0] for result in results]
            for logger_temp_id in logger_temp_ids:
                # BUG FIX: use a parameterized query; the original
                # %-interpolated the id inside quoted placeholders.
                cursor.execute(
                    "DELETE FROM `cnx_logger_temperature` "
                    "WHERE logger_temp_id=%s", (logger_temp_id,))
            self.db.connection.commit()
        self.cleanUpMetadataTable(cursor)

    def cleanUpLoggerType(self, cursor, rec):
        '''clean up logger type tables'''
        biomimic_id = self.db.fetch_existing_bio_id(
            cursor, rec.get('biomimic_type'))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get('microsite_id'))
        # BUG FIX: all deletes now use parameterized queries with proper
        # one-element tuples; the original mixed %-formatting (cnx_logger)
        # with bare, non-tuple parameter arguments.
        cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s",
                       (logger_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s",
            (biomimic_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s",
            (geo_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_properties` WHERE prop_id=%s",
            (prop_id,))
        self.db.connection.commit()

    def cleanUpMetadataTable(self, cursor):
        '''clean up table cnx_logger_metadata'''
        cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_ids = [result[0] for result in results]
            for logger_id in logger_ids:
                # BUG FIX: drop the quotes around the placeholder; with a
                # parameterized query the driver does the quoting itself.
                cursor.execute(
                    "DELETE FROM `cnx_logger_metadata` WHERE logger_id=%s",
                    (logger_id,))
            self.db.connection.commit()

    def test_logger_type_upload_MicrositeId_None(self):
        """Test that upload Logger Type file without microsite_id
        will not be inserted to database"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_MicrositeId_None.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post(
                '/upload',
                data={
                    'loggerTypeFile':
                        (open(test_filename, 'rb'),
                         'Test_New_Logger_Type_MicrositeId_None.csv')
                },
                follow_redirects=True)
            query = ("SELECT * from cnx_logger_biomimic_type "
                     "where biomimic_type='DummyBiomimicTypeNone'")
            cursor = self.db.connection.cursor()
            cursor.execute(query)
            results = cursor.fetchall()
            results = list(results)
            # Nothing may be inserted for a record lacking a microsite_id.
            self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_duplicate(self):
        """Test that Logger Temperature file with duplicate entry
        cannot be uploaded"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql_Duplicate.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            # Upload the type file first so the temperature upload can
            # resolve its microsite_id.
            response = client.post(
                '/upload',
                data={
                    'loggerTypeFile':
                        (open(test_type_filename, 'rb'),
                         'Test_New_Logger_Type_Positive.csv')
                },
                follow_redirects=True)
            response = client.post(
                '/upload',
                data={
                    'loggerTempFile':
                        (open(test_temp_filename, 'rb'),
                         'DUMMYID_2000_pgsql_Duplicate.txt')
                },
                follow_redirects=True)
            record_type = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": str(datetime.strptime("7/1/2000",
                                                    '%m/%d/%Y').date()),
                "end_date": str(datetime.strptime("7/2/2000",
                                                  '%m/%d/%Y').date())
            }
            where_condition = self.db.build_where_condition(record_type)
            query = (
                "SELECT temp.Time_GMT, temp.Temp_C "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
                "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` ")
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = cursor.fetchall()
            results = list(results)
            self.cleanUpLoggerTemp(cursor)
            self.cleanUpLoggerType(cursor, record_type)
            cursor.close()
            # The duplicated reading must be stored exactly once.
            self.assertEqual(len(results), 1)
class QueryFormTestCase(unittest.TestCase):
    """Query Form Feature specific Test Cases will go here"""

    def setUp(self):
        """Setup test app"""
        # TESTING disables Flask's error catching so failures surface here.
        app.config['TESTING'] = True
        self.db = DbConnect(app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def stringToBytes(self, stringValue):
        """Convert Strings to their Bytes representation"""
        # Flask's response.data is bytes; expected values must be encoded.
        return bytes(stringValue, 'UTF-8')

    def test_form_logger_type_automatic_fill(self):
        """Test the logger_type field is filled automatically on page load"""
        with app.test_client() as client:
            response = client.get('/query')
            logger_type_choices = self.db.getLoggerTypes()
            # Every known logger type should appear in the rendered form.
            for logger_type in logger_type_choices:
                self.assertIn(self.stringToBytes(logger_type[0]),
                              response.data)

    def check_ajax(self, selected_type, selected_value, dbFunction):
        """Helper Function to test the ajax call functionality when given
        selected type field is selected with given value"""
        with app.test_client() as client:
            response = client.get('/_parse_data', query_string=dict(
                select_type=selected_type, select_value=selected_value))
            # The request context is still active inside the client block,
            # so the echoed query-string arguments can be inspected.
            self.assertEqual(selected_type, request.args.get('select_type'))
            self.assertEqual(selected_value, request.args.get('select_value'))
            # The endpoint's rendered choices should match a direct DB
            # lookup through the supplied fetcher.
            choices = dbFunction(request.args.get('select_value'))
            for choice in choices:
                self.assertIn(self.stringToBytes(choice[0]), response.data)

    def test_form_logger_type_select(self):
        """Test the ajax call functionality if logger_type field is selected"""
        self.check_ajax("logger_type", "robomussel", self.db.getCountry)

    def test_form_country_name_select(self):
        """Test the ajax call functionality if country_name field is selected"""
        self.check_ajax("country_name", "USA", self.db.getState)

    def test_form_state_name_select(self):
        """Test the ajax call functionality if state_name field is selected"""
        self.check_ajax("state_name", "California", self.db.getLocation)

    def test_form_location_name_select(self):
        """Test the ajax call functionality if location_name field is selected"""
        # NOTE(review): select_type here is "lt_for_zone", not
        # "location_name" as the docstring suggests — confirm intended.
        self.check_ajax("lt_for_zone", "robomussel", self.db.getZone)

    def test_form_Zone_name_select(self):
        """Test the ajax call functionality if zone_name field is selected"""
        self.check_ajax("lt_for_subzone", "robomussel", self.db.getSubZone)

    def test_form_SubZone_name_select(self):
        """Test the ajax call functionality if subZone_name field is selected"""
        self.check_ajax("lt_for_wave_exp", "robomussel", self.db.getWaveExp)

    def test_query_results(self):
        """Test the query results functionality"""
        with app.test_client() as client:
            response = client.post('/query',
                                   data=dict(logger_type="robomussel",
                                             country_name="Canada",
                                             state_name="British Columbia",
                                             location_name="Seppings Island",
                                             zone_name="High",
                                             sub_zone_name="High",
                                             wave_exp_name="exposed",
                                             date_pick_from=datetime.date(
                                                 1995, 4, 15).strftime('%m/%d/%Y'),
                                             date_pick_to=datetime.date(
                                                 2016, 4, 30).strftime('%m/%d/%Y')),
                                   follow_redirects=True)
            # Expected flashed result string for this fixed query against
            # the test dataset.
            self.assertIn(
                b"[('robomussel', 13.98), ('robomussel', 12.87)]",
                response.data)
class UploadTestCase(unittest.TestCase): """Upload Feature specific Test Cases will go here""" def setUp(self): """Setup test app""" self.app = create_app("tests.config") self.db = DbConnect(self.app.config) def tearDown(self): """Close test database""" self.db.close() def clean_up_logger_temp(self, cursor): """ clean up table cnx_logger_temperature""" cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`") results = cursor.fetchall() if results is not None: results = list(results) logger_temp_ids = [result[0] for result in results] for logger_temp_id in logger_temp_ids: res = cursor.execute("DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=%s", (logger_temp_id,)) self.db.connection.commit() self.clean_up_metadata_table(cursor) def clean_up_logger_type(self, cursor, rec): """ clean up logger type tables""" biomimic_id = self.db.fetch_existing_bio_id(cursor, rec.get("biomimic_type")) geo_id = self.db.fetch_existing_geo_id(cursor, rec) prop_id = self.db.fetch_existing_prop_id(cursor, rec) logger_id = self.db.find_microsite_id(rec.get("microsite_id")) res = cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s", (logger_id,)) self.db.connection.commit() res = cursor.execute("DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s", (biomimic_id,)) self.db.connection.commit() res = cursor.execute("DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s", (geo_id,)) self.db.connection.commit() res = cursor.execute("DELETE FROM `cnx_logger_properties` WHERE prop_id=%s", (prop_id,)) self.db.connection.commit() def clean_up_metadata_table(self, cursor): """ clean up table cnx_logger_metadata""" cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`") results = cursor.fetchall() if results is not None: results = list(results) logger_ids = [result[0] for result in results] for logger_id in logger_ids: res = cursor.execute("DELETE FROM `cnx_logger_metadata` WHERE logger_id='%s'", (logger_id,)) self.db.connection.commit() def 
build_type_where_condition(self, queryDict): """Builds the where_condition for the Select Query""" where = """ WHERE biotype.`biomimic_type`=\'%s\' AND geo.`country`=\'%s\' AND geo.`state_province`= \'%s\' AND geo.`location`=\'%s\'""" % ( queryDict.get("biomimic_type"), queryDict.get("country"), queryDict.get("state_province"), queryDict.get("location"), ) if queryDict.get("zone") != "All": where += " AND prop.`zone`='%s'" % (queryDict.get("zone")) if queryDict.get("sub_zone") != "All": where += " AND prop.`sub_zone`='%s'" % (queryDict.get("sub_zone")) if queryDict.get("wave_exp") != "All": if queryDict.get("wave_exp") == "None": where += " and prop.wave_exp is Null" else: where += " AND prop.`wave_exp`='%s' " % (queryDict.get("wave_exp")) return where def test_uploaded_logger_type_file_extension(self): """Test that uploaded logger type file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (BytesIO(b"logger Type File"), "correctExtensionLoggerTypeFile.csv")}, follow_redirects=True, ) self.assertNotIn(b"File correctExtensionLoggerTypeFile.csv should be in csv format", response.data) def test_uploaded_logger_temp_file_extension(self): """Test that uploaded logger temperature file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTempFile": (BytesIO(b"logger Temp File"), "correctExtensionLoggerTempFile.csv")}, follow_redirects=True, ) self.assertNotIn(b"File correctExtensionLoggerTempFile.pdf should be in csv or txt format", response.data) def test_uploaded_logger_type_file_extension_negative(self): """Test that uploaded logger type file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( 
"/upload", data={"loggerTypeFile": (BytesIO(b"logger Type File"), "incorrectExtensionLoggerTypeFile.txt")}, follow_redirects=True, ) self.assertIn(b"File incorrectExtensionLoggerTypeFile.txt should be in csv format", response.data) def test_uploaded_logger_temp_file_extension_negative(self): """Test that uploaded logger temperature file has correct extensions""" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTempFile": (BytesIO(b"logger Temp File"), "incorrectExtensionLoggerTypeFile.pdf")}, follow_redirects=True, ) self.assertIn(b"File incorrectExtensionLoggerTypeFile.pdf should be in csv or txt format", response.data) def test_uploaded_logger_type_file_missing(self): """Test that uploaded logger type file is not missing""" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (BytesIO(b"logger Type File"), "")}, follow_redirects=True ) self.assertIn(b"Please choose a file first", response.data) def test_uploaded_logger_temp_file_missing(self): """Test that uploaded logger temp file is not missing""" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTempFile": (BytesIO(b"logger Temp File"), "")}, follow_redirects=True ) self.assertIn(b"Please choose a file first", response.data) def test_logger_type_upload(self): """Test that file with valid Type uploads is inserted in DB.""" test_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv" microsite_id = None with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_filename, "rb"), "Test_New_Logger_Type_Positive.csv")}, follow_redirects=True, ) record = { 
"microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", } where_condition = self.build_type_where_condition(record) query = ( "SELECT log.microsite_id " "FROM `cnx_logger` log " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = log.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = log.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = log.`prop_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchone() self.clean_up_logger_type(cursor, record) cursor.close() if results is not None: microsite_id = results[0] self.assertEqual(record["microsite_id"], microsite_id) self.assertIn(b"<td># Proper Records</td>\n <td>1</td>", response.data) self.assertIn(b"<td># Corrupt Records</td>\n <td>0</td>", response.data) def test_logger_type_upload_corrupt(self): """Test that Logger Type file with corrupt records cannot be uploaded""" test_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Corrupt.csv" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_filename, "rb"), "Test_New_Logger_Type_Corrupt.csv")}, follow_redirects=True, ) record_corrupt_ncolumns = { "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", } where_condition = self.build_type_where_condition(record_corrupt_ncolumns) query = ( "SELECT logger.microsite_id " "FROM 
`cnx_logger` logger " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchall() results = list(results) self.assertEqual(len(results), 0) record_corrupt_coordinates = { "microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "A36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", } where_condition = self.build_type_where_condition(record_corrupt_coordinates) query = ( "SELECT logger.microsite_id " "FROM `cnx_logger` logger " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchall() results = list(results) self.assertEqual(len(results), 0) cursor.close() def test_logger_type_upload_duplicate(self): """Test that Logger Type file with duplicate Microsite Id cannot be uploaded""" test_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Duplicate.csv" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_filename, "rb"), "Test_New_Logger_Type_Duplicate.csv")}, follow_redirects=True, ) record = { "microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", 
"location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", } where_condition = self.build_type_where_condition(record) query = ( "SELECT logger.microsite_id " "FROM `cnx_logger` logger " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchall() results = list(results) self.clean_up_logger_type(cursor, record) cursor.close() self.assertEqual(len(results), 1) def test_logger_temperature_upload(self): """Test that file with valid Temperature uploads is inserted in DB.""" test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv" test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_type_filename, "rb"), "Test_New_Logger_Type_Positive.csv")}, follow_redirects=True, ) response = client.post( "/upload", data={"loggerTempFile": (open(test_temp_filename, "rb"), "DUMMYID_2000_pgsql.txt")}, follow_redirects=True, ) record_type = { "microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()), "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()), } record_temp = [{"Time_GMT": "7/1/2000 2:01", "Temp_C": 
14}, {"Time_GMT": "7/1/2000 2:21", "Temp_C": 13.5}] where_condition = self.db.build_where_condition(record_type) query = ( "SELECT DATE_FORMAT(temp.Time_GMT,'%m/%d/%Y %H:%i'), temp.Temp_C " "FROM `cnx_logger` logger " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` " "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchall() if results is not None: results = list(results) cursor.close() self.assertEqual( datetime.strptime(record_temp[0]["Time_GMT"], "%m/%d/%Y %H:%M"), datetime.strptime(results[0][0], "%m/%d/%Y %H:%M"), ) self.assertEqual(record_temp[0]["Temp_C"], results[0][1]) self.assertEqual( datetime.strptime(record_temp[1]["Time_GMT"], "%m/%d/%Y %H:%M"), datetime.strptime(results[1][0], "%m/%d/%Y %H:%M"), ) self.assertEqual(record_temp[1]["Temp_C"], results[1][1]) self.assertIn(b"<td># Proper Records</td>\n <td>6</td>", response.data) self.assertIn(b"<td># Corrupt Records</td>\n <td>0</td>", response.data) query = """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date) FROM `cnx_logger_metadata` meta INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id` WHERE log.`microsite_id`=%s""" cursor = self.db.connection.cursor() cursor.execute(query, (record_type["microsite_id"],)) results = cursor.fetchone() if results is not None: count = results[0] min_date = results[1] max_date = results[2] self.clean_up_logger_temp(cursor) self.clean_up_logger_type(cursor, record_type) cursor.close() self.assertEqual(count, 6) self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1)) self.assertEqual(max_date, datetime(2002, 8, 16, 9, 41)) def test_logger_temperature_upload_corrupt(self): """Test that Logger 
Temperature file with corrupt records cannot be uploaded""" test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv" test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID_2000_corrupt.csv" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_type_filename, "rb"), "Test_New_Logger_Type_Positive.csv")}, follow_redirects=True, ) response = client.post( "/upload", data={"loggerTempFile": (open(test_temp_filename, "rb"), "DUMMYID_2000_corrupt.txt")}, follow_redirects=True, ) record_type = { "microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()), "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()), } where_condition = self.db.build_where_condition(record_type) query = ( "SELECT temp.Time_GMT, temp.Temp_C " "FROM `cnx_logger` logger " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` " "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchall() results = list(results) self.clean_up_logger_type(cursor, record_type) cursor.close() self.assertEqual(len(results), 0) def test_logger_temperature_upload_missing(self): """Test that Logger Temperature file with missing Microsite Id cannot be uploaded""" test_type_filename = 
"server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv" test_temp_filename = "server/tests/test_data_files/Test/temp_files/DUMMYID2_2000_Missing_Type.txt" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_type_filename, "rb"), "Test_New_Logger_Type_Positive.csv")}, follow_redirects=True, ) response = client.post( "/upload", data={"loggerTempFile": (open(test_temp_filename, "rb"), "DUMMYID2_2000_Missing_Type.txt")}, follow_redirects=True, ) record_type = { "microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()), "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()), } where_condition = self.db.build_where_condition(record_type) query = ( "SELECT temp.Time_GMT, temp.Temp_C " "FROM `cnx_logger` logger " "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` " "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` " "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` " "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` " ) cursor = self.db.connection.cursor() cursor.execute(query + where_condition) results = cursor.fetchall() results = list(results) self.clean_up_logger_type(cursor, record_type) cursor.close() self.assertEqual(len(results), 0) def test_logger_metadata_update(self): """Test that metadata table gets updated with subsequent inserts in DB.""" test_type_filename = "server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv" test_temp_filename = 
"server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt" test_temp_filename2 = "server/tests/test_data_files/Test/temp_files/DUMMYID_2001_pgsql.txt" with self.app.test_client() as client: with client.session_transaction() as sess: sess["logged_in"] = True response = client.post( "/upload", data={"loggerTypeFile": (open(test_type_filename, "rb"), "Test_New_Logger_Type_Positive.csv")}, follow_redirects=True, ) response = client.post( "/upload", data={"loggerTempFile": (open(test_temp_filename, "rb"), "DUMMYID_2000_pgsql.txt")}, follow_redirects=True, ) response = client.post( "/upload", data={"loggerTempFile": (open(test_temp_filename2, "rb"), "DUMMYID_2001_pgsql.txt")}, follow_redirects=True, ) record_type = { "microsite_id": "DUMMYID", "site": "DUMMYSITE", "biomimic_type": "Dummybiomimictype", "country": "Dummycountry", "state_province": "Dummystate", "location": "Dummylocation", "field_lat": "36.621933330000", "field_lon": "-121.905316700000", "zone": "DummyZone", "sub_zone": "DummySubZone", "wave_exp": "DummyWave", "start_date": str(datetime.strptime("7/1/2000", "%m/%d/%Y").date()), "end_date": str(datetime.strptime("7/2/2000", "%m/%d/%Y").date()), } query = """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date) FROM `cnx_logger_metadata` meta INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id` WHERE log.`microsite_id`=%s""" cursor = self.db.connection.cursor() cursor.execute(query, (record_type["microsite_id"],)) results = cursor.fetchone() if results is not None: count = results[0] min_date = results[1] max_date = results[2] self.clean_up_logger_temp(cursor) self.clean_up_logger_type(cursor, record_type) cursor.close() self.assertEqual(count, 12) self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1)) self.assertEqual(max_date, datetime(2006, 8, 16, 9, 41))
class UploadTestCase(unittest.TestCase):
    """Upload Feature specific Test Cases will go here.

    Each test drives the Flask ``/upload`` endpoint with a logged-in test
    client, then verifies the database state directly through ``self.db``
    and cleans up any rows it created.
    """

    def setUp(self):
        """Setup test app and a direct DB connection for verification."""
        self.app = create_app('tests.config')
        self.db = DbConnect(self.app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def clean_up_logger_temp(self, cursor):
        '''Clean up table cnx_logger_temperature, then the metadata table.'''
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_temp_ids = [result[0] for result in results]
            for logger_temp_id in logger_temp_ids:
                cursor.execute(
                    "DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=%s",
                    (logger_temp_id, ))
                self.db.connection.commit()
        # Metadata rows are derived from temperature rows, so purge them too.
        self.clean_up_metadata_table(cursor)

    def clean_up_logger_type(self, cursor, rec):
        '''Clean up logger type tables (logger row plus its lookup rows).'''
        biomimic_id = self.db.fetch_existing_bio_id(cursor,
                                                   rec.get('biomimic_type'))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get('microsite_id'))
        # Delete the logger first: it references the three lookup tables.
        cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s",
                       (logger_id, ))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s",
            (biomimic_id, ))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s",
            (geo_id, ))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_properties` WHERE prop_id=%s",
            (prop_id, ))
        self.db.connection.commit()

    def clean_up_metadata_table(self, cursor):
        '''Clean up table cnx_logger_metadata.'''
        cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`")
        results = cursor.fetchall()
        if results is not None:
            results = list(results)
            logger_ids = [result[0] for result in results]
            for logger_id in logger_ids:
                # BUGFIX: the placeholder was quoted ('%s'), which defeats
                # DB-API parameterization (driver already quotes the value).
                cursor.execute(
                    "DELETE FROM `cnx_logger_metadata` WHERE logger_id=%s",
                    (logger_id, ))
                self.db.connection.commit()

    def build_type_where_condition(self, queryDict):
        """Builds the where_condition for the Select Query.

        Test-only helper; values come from fixed fixture dicts, so plain
        %-interpolation is acceptable here (not safe for untrusted input).
        """
        where = """ WHERE biotype.`biomimic_type`=\'%s\' AND geo.`country`=\'%s\' AND geo.`state_province`= \'%s\' AND geo.`location`=\'%s\'""" % \
            (queryDict.get('biomimic_type'), queryDict.get('country'),
             queryDict.get('state_province'), queryDict.get('location'))
        if queryDict.get('zone') != "All":
            where += " AND prop.`zone`=\'%s\'" % (queryDict.get('zone'))
        if queryDict.get('sub_zone') != "All":
            where += " AND prop.`sub_zone`=\'%s\'" % (
                queryDict.get('sub_zone'))
        if queryDict.get('wave_exp') != "All":
            if (queryDict.get('wave_exp') == 'None'):
                where += " and prop.wave_exp is Null"
            else:
                where += " AND prop.`wave_exp`=\'%s\' " % (
                    queryDict.get('wave_exp'))
        return where

    def test_uploaded_logger_type_file_extension(self):
        """Test that uploaded logger type file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (BytesIO(b'logger Type File'),
                                   'correctExtensionLoggerTypeFile.csv')
            }, follow_redirects=True)
            # A .csv type file is valid, so no format error may be shown.
            self.assertNotIn(
                b'File correctExtensionLoggerTypeFile.csv should be in csv format',
                response.data)

    def test_uploaded_logger_temp_file_extension(self):
        """Test that uploaded logger temperature file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTempFile': (BytesIO(b'logger Temp File'),
                                   'correctExtensionLoggerTempFile.csv')
            }, follow_redirects=True)
            # BUGFIX: the assertion previously named "...TempFile.pdf" while
            # the upload was "...TempFile.csv", so it could never match and
            # the check was vacuous. Assert on the actual uploaded filename.
            self.assertNotIn(
                b'File correctExtensionLoggerTempFile.csv should be in csv or txt format',
                response.data)

    def test_uploaded_logger_type_file_extension_negative(self):
        """Test that uploaded logger type file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTypeFile': (BytesIO(b'logger Type File'),
                                   'incorrectExtensionLoggerTypeFile.txt')
            }, follow_redirects=True)
            self.assertIn(
                b'File incorrectExtensionLoggerTypeFile.txt should be in csv format',
                response.data)

    def test_uploaded_logger_temp_file_extension_negative(self):
        """Test that uploaded logger temperature file has correct extensions"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post('/upload', data={
                'loggerTempFile': (BytesIO(b'logger Temp File'),
                                   'incorrectExtensionLoggerTypeFile.pdf')
            }, follow_redirects=True)
            self.assertIn(
                b'File incorrectExtensionLoggerTypeFile.pdf should be in csv or txt format',
                response.data)

    def test_uploaded_logger_type_file_missing(self):
        """Test that uploaded logger type file is not missing"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post(
                '/upload',
                data={'loggerTypeFile': (BytesIO(b'logger Type File'), '')},
                follow_redirects=True)
            self.assertIn(b'Please choose a file first', response.data)

    def test_uploaded_logger_temp_file_missing(self):
        """Test that uploaded logger temp file is not missing"""
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            response = client.post(
                '/upload',
                data={'loggerTempFile': (BytesIO(b'logger Temp File'), '')},
                follow_redirects=True)
            self.assertIn(b'Please choose a file first', response.data)

    def test_logger_type_upload(self):
        """Test that file with valid Type uploads is inserted in DB."""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        microsite_id = None
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            # with-block closes the upload handle (previously leaked).
            with open(test_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Positive.csv')
                }, follow_redirects=True)
            record = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave"
            }
            where_condition = self.build_type_where_condition(record)
            query = (
                "SELECT log.microsite_id "
                "FROM `cnx_logger` log "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = log.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = log.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = log.`prop_id` "
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = cursor.fetchone()
            self.clean_up_logger_type(cursor, record)
            cursor.close()
            if results is not None:
                microsite_id = results[0]
            self.assertEqual(record['microsite_id'], microsite_id)
            self.assertIn(
                b"<td># Proper Records</td>\n <td>1</td>", response.data)
            self.assertIn(
                b"<td># Corrupt Records</td>\n <td>0</td>", response.data)

    def test_logger_type_upload_corrupt(self):
        """Test that Logger Type file with corrupt records cannot be uploaded"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Corrupt.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Corrupt.csv')
                }, follow_redirects=True)
            # Record missing the microsite_id column entirely.
            record_corrupt_ncolumns = {
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave"
            }
            query = (
                "SELECT logger.microsite_id "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            )
            cursor = self.db.connection.cursor()
            where_condition = self.build_type_where_condition(
                record_corrupt_ncolumns)
            cursor.execute(query + where_condition)
            results = list(cursor.fetchall())
            self.assertEqual(len(results), 0)
            # Record with a non-numeric latitude.
            record_corrupt_coordinates = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "A36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave"
            }
            where_condition = self.build_type_where_condition(
                record_corrupt_coordinates)
            cursor.execute(query + where_condition)
            results = list(cursor.fetchall())
            self.assertEqual(len(results), 0)
            cursor.close()

    def test_logger_type_upload_duplicate(self):
        """Test that Logger Type file with duplicate Microsite Id cannot be uploaded"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Duplicate.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Duplicate.csv')
                }, follow_redirects=True)
            record = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave"
            }
            where_condition = self.build_type_where_condition(record)
            query = (
                "SELECT logger.microsite_id "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = list(cursor.fetchall())
            self.clean_up_logger_type(cursor, record)
            cursor.close()
            # Only one row may exist despite the duplicated entry in the file.
            self.assertEqual(len(results), 1)

    def test_logger_temperature_upload(self):
        """Test that file with valid Temperature uploads is inserted in DB."""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_type_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Positive.csv')
                }, follow_redirects=True)
            with open(test_temp_filename, 'rb') as temp_file:
                response = client.post('/upload', data={
                    'loggerTempFile': (temp_file, 'DUMMYID_2000_pgsql.txt')
                }, follow_redirects=True)
            record_type = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
                "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
            }
            record_temp = [{
                "Time_GMT": "7/1/2000 2:01",
                "Temp_C": 14
            }, {
                "Time_GMT": "7/1/2000 2:21",
                "Temp_C": 13.5
            }]
            where_condition = self.db.build_where_condition(record_type)
            query = (
                "SELECT DATE_FORMAT(temp.Time_GMT,'%m/%d/%Y %H:%i'), temp.Temp_C "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
                "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = cursor.fetchall()
            if results is not None:
                results = list(results)
            cursor.close()
            # Readings must round-trip through the DB unchanged.
            self.assertEqual(
                datetime.strptime(record_temp[0]['Time_GMT'], '%m/%d/%Y %H:%M'),
                datetime.strptime(results[0][0], '%m/%d/%Y %H:%M'))
            self.assertEqual(record_temp[0]['Temp_C'], results[0][1])
            self.assertEqual(
                datetime.strptime(record_temp[1]['Time_GMT'], '%m/%d/%Y %H:%M'),
                datetime.strptime(results[1][0], '%m/%d/%Y %H:%M'))
            self.assertEqual(record_temp[1]['Temp_C'], results[1][1])
            self.assertIn(
                b"<td># Proper Records</td>\n <td>6</td>", response.data)
            self.assertIn(
                b"<td># Corrupt Records</td>\n <td>0</td>", response.data)
            query = (
                """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date) FROM `cnx_logger_metadata` meta INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id` WHERE log.`microsite_id`=%s""")
            cursor = self.db.connection.cursor()
            cursor.execute(query, (record_type["microsite_id"], ))
            results = cursor.fetchone()
            if results is not None:
                count = results[0]
                min_date = results[1]
                max_date = results[2]
            self.clean_up_logger_temp(cursor)
            self.clean_up_logger_type(cursor, record_type)
            cursor.close()
            self.assertEqual(count, 6)
            self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1))
            self.assertEqual(max_date, datetime(2002, 8, 16, 9, 41))

    def test_logger_temperature_upload_corrupt(self):
        """Test that Logger Temperature file with corrupt records cannot be uploaded"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_corrupt.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_type_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Positive.csv')
                }, follow_redirects=True)
            with open(test_temp_filename, 'rb') as temp_file:
                response = client.post('/upload', data={
                    'loggerTempFile': (temp_file, 'DUMMYID_2000_corrupt.txt')
                }, follow_redirects=True)
            record_type = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
                "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
            }
            where_condition = self.db.build_where_condition(record_type)
            query = (
                "SELECT temp.Time_GMT, temp.Temp_C "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
                "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = list(cursor.fetchall())
            self.clean_up_logger_type(cursor, record_type)
            cursor.close()
            # No temperature rows may be inserted from a corrupt file.
            self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_missing(self):
        """Test that Logger Temperature file with missing Microsite Id cannot be uploaded"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID2_2000_Missing_Type.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_type_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Positive.csv')
                }, follow_redirects=True)
            # DUMMYID2 was never registered as a logger type.
            with open(test_temp_filename, 'rb') as temp_file:
                response = client.post('/upload', data={
                    'loggerTempFile': (temp_file,
                                       'DUMMYID2_2000_Missing_Type.txt')
                }, follow_redirects=True)
            record_type = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
                "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
            }
            where_condition = self.db.build_where_condition(record_type)
            query = (
                "SELECT temp.Time_GMT, temp.Temp_C "
                "FROM `cnx_logger` logger "
                "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
                "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
                "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
                "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` "
            )
            cursor = self.db.connection.cursor()
            cursor.execute(query + where_condition)
            results = list(cursor.fetchall())
            self.clean_up_logger_type(cursor, record_type)
            cursor.close()
            self.assertEqual(len(results), 0)

    def test_logger_metadata_update(self):
        """Test that metadata table gets updated with subsequent inserts in DB."""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt'
        test_temp_filename2 = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2001_pgsql.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_type_filename, 'rb') as type_file:
                response = client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Positive.csv')
                }, follow_redirects=True)
            with open(test_temp_filename, 'rb') as temp_file:
                response = client.post('/upload', data={
                    'loggerTempFile': (temp_file, 'DUMMYID_2000_pgsql.txt')
                }, follow_redirects=True)
            with open(test_temp_filename2, 'rb') as temp_file2:
                response = client.post('/upload', data={
                    'loggerTempFile': (temp_file2, 'DUMMYID_2001_pgsql.txt')
                }, follow_redirects=True)
            record_type = {
                "microsite_id": "DUMMYID",
                "site": "DUMMYSITE",
                "biomimic_type": "Dummybiomimictype",
                "country": "Dummycountry",
                "state_province": "Dummystate",
                "location": "Dummylocation",
                "field_lat": "36.621933330000",
                "field_lon": "-121.905316700000",
                "zone": "DummyZone",
                "sub_zone": "DummySubZone",
                "wave_exp": "DummyWave",
                "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
                "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())
            }
            query = (
                """SELECT SUM(meta.logger_count), MIN(meta.logger_min_date), MAX(meta.logger_max_date) FROM `cnx_logger_metadata` meta INNER JOIN `cnx_logger` log ON log.`logger_id`=meta.`logger_id` WHERE log.`microsite_id`=%s""")
            cursor = self.db.connection.cursor()
            cursor.execute(query, (record_type["microsite_id"], ))
            results = cursor.fetchone()
            if results is not None:
                count = results[0]
                min_date = results[1]
                max_date = results[2]
            self.clean_up_logger_temp(cursor)
            self.clean_up_logger_type(cursor, record_type)
            cursor.close()
            # Two six-record uploads: totals and date range must aggregate.
            self.assertEqual(count, 12)
            self.assertEqual(min_date, datetime(2000, 7, 1, 2, 1))
            self.assertEqual(max_date, datetime(2006, 8, 16, 9, 41))
class QueryFormTestCase(unittest.TestCase): """Query Form Feature specific Test Cases will go here""" def setUp(self): """Setup test app""" self.app = create_app('tests.config') self.db = DbConnect(self.app.config) test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv' test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql.txt' with self.app.test_client() as client: with client.session_transaction() as sess: sess['logged_in'] = True response = client.post('/upload', data={ 'loggerTypeFile': (open(test_type_filename, 'rb'), 'Test_New_Logger_Type_Positive.csv') }, follow_redirects=True) response = client.post('/upload', data={ 'loggerTempFile': (open(test_temp_filename, 'rb'), 'DUMMYID_2000_pgsql.txt') }, follow_redirects=True) self.record_type = { "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave"} def tearDown(self): """Close test database""" cursor = self.db.connection.cursor() self.clean_up_logger_temp(cursor) self.clean_up_logger_type(cursor, self.record_type) cursor.close() self.db.close() def stringToBytes(self, stringValue): """Convert Strings to their Bytes representation""" return bytes(stringValue, 'UTF-8') def clean_up_logger_temp(self, cursor): cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`") results = cursor.fetchall() if results is not None: results = list(results) logger_temp_ids = [result[0] for result in results] for logger_temp_id in logger_temp_ids: res = cursor.execute("DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=\'%s\'" % (logger_temp_id)) self.db.connection.commit() def clean_up_logger_type(self, cursor, rec): biomimic_id = self.db.fetch_existing_bio_id(cursor, 
rec.get('biomimic_type')) geo_id = self.db.fetch_existing_geo_id(cursor, rec) prop_id = self.db.fetch_existing_prop_id(cursor, rec) logger_id = self.db.find_microsite_id(rec.get('microsite_id')) res = cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s" % (logger_id,)) self.db.connection.commit() res = cursor.execute("DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s", (biomimic_id,)) self.db.connection.commit() res = cursor.execute("DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s", (geo_id,)) self.db.connection.commit() res = cursor.execute("DELETE FROM `cnx_logger_properties` WHERE prop_id=%s", (prop_id,)) self.db.connection.commit() def test_form_logger_type_automatic_fill(self): """Test the logger_type field is filled automatically on page load""" with self.app.test_client() as client: response = client.get('/query') biomimic_type_choices = self.db.fetch_biomimic_types() for biomimic_type in biomimic_type_choices: self.assertIn(self.stringToBytes(biomimic_type[0]), response.data) def check_ajax(self, selected_type, selected_value, dbFunction): """Helper Function to test the ajax call functionality when given selected type field is selected with given value""" with self.app.test_client() as client: with client.session_transaction() as sess: sess['query'] = self.record_type response = client.get('/_parse_data', query_string=dict( select_type=selected_type, select_value=selected_value)) self.assertEqual(selected_type, request.args.get('select_type')) self.assertEqual(selected_value, request.args.get('select_value')) choices = dbFunction(session['query']) for choice in choices[0]: self.assertIn(self.stringToBytes(choice[0]), response.data) def test_form_select_biomimic_type(self): """Test the ajax call functionality if logger_type field is selected""" selected_type = "biomimic_type" selected_value = "DummyBiomimicType" with self.app.test_client() as client: with client.session_transaction() as sess: sess['query'] = self.record_type response = 
client.get('/_parse_data', query_string=dict( select_type=selected_type, select_value=selected_value)) self.assertEqual(selected_type, request.args.get('select_type')) self.assertEqual(selected_value, request.args.get('select_value')) choices = self.db.fetch_distinct_countries_and_zones(self.record_type) country_list = choices[0]["country"] zone_list = choices[0]["zone"] for country in country_list: self.assertIn(self.stringToBytes(country), response.data) for zone in zone_list: self.assertIn(self.stringToBytes(zone), response.data) def test_form_select_country_name(self): """Test the ajax call functionality if country field is selected""" self.check_ajax("country", "DummyCountry", self.db.fetch_distinct_states) def test_form_select_state_province(self): """Test the ajax call functionality if state_province field is selected""" self.check_ajax("state_province", "DummyState", self.db.fetch_distinct_locations) def test_form_select_zone_name(self): """Test the ajax call functionality if zone field is selected""" self.check_ajax("zone", "DummyZone", self.db.fetch_distinct_sub_zones) def test_form_select_sub_zone_name(self): """Test the ajax call functionality if sub_zone_name field is selected""" self.check_ajax("sub_zone", "DummySubZone", self.db.fetch_distinct_wave_exposures) def test_query_results_raw(self): """Test the query results functionality for Raw""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "7/1/2000", "end_date": "7/2/2000", "output_type" : "Raw"}, follow_redirects=False) self.assertIn(b"14", response.data) self.assertIn(b"13.5", response.data) # Merging with the above 
test case, since we are storing the query in the sessin variable """Test the download functionality""" response = client.get('/download') self.assertIn(b"14", response.data) self.assertIn(b"13.5", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_min_daily(self): """Test the query results functionality for Min Daily""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "7/1/2000", "end_date": "7/2/2000", "output_type" : "Min", "analysis_type" : "Daily"}, follow_redirects=False) self.assertIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) #Test the download functionality response = client.get('/download') self.assertIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_max_daily(self): """Test the query results functionality for Max Daily""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "7/1/2000", "end_date": "7/2/2000", "output_type" : "Max", "analysis_type" : "Daily"}, follow_redirects=False) self.assertIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) #Test the download functionality response = client.get('/download') 
self.assertIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_average_daily(self): """Test the query results functionality for Average Daily""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "7/1/2000", "end_date": "7/2/2000", "output_type" : "Average", "analysis_type" : "Daily"}, follow_redirects=False) self.assertIn(b"13.75", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) #Test the download functionality response = client.get('/download') self.assertIn(b"13.75", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_min_monthly(self): """Test the query results functionality for Min Monthly""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "1/1/2000", "end_date": "1/1/2003", "output_type" : "Min", "analysis_type" : "Monthly"}, follow_redirects=False) self.assertIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) self.assertIn(b"10", response.data) self.assertNotIn(b"20.0", response.data) self.assertIn(b"15", 
response.data) self.assertIn(b"7", response.data) #Test the download functionality response = client.get('/download') self.assertIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) self.assertIn(b"10", response.data) self.assertNotIn(b"20.0", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_max_monthly(self): """Test the query results functionality for Max Monthly""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "1/1/2000", "end_date": "1/1/2003", "output_type" : "Max", "analysis_type" : "Monthly"}, follow_redirects=False) self.assertIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) self.assertIn(b"20.0", response.data) self.assertNotIn(b"10", response.data) self.assertIn(b"15.0", response.data) self.assertIn(b"7", response.data) #Test the download functionality response = client.get('/download') self.assertIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) self.assertIn(b"20.0", response.data) self.assertNotIn(b"10", response.data) self.assertIn(b"15.0", response.data) self.assertIn(b"7", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_average_monthly(self): """Test the query results functionality for Average Monthly""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", 
"location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "1/1/2000", "end_date": "1/1/2003", "output_type" : "Average", "analysis_type" : "Monthly"}, follow_redirects=False) self.assertIn(b"13.75", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) self.assertIn(b"15.0", response.data) self.assertIn(b"7.0", response.data) self.assertNotIn(b"10", response.data) self.assertNotIn(b"20.0", response.data) #Test the download functionality response = client.get('/download') self.assertIn(b"13.75", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"13.5", response.data) self.assertIn(b"15.0", response.data) self.assertIn(b"7.0", response.data) self.assertNotIn(b"10", response.data) self.assertNotIn(b"20.0", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_min_yearly(self): """Test the query results functionality for Min Yearly""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "1/1/2000", "end_date": "1/1/2003", "output_type" : "Min", "analysis_type" : "Yearly"}, follow_redirects=False) self.assertNotIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"20.0", response.data) self.assertIn(b"10", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) #Test the download functionality response = client.get('/download') self.assertNotIn(b"13.5", response.data) self.assertNotIn(b"14", 
response.data) self.assertNotIn(b"20.0", response.data) self.assertIn(b"10", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_max_yearly(self): """Test the query results functionality for Max Yearly""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", "start_date": "1/1/2000", "end_date": "1/1/2003", "output_type" : "Max", "analysis_type" : "Yearly"}, follow_redirects=False) self.assertNotIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"10", response.data) self.assertIn(b"20.0", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) #Test the download functionality response = client.get('/download') self.assertNotIn(b"13.5", response.data) self.assertNotIn(b"14", response.data) self.assertNotIn(b"10", response.data) self.assertIn(b"20.0", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data) def test_query_results_average_yearly(self): """Test the query results functionality for Average Yearly""" with self.app.test_client() as client: response = client.get('/_submit_query', query_string={ "microsite_id" : "DUMMYID", "site" : "DUMMYSITE", "biomimic_type" : "Dummybiomimictype", "country" : "Dummycountry", "state_province" : "Dummystate", "location" : "Dummylocation", "field_lat" : "36.621933330000", "field_lon" : "-121.905316700000", "zone" : "DummyZone", "sub_zone" : "DummySubZone", "wave_exp" : "DummyWave", 
"start_date": "1/1/2000", "end_date": "1/1/2003", "output_type" : "Average", "analysis_type" : "Yearly"}, follow_redirects=False) self.assertIn(b"14.375", response.data) self.assertNotIn(b"14.0", response.data) self.assertNotIn(b"13.5", response.data) self.assertNotIn(b"20.0", response.data) self.assertNotIn(b"10", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) #Test the download functionality response = client.get('/download') self.assertIn(b"14.375", response.data) self.assertNotIn(b"13.5", response.data) self.assertNotIn(b"14.0", response.data) self.assertNotIn(b"20.0", response.data) self.assertNotIn(b"10", response.data) self.assertIn(b"15", response.data) self.assertIn(b"7", response.data) self.assertIn(b"biomimic_type:Dummybiomimictype", response.data)
class QueryFormTestCase(unittest.TestCase): """Query Form Feature specific Test Cases will go here""" def setUp(self): """Setup test app""" app.config['TESTING'] = True self.db = DbConnect(app.config) def tearDown(self): """Close test database""" self.db.close() def stringToBytes(self, stringValue): """Convert Strings to their Bytes representation""" return bytes(stringValue, 'UTF-8') def test_form_logger_type_automatic_fill(self): """Test the logger_type field is filled automatically on page load""" with app.test_client() as client: response = client.get('/query') logger_type_choices = self.db.getLoggerTypes() for logger_type in logger_type_choices: self.assertIn(self.stringToBytes(logger_type[0]), response.data) def check_ajax(self, selected_type, selected_value, dbFunction): """Helper Function to test the ajax call functionality when given selected type field is selected with given value""" with app.test_client() as client: response = client.get('/_parse_data', query_string=dict( select_type=selected_type, select_value=selected_value)) self.assertEqual(selected_type, request.args.get('select_type')) self.assertEqual(selected_value, request.args.get('select_value')) choices = dbFunction(request.args.get('select_value')) for choice in choices: self.assertIn(self.stringToBytes(choice[0]), response.data) def test_form_logger_type_select(self): """Test the ajax call functionality if logger_type field is selected""" self.check_ajax("logger_type", "robomussel", self.db.getCountry) def test_form_country_name_select(self): """Test the ajax call functionality if country_name field is selected""" self.check_ajax("country_name", "USA", self.db.getState) def test_form_state_name_select(self): """Test the ajax call functionality if state_name field is selected""" self.check_ajax("state_name", "California", self.db.getLocation) def test_form_location_name_select(self): """Test the ajax call functionality if location_name field is selected""" self.check_ajax("lt_for_zone", 
"robomussel", self.db.getZone) def test_form_Zone_name_select(self): """Test the ajax call functionality if zone_name field is selected""" self.check_ajax("lt_for_subzone", "robomussel", self.db.getSubZone) def test_form_SubZone_name_select(self): """Test the ajax call functionality if subZone_name field is selected""" self.check_ajax("lt_for_wave_exp", "robomussel", self.db.getWaveExp) def test_query_results(self): """Test the query results functionality""" with app.test_client() as client: response = client.post('/query', data=dict( logger_type="robomussel", country_name="Canada", state_name="British Columbia", location_name="Seppings Island", zone_name="High", sub_zone_name="High", wave_exp_name="exposed", date_pick_from=datetime.date(1995, 4, 15).strftime('%m/%d/%Y'), date_pick_to=datetime.date(2016, 4, 30).strftime('%m/%d/%Y')), follow_redirects=True) self.assertIn(b"[('robomussel', 13.98), ('robomussel', 12.87)]", response.data)
class UploadTestEdgeCase(unittest.TestCase):
    """Upload Feature specific Test Cases will go here"""

    def setUp(self):
        """Setup test app"""
        self.app = create_app('tests.config')
        self.db = DbConnect(self.app.config)

    def tearDown(self):
        """Close test database"""
        self.db.close()

    def cleanUpLoggerTemp(self, cursor):
        '''Clean up table cnx_logger_temperature, then the metadata table.'''
        cursor.execute("SELECT logger_temp_id FROM `cnx_logger_temperature`")
        results = cursor.fetchall()
        if results is not None:
            for (logger_temp_id,) in list(results):
                # was: "...logger_temp_id='%s'" % (...) -- quoted Python
                # %-interpolation; use driver-side parameters instead
                cursor.execute(
                    "DELETE FROM `cnx_logger_temperature` WHERE logger_temp_id=%s",
                    (logger_temp_id,))
                self.db.connection.commit()
        self.cleanUpMetadataTable(cursor)

    def cleanUpLoggerType(self, cursor, rec):
        '''Clean up logger type tables for the record described by *rec*.'''
        biomimic_id = self.db.fetch_existing_bio_id(cursor,
                                                    rec.get('biomimic_type'))
        geo_id = self.db.fetch_existing_geo_id(cursor, rec)
        prop_id = self.db.fetch_existing_prop_id(cursor, rec)
        logger_id = self.db.find_microsite_id(rec.get('microsite_id'))
        # was: "... WHERE logger_id=%s" % (logger_id) string interpolation
        # (invalid SQL when the id is None) and bare, non-tuple parameters
        # on the following deletes; all four now pass proper parameter tuples
        cursor.execute("DELETE FROM `cnx_logger` WHERE logger_id=%s",
                       (logger_id,))
        self.db.connection.commit()
        cursor.execute(
            "DELETE FROM `cnx_logger_biomimic_type` WHERE biomimic_id=%s",
            (biomimic_id,))
        self.db.connection.commit()
        cursor.execute("DELETE FROM `cnx_logger_geographics` WHERE geo_id=%s",
                       (geo_id,))
        self.db.connection.commit()
        cursor.execute("DELETE FROM `cnx_logger_properties` WHERE prop_id=%s",
                       (prop_id,))
        self.db.connection.commit()

    def cleanUpMetadataTable(self, cursor):
        '''Clean up table cnx_logger_metadata.'''
        cursor.execute("SELECT logger_id FROM `cnx_logger_metadata`")
        results = cursor.fetchall()
        if results is not None:
            for (logger_id,) in list(results):
                # placeholder no longer wrapped in quotes -- the driver
                # quotes/escapes parameter values itself
                cursor.execute(
                    "DELETE FROM `cnx_logger_metadata` WHERE logger_id=%s",
                    (logger_id,))
                self.db.connection.commit()

    def test_logger_type_upload_MicrositeId_None(self):
        """Test that upload Logger Type file without microsite_id will not
        be inserted to database"""
        test_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_MicrositeId_None.csv'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            # context manager closes the upload handle (was leaked)
            with open(test_filename, 'rb') as type_file:
                client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_MicrositeId_None.csv')},
                    follow_redirects=True)
        query = ("SELECT * from cnx_logger_biomimic_type "
                 "where biomimic_type='DummyBiomimicTypeNone'")
        cursor = self.db.connection.cursor()
        cursor.execute(query)
        results = list(cursor.fetchall())
        cursor.close()  # was left open
        self.assertEqual(len(results), 0)

    def test_logger_temperature_upload_duplicate(self):
        """Test that Logger Temperature file with duplicate entry cannot be
        uploaded (the duplicate reading is stored only once)"""
        test_type_filename = 'server/tests/test_data_files/Test/Test_New_Logger_Type_Positive.csv'
        test_temp_filename = 'server/tests/test_data_files/Test/temp_files/DUMMYID_2000_pgsql_Duplicate.txt'
        with self.app.test_client() as client:
            with client.session_transaction() as sess:
                sess['logged_in'] = True
            with open(test_type_filename, 'rb') as type_file:
                client.post('/upload', data={
                    'loggerTypeFile': (type_file,
                                       'Test_New_Logger_Type_Positive.csv')},
                    follow_redirects=True)
            with open(test_temp_filename, 'rb') as temp_file:
                client.post('/upload', data={
                    'loggerTempFile': (temp_file,
                                       'DUMMYID_2000_pgsql_Duplicate.txt')},
                    follow_redirects=True)
        record_type = {
            "microsite_id": "DUMMYID",
            "site": "DUMMYSITE",
            "biomimic_type": "Dummybiomimictype",
            "country": "Dummycountry",
            "state_province": "Dummystate",
            "location": "Dummylocation",
            "field_lat": "36.621933330000",
            "field_lon": "-121.905316700000",
            "zone": "DummyZone",
            "sub_zone": "DummySubZone",
            "wave_exp": "DummyWave",
            "start_date": str(datetime.strptime("7/1/2000", '%m/%d/%Y').date()),
            "end_date": str(datetime.strptime("7/2/2000", '%m/%d/%Y').date())}
        where_condition = self.db.build_where_condition(record_type)
        query = (
            "SELECT temp.Time_GMT, temp.Temp_C "
            "FROM `cnx_logger` logger "
            "INNER JOIN `cnx_logger_biomimic_type` biotype ON biotype.`biomimic_id` = logger.`biomimic_id` "
            "INNER JOIN `cnx_logger_geographics` geo ON geo.`geo_id` = logger.`geo_id` "
            "INNER JOIN `cnx_logger_properties` prop ON prop.`prop_id` = logger.`prop_id` "
            "INNER JOIN `cnx_logger_temperature` temp ON temp.`logger_id` = logger.`logger_id` ")
        cursor = self.db.connection.cursor()
        cursor.execute(query + where_condition)
        results = list(cursor.fetchall())
        self.cleanUpLoggerTemp(cursor)
        self.cleanUpLoggerType(cursor, record_type)
        cursor.close()
        # duplicate rows in the file must collapse to a single stored reading
        self.assertEqual(len(results), 1)
class QueryFormTestCase(unittest.TestCase): """Query Form Feature specific Test Cases will go here""" def setUp(self): """Setup test app""" app.config['TESTING'] = True self.db = DbConnect(app.config) def tearDown(self): """Close test database""" self.db.close() def stringToBytes(self, stringValue): """Convert Strings to their Bytes representation""" return bytes(stringValue, 'UTF-8') def test_form_logger_type_automatic_fill(self): """Test the logger_type field is filled automatically on page load""" with app.test_client() as client: response = client.get('/query') logger_type_choices = self.db.getLoggerTypes() for logger_type in logger_type_choices: self.assertIn(self.stringToBytes(logger_type[0]), response.data) def check_ajax(self, selected_type, selected_value, dbFunction): """Helper Function to test the ajax call functionality when given selected type field is selected with given value""" with app.test_client() as client: response = client.get('/_parse_data', query_string=dict( select_type=selected_type, select_value=selected_value)) self.assertEqual(selected_type, request.args.get('select_type')) self.assertEqual(selected_value, request.args.get('select_value')) choices = dbFunction(request.args.get('select_value')) for choice in choices: self.assertIn(self.stringToBytes(choice[0]), response.data) def test_form_logger_type_select(self): """Test the ajax call functionality if logger_type field is selected""" self.check_ajax("logger_type", "robomussel", self.db.getCountry) def test_form_country_name_select(self): """Test the ajax call functionality if country_name field is selected""" self.check_ajax("country_name", "USA", self.db.getState) def test_form_state_name_select(self): """Test the ajax call functionality if state_name field is selected""" self.check_ajax("state_name", "California", self.db.getLocation) def test_form_location_name_select(self): """Test the ajax call functionality if location_name field is selected""" self.check_ajax("lt_for_zone", 
"robomussel", self.db.getZone) def test_form_Zone_name_select(self): """Test the ajax call functionality if zone_name field is selected""" self.check_ajax("lt_for_subzone", "robomussel", self.db.getSubZone) def test_form_SubZone_name_select(self): """Test the ajax call functionality if subZone_name field is selected""" self.check_ajax("lt_for_wave_exp", "robomussel", self.db.getWaveExp) def test_query_results(self): """Test the query results functionality""" with app.test_client() as client: response = client.get('/_submit_query', query_string={ 'sub_zone_name': ['High'], 'end_date': ['04/12/2016'], 'state_name': ['British Columbia'], 'wave_exp': ['exposed'], 'location_name': ['Seppings Island'], 'zone_name': ['High'], 'logger_type': ['robomussel'], 'start_date': ['03/01/2016'], 'country_name': ['Canada']}, follow_redirects=True) self.assertIn(b"13.98", response.data) self.assertIn(b"12.87", response.data)