def insert_to_postgres(sensor_list: List, stored_sensor_ids: List) -> None:
    """Insert or update sensor list details in the local Postgres DB.

    Creates the all_sensor_info table if it does not exist. For each sensor
    row, updates the mutable columns when the sensor_id is already stored,
    otherwise inserts the full row.

    Args:
        sensor_list: rows ordered as (sensor_id, days_active, start_date,
            end_date, latitude, longitude, address, owner_id).
        stored_sensor_ids: sensor_ids already present in all_sensor_info.
    """
    with UseDatabase(aurora_creds) as cursor:
        sql_create = """CREATE TABLE IF NOT EXISTS all_sensor_info(
            sensor_id varchar(8),
            days_active integer,
            start_date timestamp,
            end_date timestamp,
            latitude numeric,
            longitude numeric,
            address varchar(140),
            owner_id varchar(36));"""
        cursor.execute(sql_create)
        for row in sensor_list:
            if row[0] in stored_sensor_ids:
                # Already stored: refresh only activity/date columns.
                cursor.execute(
                    """UPDATE all_sensor_info
                       SET days_active = %s, start_date = %s, end_date = %s
                       WHERE sensor_id = %s""",
                    (row[1], row[2], row[3], row[0]))
            else:
                cursor.execute(
                    """INSERT INTO all_sensor_info
                       VALUES(%s, %s, %s, %s, %s, %s, %s, %s)""",
                    (row[0], row[1], row[2], row[3],
                     row[4], row[5], row[6], row[7]))
def faulty_stats() -> 'JSON':
    """Return most recent faulty GROW data"""
    with UseDatabase(aurora_creds) as cursor:
        owner = request.args.get('owner_id')
        # Faulty = this owner's sensors with an anomaly less than 2 days old.
        sql_faulty = sql.SQL(
            """SELECT sensor_id FROM all_sensor_info
               WHERE sensor_id IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies
                    WHERE days_since_anomaly < 2)
               AND sensor_id IN
                   (SELECT sensor_id FROM all_sensor_info
                    WHERE owner_id = {});""").format(sql.Literal(owner))
        cursor.execute(sql_faulty)
        faulty_sensors = [row[0] for row in cursor.fetchall()]
        faulty_data = []
        for sensor in faulty_sensors:
            table = sql.Identifier(f'grow_data_{sensor}')
            # Latest reading for each faulty sensor.
            sql_select = sql.SQL(
                """SELECT sensor_id, battery_level, soil_moisture, light,
                          air_temperature, datetime
                   FROM {} WHERE datetime =
                       (SELECT MAX(datetime) FROM {})""").format(table, table)
            cursor.execute(sql_select)
            faulty_data.append(cursor.fetchall())
        return jsonify(faulty_data)
def filter_for_new_sensor_updates(new_sensor_list: List) -> 'Tuple[List, List]':
    """Compare sensor info to Postgres table to see if the individual sensor
    info is already in the table. If not present, keep the sensor info for
    further processing and eventual insert into Postgres table.

    Returns:
        (new_sensor_info, stored_sensor_ids): rows needing insert/update,
        and the sensor_ids already stored in all_sensor_info.
    """
    with UseDatabase(aurora_creds) as cursor:
        # Guard against the table not existing yet (first run).
        sql_check = """SELECT EXISTS (SELECT 1 FROM pg_tables
                       WHERE tablename = 'all_sensor_info');"""
        cursor.execute(sql_check)
        response = cursor.fetchone()
        if response[0]:
            sql_collect = """SELECT row_to_json(all_sensor_info)
                             FROM all_sensor_info;"""
            cursor.execute(sql_collect)
            all_stored_sensors = cursor.fetchall()
        else:
            all_stored_sensors = []
        stored_sensor_ids = [row[0]['sensor_id'] for row in all_stored_sensors]
        new_sensor_info = []
        for sensor in new_sensor_list:
            if sensor[0] not in stored_sensor_ids:
                new_sensor_info.append(sensor)
            else:
                for stored_sensor in all_stored_sensors:
                    # Strip '-' and ':' from the stored end_date so it
                    # matches the compact incoming datetime format.
                    if stored_sensor[0]['sensor_id'] == sensor[0] and \
                            stored_sensor[0]['end_date'].replace('-', '').replace(':', '') != sensor[3]:
                        new_sensor_info.append(sensor)
        return new_sensor_info, stored_sensor_ids
def insert_anomalies(soil_anomalies: List, light_anomalies: List,
                     air_anomalies: List, table_name: str,
                     aurora_creds: dict, analyse_datetime: str) -> None:
    """Insert anomalous datetimes into AWS Aurora grow_anomalies table.

    Each anomaly list holds pairs whose element at index 1 is the anomalous
    datetime; it is stored in the column matching its variable.
    """
    with UseDatabase(aurora_creds) as cursor:
        # One pass per variable; only the target date column differs,
        # so build the statement from (list, column) pairs instead of
        # three copy-pasted loops.
        for anomalies, date_column in ((soil_anomalies, 'soil_date'),
                                       (light_anomalies, 'light_date'),
                                       (air_anomalies, 'air_date')):
            for anom in anomalies:
                sql_insert = sql.SQL(
                    """INSERT INTO grow_anomalies
                       (grow_table, {}, last_analysed)
                       VALUES({},{},{})""").format(
                    sql.Identifier(date_column),
                    sql.Literal(table_name),
                    sql.Literal(str(anom[1])),
                    sql.Literal(analyse_datetime))
                cursor.execute(sql_insert)
def recovered_stats() -> 'JSON':
    """Return the latest reading for each recovered GROW sensor of an owner.

    Recovered = sensors whose most recent anomaly is 2 or more days old.
    """
    with UseDatabase(aurora_creds) as cursor:
        owner = request.args.get('owner_id')
        sql_recovered = sql.SQL(
            """SELECT sensor_id FROM all_sensor_info
               WHERE sensor_id IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies
                    WHERE days_since_anomaly >= 2)
               AND sensor_id IN
                   (SELECT sensor_id FROM all_sensor_info
                    WHERE owner_id = {});""").format(sql.Literal(owner))
        cursor.execute(sql_recovered)
        recovered_sensors = [row[0] for row in cursor.fetchall()]
        recovered_data = []
        for sensor in recovered_sensors:
            table = sql.Identifier(f'grow_data_{sensor}')
            # Latest reading per recovered sensor.
            sql_select = sql.SQL(
                """SELECT sensor_id, battery_level, soil_moisture, light,
                          air_temperature, datetime
                   FROM {} WHERE datetime =
                       (SELECT MAX(datetime) FROM {})""").format(table, table)
            cursor.execute(sql_select)
            recovered_data.append(cursor.fetchall())
        return jsonify(recovered_data)
def main(): """Connects to Aurora Database, calculates the delta between most recent GROW anomaly and most recent GROW recorded date. Inserts the delta as 'days_since_anomaly' column in 'grow_anomalies' Aurora table. """ aurora_secret = get_aurora_secret() aurora_creds = { 'host': aurora_secret['host'], 'port': aurora_secret['port'], 'dbname': aurora_secret['engine'], 'user': aurora_secret['username'], 'password': aurora_secret['password'] } with UseDatabase(aurora_creds) as cursor: sql_anomaly = """SELECT grow_table, MAX(GREATEST(soil_date, light_date, air_date)) FROM grow_anomalies GROUP BY grow_table;""" cursor.execute(sql_anomaly) anomaly_dates = cursor.fetchall() all_deltas = [] for i in anomaly_dates: sql_select = sql.SQL("""SELECT MAX(datetime) FROM {}""").format(sql.Identifier(i[0])) cursor.execute(sql_select) result_datetime = cursor.fetchone() all_deltas.append([i[0], result_datetime[0] - i[1]]) for i in all_deltas: sql_update = sql.SQL("""UPDATE public.grow_anomalies SET days_since_anomaly = {} WHERE grow_table = {}""").format( sql.Literal(i[1].days), sql.Literal(i[0])) cursor.execute(sql_update)
def all_grow_map() -> 'html':
    """Renders GROW map page with four statistics from SQL queries"""
    with UseDatabase(aurora_creds) as cursor:

        def scalar(query):
            # Execute a single-value aggregate query and return the value.
            cursor.execute(query)
            return cursor.fetchone()[0]

        total_sensors = scalar("select count(*) from all_sensor_info ;")
        healthy_sensors = scalar(
            """SELECT count(*) FROM all_sensor_info
               WHERE sensor_id NOT IN
                   (SELECT SUBSTRING(grow_table, 11, 8)
                    FROM grow_anomalies);""")
        recovered_sensors = scalar(
            """SELECT count(*) FROM all_sensor_info
               WHERE sensor_id IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies
                    WHERE days_since_anomaly >= 2);""")
        faulty_sensors = scalar(
            """SELECT count(*) FROM all_sensor_info
               WHERE sensor_id IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies
                    WHERE days_since_anomaly < 2);""")
        return render_template('new_grow_map.html',
                               healthy_sensors=healthy_sensors,
                               recovered_sensors=recovered_sensors,
                               faulty_sensors=faulty_sensors,
                               number_sensors=total_sensors)
def match_wow_site(sensor_id: str) -> tuple:
    """Find closest WOW site to select grow sensor.

    Returns the one-element row from the mapping table, e.g.
    ('c1d3cfdd-4829-e911-9462-0003ff59610a',), or None when the sensor_id
    has no mapping row.
    """
    with UseDatabase(db_creds) as cursor:
        cursor.execute("""SELECT site_id FROM grow_to_wow_mapping_0625
                          WHERE sensor_id = %s;""", (sensor_id,))
        site_id = cursor.fetchone()
        return site_id
def match_wow_site(sensor_id: str) -> tuple:
    """Find closest WOW site to select grow sensor.

    Returns the (site_id, distance) row from the mapping table, or None
    when the sensor_id has no mapping row.
    """
    with UseDatabase(aurora_creds) as cursor:
        cursor.execute(
            """SELECT site_id, distance FROM grow_to_wow_mapping
               WHERE sensor_id = %s;""", (sensor_id, ))
        site_id = cursor.fetchone()
        return site_id
def fetch_all_json() -> 'JSON':
    """Return every row of all_sensor_info as a JSON array."""
    with UseDatabase(aurora_creds) as cursor:
        query = """SELECT row_to_json(all_sensor_info)
                   FROM all_sensor_info;"""
        cursor.execute(query)
        resp_list = [row[0] for row in cursor.fetchall()]
        return jsonify(resp_list)
def autocomplete():
    """Return addresses containing the 'q' query substring."""
    with UseDatabase(aurora_creds) as cursor:
        term = request.args.get('q')
        # Wrap the term in wildcards for a substring LIKE match.
        query = sql.SQL("""SELECT address FROM all_sensor_info
                           WHERE address LIKE {}""").format(
            sql.Literal(f'%{term}%'))
        cursor.execute(query)
        matches = [row[0] for row in cursor.fetchall()]
        return jsonify(matching_results=matches)
def grow_by_address() -> 'JSON':
    """Return all sensor rows matching the requested address as JSON."""
    with UseDatabase(aurora_creds) as cursor:
        address = request.args.get('address')
        query = sql.SQL("""SELECT row_to_json(all_sensor_info)
                           FROM all_sensor_info
                           WHERE address = {};""").format(
            sql.Literal(address))
        cursor.execute(query)
        resp_list = [row[0] for row in cursor.fetchall()]
        return jsonify(resp_list)
def grow_by_owner() -> 'JSON':
    """Fetch all sensor info by owner"""
    with UseDatabase(aurora_creds) as cursor:
        owner = request.args.get('owner_id')
        query = sql.SQL("""SELECT row_to_json(all_sensor_info)
                           FROM all_sensor_info
                           WHERE owner_id = {};""").format(sql.Literal(owner))
        cursor.execute(query)
        resp_list = [row[0] for row in cursor.fetchall()]
        return jsonify(resp_list)
def owner_stats() -> 'DataTable':
    """Return count of healthy, recovered, & faulty GROW sensors per GROW
    owner.
    """
    with UseDatabase(aurora_creds) as cursor:
        owner = request.args.get('owner_id')

        def sensor_ids(query):
            # Run an owner-filtered sensor query, return the sensor_ids.
            cursor.execute(query.format(sql.Literal(owner)))
            return [row[0] for row in cursor.fetchall()]

        # Healthy: no anomaly rows at all for the sensor.
        healthy_sensors = sensor_ids(sql.SQL(
            """SELECT sensor_id FROM all_sensor_info
               WHERE sensor_id NOT IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies)
               AND sensor_id IN
                   (SELECT sensor_id FROM all_sensor_info
                    WHERE owner_id = {});"""))
        # Recovered: most recent anomaly is 2 or more days old.
        recovered_sensors = sensor_ids(sql.SQL(
            """SELECT sensor_id FROM all_sensor_info
               WHERE sensor_id IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies
                    WHERE days_since_anomaly >= 2)
               AND sensor_id IN
                   (SELECT sensor_id FROM all_sensor_info
                    WHERE owner_id = {});"""))
        # Faulty: anomaly less than 2 days old.
        faulty_sensors = sensor_ids(sql.SQL(
            """SELECT sensor_id FROM all_sensor_info
               WHERE sensor_id IN
                   (SELECT SUBSTRING(grow_table, 11, 8) FROM grow_anomalies
                    WHERE days_since_anomaly < 2)
               AND sensor_id IN
                   (SELECT sensor_id FROM all_sensor_info
                    WHERE owner_id = {});"""))
        sensor_dict = {
            'owner_id': owner,
            'healthy': healthy_sensors,
            'recovered': recovered_sensors,
            'faulty': faulty_sensors,
        }
        # Debug print of the Response object removed.
        return jsonify(sensor_dict)
def _ten_day_intervals(start: 'datetime.datetime',
                       end: 'datetime.datetime') -> List:
    """Split [start, end] into consecutive sub-intervals of at most 9 days,
    each as a ['%Y%m%d%H%M%S', '%Y%m%d%H%M%S'] pair. Returns an empty list
    when start >= end (nothing to fetch).
    """
    intervals = []
    step = datetime.timedelta(days=9)
    while start < end:
        chunk_end = min(start + step, end)
        intervals.append([start.strftime('%Y%m%d%H%M%S'),
                          chunk_end.strftime('%Y%m%d%H%M%S')])
        start = chunk_end
    return intervals


def check_most_recent_grow_data(aurora_creds: dict, sensor_id: str,
                                start_date: str,
                                end_date: str) -> Tuple[str, List]:
    """Check to see if the most recent sensor reading is already stored in
    AWS Aurora. If it is not already stored, calculate the delta time
    interval between last stored reading and last recorded reading, and
    calculate 10 day intervals that add up to the delta interval. This is
    done because the GROW API only allows query ranges to be 10 days
    maximum.

    Returns:
        (sensor_id, intervals): intervals is empty when the store is
        already up to date.
    """
    with UseDatabase(aurora_creds) as cursor:
        table_name = f"grow_data_{sensor_id}"
        try:
            # Get the most recent sensor recording datetime
            cursor.execute(
                sql.SQL("SELECT MAX(datetime) FROM {}").format(
                    sql.Identifier(table_name)))
            stored_end_date = cursor.fetchone()[0]
            if stored_end_date is None:
                # Table exists but holds no rows: fall back to the
                # requested start date.
                stored_end_date = datetime.datetime.strptime(
                    start_date, '%Y%m%d%H%M%S')
        except psycopg2.ProgrammingError:
            # If table does not exist
            stored_end_date = datetime.datetime.strptime(
                start_date, '%Y%m%d%H%M%S')
        end_dt = datetime.datetime.strptime(end_date, '%Y%m%d%H%M%S')
        print('delta', end_dt - stored_end_date,
              'stored_end_date', stored_end_date, 'sensor', sensor_id)
        sensor_start_end_intervals = _ten_day_intervals(stored_end_date,
                                                        end_dt)
        return sensor_id, sensor_start_end_intervals
def check_faulty_grow() -> 'JSON':
    """Fetch most recent anomaly date for specific GROW sensor.

    Returns a JSON response (the previous -> List annotation was wrong:
    the function returns the jsonify() Response, matching the other
    endpoints in this module).
    """
    sensor_id = request.args.get('sensor_id', None)
    grow_table = f'grow_data_{sensor_id}'
    with UseDatabase(aurora_creds) as cursor:
        sql_select = sql.SQL(
            """SELECT days_since_anomaly,
                      MAX(GREATEST(soil_date, light_date, air_date))
               FROM grow_anomalies
               WHERE grow_table = {}
               GROUP BY days_since_anomaly;""").format(
            sql.Literal(grow_table))
        cursor.execute(sql_select)
        response = cursor.fetchall()
        return jsonify(response)
def fetch_all_healthy_json() -> 'JSON':
    """Fetch all healthy GROW sensor info as JSON.

    Healthy = sensors with no rows in grow_anomalies.
    """
    with UseDatabase(aurora_creds) as cursor:
        query = """SELECT row_to_json(all_sensor_info)
                   FROM all_sensor_info
                   WHERE sensor_id NOT IN
                       (SELECT SUBSTRING(grow_table, 11, 8)
                        FROM grow_anomalies);"""
        cursor.execute(query)
        resp_list = [row[0] for row in cursor.fetchall()]
        return jsonify(resp_list)
def grab_data(sensor_id: str, start_end_interval: List) -> List:
    """Fetch one sensor's readings between the interval's start and end."""
    with UseDatabase(aurora_creds) as cursor:
        table = sql.Identifier(f'grow_data_{sensor_id}')
        query = sql.SQL(
            """SELECT soil_moisture, light, air_temperature, datetime
               FROM {} WHERE sensor_id = {}
               AND datetime >= {} AND datetime <= {}""").format(
            table,
            sql.Literal(sensor_id),
            sql.Literal(start_end_interval[0]),
            sql.Literal(start_end_interval[1]))
        cursor.execute(query)
        return cursor.fetchall()
def entry() -> 'html': """Renders Entry page with two statistics from SQL queries""" with UseDatabase(aurora_creds) as cursor: sql_select_sensors = """select count(*) from all_sensor_info ;""" cursor.execute(sql_select_sensors) sensors = cursor.fetchone()[0] sql_select_owners = """SELECT COUNT(DISTINCT(owner_id)) FROM all_sensor_info;""" cursor.execute(sql_select_owners) owners = cursor.fetchone()[0] return render_template('login.html', number_sensors=sensors, number_owners=owners)
def autocomplete(): """Accepts HTML input and searces text in SQL statement, returns all results matching the HTML input text. """ with UseDatabase(aurora_creds) as cursor: search = request.args.get('q') search_str = f'%{search}%' sql_select = sql.SQL("""SELECT address FROM all_sensor_info WHERE address LIKE {}""").format( sql.Literal(search_str)) cursor.execute(sql_select) results = [i[0] for i in cursor.fetchall()] return jsonify(matching_results=results)
def get_all_grow_tables(aurora_creds: dict) -> np.ndarray: """Return all GROW table names from AWS Aurora DB, subtract those that have already been stored in grow_anomalies table. Result is array of GROW tables that have not been analysed yet for anomalies. """ # all_tables_array = pd.read_sql(sql_all, conn).values # anom_tables_array = pd.read_sql(sql_anom, conn).values with UseDatabase(aurora_creds) as cursor: # Fetch all grow data table names sql_all = """SELECT table_name FROM information_schema.tables WHERE table_name LIKE 'grow_data_%%';""" cursor.execute(sql_all) all_tables_array = cursor.fetchall() grow_tables = [] for i in all_tables_array: # Fetch most recent observation date recorded per grow table sql_grow = sql.SQL( """SELECT CONCAT('grow_data_', sensor_id), datetime FROM {} WHERE datetime = (SELECT MAX(datetime) FROM {}) """).format(sql.Identifier(i[0]), sql.Identifier(i[0])) cursor.execute(sql_grow) result_array = cursor.fetchall() grow_tables.append(result_array) # Fetch most recently analysed grow table & date sql_anom = """SELECT DISTINCT(grow_table), last_analysed FROM public.grow_anomalies WHERE last_analysed = (SELECT MAX(last_analysed) FROM public.grow_anomalies);""" cursor.execute(sql_anom) anom_tables_array = cursor.fetchall() # Find grow tables that have not been analysed yet, and # grow tables that have new data that needs to be analysed tables_to_analyse = [] for i in grow_tables: # If grow table has not been analysed yet if i[0][0][0] not in [x[0] for x in anom_tables_array]: tables_to_analyse.append(i[0][0][0]) continue for tab in anom_tables_array: # If grow table has more recent data than that already analysed if i[0][0][0] == tab[0] and i[0][0][1] > tab[1]: tables_to_analyse.append(i[0][0][0]) return tables_to_analyse
def login() -> 'html':
    """Render the login page with sensor-table and owner counts."""
    with UseDatabase(aurora_creds) as cursor:
        # Sensor count = number of per-sensor grow_data_* tables.
        cursor.execute("""SELECT COUNT(tablename)
                          FROM pg_tables
                          WHERE tablename LIKE ('grow_data_%');""")
        sensors = cursor.fetchone()[0]
        cursor.execute("""SELECT COUNT(DISTINCT(owner_id))
                          FROM all_sensor_info;""")
        owners = cursor.fetchone()[0]
        return render_template('login.html', number_sensors=sensors,
                               number_owners=owners)
def insert_df_to_aurora(sensor_id: str) -> None:
    """Create table in AWS Aurora and insert GROW data.

    Bulk-loads temp_csvs/grow_data_<sensor_id>.csv via COPY after skipping
    the header row.
    """
    with UseDatabase(aurora_creds) as cursor:
        table_name = f"grow_data_{sensor_id}"
        sql_create = sql.SQL("""CREATE TABLE IF NOT EXISTS {}(
            sensor_id varchar(8),
            datetime timestamp,
            soil_moisture numeric,
            light numeric,
            air_temperature numeric
            )""").format(sql.Identifier(table_name))
        cursor.execute(sql_create)
        # Renamed from 'csv' to avoid shadowing the stdlib csv module.
        with open(f'temp_csvs/grow_data_{sensor_id}.csv') as csv_file:
            next(csv_file)  # skip the header row
            cursor.copy_from(
                csv_file, table_name,
                columns=('datetime', 'soil_moisture', 'light',
                         'air_temperature', 'sensor_id'),
                sep=',')
def insert_to_db(aurora_creds: dict, mappings_and_distance: List) -> None:
    """Insert the GROW/WOW sensor/site mappings to AWS Aurora DB.

    Each mapping row is ordered as (sensor_id, grow_lat, grow_lon,
    (site_id, wow_lat, wow_lon), distance) — the WOW triple is nested.
    """
    with UseDatabase(aurora_creds) as cursor:
        sql_create = """CREATE TABLE IF NOT EXISTS grow_to_wow_mapping(
            sensor_id varchar(8),
            grow_lat numeric,
            grow_lon numeric,
            site_id varchar(36),
            wow_lat numeric,
            wow_lon numeric,
            distance numeric);"""
        cursor.execute(sql_create)
        # Flatten the nested WOW triple and batch the inserts in one call
        # instead of one execute() per row.
        rows = [(m[0], m[1], m[2], m[3][0], m[3][1], m[3][2], m[4])
                for m in mappings_and_distance]
        cursor.executemany(
            """INSERT INTO grow_to_wow_mapping
               VALUES(%s, %s, %s, %s, %s, %s, %s)""", rows)
def grab_data(sensor_id: str, variable: str) -> List:
    """Fetch one variable's full series for a sensor as a 1-D value array."""
    with UseDatabase(aurora_creds) as cursor:
        table_name = f'grow_data_{sensor_id}'
        query = sql.SQL("""SELECT datetime, {} FROM {}
                           WHERE sensor_id = {}""").format(
            sql.Identifier(variable),
            sql.Identifier(table_name),
            sql.Literal(sensor_id))
        cursor.execute(query)
        rows = cursor.fetchall()
        # Index by date, keep values as floats, return the bare 1-D values.
        frame = pd.DataFrame({
            'date': [r[0] for r in rows],
            'value': [float(r[1]) for r in rows],
        })
        frame.set_index('date', inplace=True)
        return frame.iloc[:, 0].values
def grow_sensors_to_insert(grow_current_sensors: List) -> List:
    """Return the current GROW sensors not yet stored in grow_to_wow_mapping.

    Handles the first-run case where the mapping table does not exist.
    """
    with UseDatabase(aurora_creds) as cursor:
        sql_table_check = """SELECT EXISTS (SELECT 1 FROM pg_tables
                             WHERE tablename = 'grow_to_wow_mapping');"""
        cursor.execute(sql_table_check)
        response = cursor.fetchone()
        if response[0]:
            cursor.execute("""SELECT sensor_id FROM grow_to_wow_mapping;""")
            # Build the membership set once — the original rebuilt a list
            # comprehension on every loop iteration (O(n^2)).
            stored_ids = {row[0] for row in cursor.fetchall()}
        else:
            # Mapping table not created yet: nothing is stored.
            stored_ids = set()
        return [s for s in grow_current_sensors if s[0] not in stored_ids]
def grab_data(sensor_id: str, variable: str) -> List:
    """Fetch one variable's values plus the series' start/end timestamps."""
    with UseDatabase(aurora_creds) as cursor:
        table = sql.Identifier(f'grow_data_{sensor_id}')
        sensor_lit = sql.Literal(sensor_id)
        cursor.execute(sql.SQL("""SELECT {} FROM {}
                                  WHERE sensor_id = {}""").format(
            sql.Identifier(variable), table, sensor_lit))
        response = cursor.fetchall()
        cursor.execute(sql.SQL("""SELECT MIN(datetime), MAX(datetime)
                                  FROM {} WHERE sensor_id = {}""").format(
            table, sensor_lit))
        first_dt, last_dt = cursor.fetchall()[0]
        start_date = first_dt.strftime('%Y-%m-%d %H:%M:%S')
        end_date = last_dt.strftime('%Y-%m-%d %H:%M:%S')
        return response, start_date, end_date, variable
def insert_anomalies(soil_anomalies: List, light_anomalies: List,
                     air_anomalies: List, table_name: str,
                     aurora_creds: dict, analyse_datetime: str) -> None:
    """Insert anomalous datetimes into AWS Aurora grow_anomalies table.

    Existing rows for the grow table are deleted first so each analysis
    run replaces the previous one.
    """
    with UseDatabase(aurora_creds) as cursor:
        # If grow sensor table is already in grow_anomalies, delete the rows
        # so fresh data can be inserted in its place
        sql_check = sql.SQL("""SELECT * FROM grow_anomalies
                               WHERE grow_table = {}
                               LIMIT 1;""").format(sql.Literal(table_name))
        cursor.execute(sql_check)
        if cursor.fetchall():
            sql_delete = sql.SQL("""DELETE FROM grow_anomalies
                                    WHERE grow_table = {}""").format(
                sql.Literal(table_name))
            cursor.execute(sql_delete)
        # One pass per variable; only the target date column differs,
        # replacing three copy-pasted loops.
        for anomalies, date_column in ((soil_anomalies, 'soil_date'),
                                       (light_anomalies, 'light_date'),
                                       (air_anomalies, 'air_date')):
            for anom in anomalies:
                sql_insert = sql.SQL(
                    """INSERT INTO grow_anomalies
                       (grow_table, {}, last_analysed)
                       VALUES({},{},{})""").format(
                    sql.Identifier(date_column),
                    sql.Literal(table_name),
                    sql.Literal(str(anom[1])),
                    sql.Literal(analyse_datetime))
                cursor.execute(sql_insert)
def all_grow_map() -> 'html':
    """Render the GROW map page with four sensor statistics.

    Counts total, healthy (no anomalies), recovered (anomaly >= 2 days
    old), and faulty (anomaly < 2 days old) sensors.
    """
    with UseDatabase(aurora_creds) as cursor:
        sql_select_sensors = """select count(*) from all_sensor_info ;"""
        cursor.execute(sql_select_sensors)
        total_sensors = cursor.fetchone()[0]
        sql_select_healthy = """SELECT count(*) FROM all_sensor_info
                                WHERE sensor_id NOT IN
                                    (SELECT SUBSTRING(grow_table, 11, 8)
                                     FROM grow_anomalies);"""
        cursor.execute(sql_select_healthy)
        healthy_sensors = cursor.fetchone()[0]
        sql_select_recovered = """SELECT count(*) FROM all_sensor_info
                                  WHERE sensor_id IN
                                      (SELECT SUBSTRING(grow_table, 11, 8)
                                       FROM grow_anomalies
                                       WHERE days_since_anomaly >= 2);"""
        cursor.execute(sql_select_recovered)
        recovered_sensors = cursor.fetchone()[0]
        sql_select_faulty = """SELECT count(*) FROM all_sensor_info
                               WHERE sensor_id IN
                                   (SELECT SUBSTRING(grow_table, 11, 8)
                                    FROM grow_anomalies
                                    WHERE days_since_anomaly < 2);"""
        cursor.execute(sql_select_faulty)
        faulty_sensors = cursor.fetchone()[0]
        return render_template('new_grow_map.html',
                               healthy_sensors=healthy_sensors,
                               recovered_sensors=recovered_sensors,
                               faulty_sensors=faulty_sensors,
                               number_sensors=total_sensors)
for a in sensor_readings: if a[0] == json_object['Data'][0]['LocationCode'] and a[1] == edit_datetime: a.append('temperature') a.append(reading['Value']) elif i['VariableCode'].endswith('temperature'): indiv_reading = [] datetime = reading['DateTime'] edit_datetime = datetime[:8] + 'T' + datetime[8:] indiv_reading.append(json_object['Data'][0]['LocationCode']) indiv_reading.append(edit_datetime) indiv_reading.append('air_temperature') indiv_reading.append(reading['Value']) sensor_readings.append(indiv_reading) with UseDatabase(db_creds) as cursor: for i in sensor_readings: cursor.execute("""INSERT INTO combined_variables VALUES(%s, %s, %s, %s, %s)""", (i[0], i[1], i[3], i[5], i[7])) # extracts three variables, but they exist in a list separately # sensor_id | datetime | soil_moisture | light | air_temp # -----------+---------------------+---------------+-------+---------- # 02krq5q5 | 2018-11-05 23:51:25 | 39.01 | |