class StatsWriter:
    def __init__(self, config_file="config.ini", update_interval=7200):
        self._read_config_file(config_file)
        self._update_interval = update_interval
        self._db = SensorsDAO()

    def _read_config_file(self, config_file):
        cp = ConfigParser()
        cp.read(config_file)
        self._work_day_start = cp.get("settings", "work_day_start")
        self._work_day_end = cp.get("settings", "work_day_end")

    def _process_stats_data(self):
        df = self._get_dataframe_from_db(dt.now().date())
        df = self._filter_dataframe_time_window(df)
        return df

    def _filter_dataframe_time_window(self, df):
        # check if dataframe is not empty
        if len(df) != 0:
            df = df.set_index("datetime").between_time(
                self._work_day_start, self._work_day_end).reset_index()
        return df

    def _get_dataframe_from_db(self, date):
        return self._db.get_data_single(date)

    def start(self):
        while True:
            stats_data = self._process_stats_data()
            self._db.insert_stats(stats_data)
            time.sleep(self._update_interval)
import time
from configparser import ConfigParser
from datetime import datetime as dt

import numpy as np

from DAO import SensorsDAO


class StatsWriter:
    def __init__(self, config_file="config.ini", update_interval=7200):
        self._read_config_file(config_file)
        self._update_interval = update_interval
        self._db = SensorsDAO()
        # One prepared INSERT statement per statistics table
        self._insert_stmt = {
            "co2_level": self._db.get_session().prepare(
                "INSERT INTO co2_level (date,ip,name,peak,mean,std ) values ( ?,?,?,?,?,?)"
            ),
            "dew_point": self._db.get_session().prepare(
                "INSERT INTO dew_point (date,ip,name,peak,mean,std ) values ( ?,?,?,?,?,?)"
            ),
            "relative_humidity": self._db.get_session().prepare(
                "INSERT INTO relative_humidity (date,ip,name,peak,mean,std ) values ( ?,?,?,?,?,?)"
            ),
            "temperature": self._db.get_session().prepare(
                "INSERT INTO temperature (date,ip,name,peak,mean,std ) values ( ?,?,?,?,?,?)"
            )
        }

    def _read_config_file(self, config_file):
        cp = ConfigParser()
        cp.read(config_file)
        self._work_day_start = cp.get("settings", "work_day_start")
        self._work_day_end = cp.get("settings", "work_day_end")

    def _process_stats_data(self):
        # Fetch today's raw readings and keep only the working-day window
        df = self._get_dataframe_from_db(dt.now().date())
        df = self._filter_dataframe_time_window(df)
        return df

    def _filter_dataframe_time_window(self, df):
        df = df.set_index("datetime").between_time(
            self._work_day_start, self._work_day_end).reset_index()
        return df

    def _get_dataframe_from_db(self, date):
        stmt_date_single = self._db.get_session().prepare(
            "select * from sensors_data where date = ?")
        df = self._db.get_session().execute(stmt_date_single,
                                            [date])._current_rows
        return df

    def _write_dataframe_to_db(self, df):
        # For each measured quantity, write per-sensor daily peak/mean/std
        for i in self._insert_stmt:
            for key, grp in df.groupby(['ip', 'date', 'name']):
                self._db.get_session().execute(self._insert_stmt[i], [
                    key[1], key[0], key[2],
                    np.max(grp[i]),
                    np.mean(grp[i]),
                    np.std(grp[i])
                ])

    def start(self):
        while True:
            stats_data = self._process_stats_data()
            self._write_dataframe_to_db(stats_data)
            time.sleep(self._update_interval)
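# A minimal sketch, not part of the original source, showing how StatsWriter
# could be run as a standalone process. Only the constructor defaults
# (config.ini, 7200 s) come from the class above; the __main__ guard is an
# assumption about how the module is launched.
if __name__ == "__main__":
    writer = StatsWriter(config_file="config.ini", update_interval=7200)
    writer.start()  # loops forever, writing daily stats every update_interval seconds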
from dash_extensions.snippets import send_data_frame
from plotly.subplots import make_subplots
import base64
import dash_table
import math
from configparser import ConfigParser
import dash_daq as daq
from cassandra.cluster import Cluster
from cassandra.query import SimpleStatement
from cassandra.query import dict_factory
from DAO import SensorsDAO
# Imports used below but not shown in this excerpt
from flask import Flask
import dash
import dash_bootstrap_components as dbc

server = Flask(__name__)

# Initialise the database access object
db = SensorsDAO()

app = dash.Dash(
    __name__,
    server=server,
    title="CLF CO2 Sensors",
    external_stylesheets=[
        dbc.themes.BOOTSTRAP,
        "https://codepen.io/chriddyp/pen/bWLwgP.css",
    ],
)

# Read the CLF logo and base64-encode it so it can be served inline
image_filename = "CLFlogo.png"
encoded_image = base64.b64encode(open(image_filename, "rb").read())

cp = ConfigParser()
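# A minimal sketch, not in the original file, of how the base64-encoded logo
# above is typically placed in a Dash layout. The html.Img data-URI pattern is
# standard Dash usage; the legacy dash_html_components import matches the
# dash_table-style imports above, and the variable name and styling are
# arbitrary example values.
import dash_html_components as html

logo_img = html.Img(
    src="data:image/png;base64,{}".format(encoded_image.decode()),
    style={"height": "60px"},
)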
import datetime
import os
import time
from configparser import ConfigParser
from pathlib import Path

from DAO import SensorsDAO
# `Sensor` and `logger` are provided elsewhere in the original module:
# a per-device sensor class and a standard `logging` logger.


class SensorDataReader:
    def __init__(self, config_file="config.ini"):
        logger.debug("SDR startup, config file = {0}".format(config_file))
        self._read_config_file(config_file)
        logger.debug("Loaded config file")
        self._sensors = [Sensor(d) for d in self._sensors_details]
        self._db = SensorsDAO()

    def _read_config_file(self, config_file):
        cp = ConfigParser()
        cp.read(config_file)
        # One entry per "ip = name" pair in the [sensors] section
        self._sensors_details = [{
            "ip": i[0],
            "name": i[1]
        } for i in cp["sensors"].items()]
        logger.debug("Loaded {0} sensors".format(len(self._sensors_details)))
        self._sample_interval = cp.getint("settings", "sensor read interval")
        logger.debug("Sample interval: {0}".format(self._sample_interval))
        self._data_file_location = cp.get("settings", "data file location")
        logger.debug("Data file location: {0}".format(
            self._data_file_location))
        self._timeout_value = cp.getint("settings", "warning timeout")
        logger.debug("Warning timeout: {0}".format(self._timeout_value))

    def _get_today(self):
        return datetime.date.today().strftime("%Y%m%d")

    def _make_dir_if_needed(self, filename):
        directory, _ = os.path.split(filename)
        if not os.path.exists(directory):
            os.makedirs(directory)

    def _generate_todays_filename(self):
        return self._data_file_location + os.sep + self._get_today() + "_sensors.csv"

    def _make_todays_csv_file_if_necessary(self):
        # Create today's CSV file with a header row if it does not exist yet
        filename = self._generate_todays_filename()
        if not Path(filename).is_file():
            first_line_in_file = (
                "ip,name,Time,Temperature,Relative humidity,Dew point,CO2 level\n"
            )
            self._make_dir_if_needed(filename)
            with open(filename, "w") as f:
                f.write(first_line_in_file)
        return filename

    def _start_sensors(self):
        for sensor in self._sensors:
            sensor.start_data_collection(self._sample_interval)

    def _check_sensor_status(self):
        # Write a small CSV marking each sensor valid/invalid depending on how
        # long ago it last produced a successful read
        sensors_status = self._data_file_location + os.sep + "sensors_status.csv"
        first_line_in_file = "name,timestamp,timeout\n"
        with open(sensors_status, "w") as f:
            f.write(first_line_in_file)
            for s in self._sensors:
                if s.seconds_since_successful_read > self._timeout_value:
                    f.write(s.name + "," + str(s.time_of_last_successful_read) +
                            "," + "invalid\n")
                else:
                    f.write(s.name + "," + str(s.time_of_last_successful_read) +
                            "," + "valid\n")

    def log_sensor_status(self, sensor):
        # Record in the database whether the sensor is still responding
        if sensor.seconds_since_successful_read > self._timeout_value:
            self._db.insert_sensor_status(
                [sensor.ip, sensor.name, sensor._last_successful_read, False])
        else:
            self._db.insert_sensor_status(
                [sensor.ip, sensor.name, sensor._last_successful_read, True])

    def start(self):
        self._start_sensors()
        while True:
            for s in self._sensors:
                if len(s.latest_db_data) != 0:
                    self._db.insert_data(s.latest_db_data)
                self.log_sensor_status(s)
            time.sleep(self._sample_interval)
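# Hypothetical example config, not part of the original source: only the
# section and key names below come from the _read_config_file methods of
# SensorDataReader and StatsWriter; every value is a made-up illustration.
from configparser import ConfigParser

example_cp = ConfigParser()
example_cp["settings"] = {
    "sensor read interval": "60",   # seconds between sensor polls
    "data file location": "data",   # directory for the daily CSV files
    "warning timeout": "300",       # seconds before a sensor read is flagged invalid
    "work_day_start": "08:00",      # start of the window StatsWriter keeps
    "work_day_end": "18:00",        # end of that window
}
example_cp["sensors"] = {
    "192.168.1.10": "lab_north",    # ip = sensor name pairs
    "192.168.1.11": "lab_south",
}
with open("config.ini", "w") as f:
    example_cp.write(f)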