def _check_for_changes_in_sensor_info_data(data_entries):
    """
    Blank out entries in data_entries that are unchanged from the DB's Info table.

    Compares entries 1-3 (sensor name, IP, Kootnet Sensors version) against the
    latest values stored in the Info table; any entry that matches is set to None
    so only changed values remain. Returns the (mutated) data_entries list.
    """
    try:
        # (index into data_entries, Info-table column holding the current value)
        for entry_index, info_column in (
                (1, db_v.sensor_name),
                (2, db_v.ip),
                (3, db_v.kootnet_sensors_version)):
            if data_entries[entry_index] == str(get_one_db_entry(db_v.table_ks_info, info_column)):
                data_entries[entry_index] = None
    except Exception as error:
        logger.primary_logger.error("Checking DB Info entries: " + str(error))
    return data_entries
def _md5_matches_previous_configs_zip_md5(new_md5):
    """
    Return True when new_md5 equals the configuration-backup MD5 stored in the
    Info table, otherwise False (including on any lookup error, which is logged).
    """
    try:
        stored_md5 = str(
            get_one_db_entry(db_v.table_ks_info, db_v.ks_info_configuration_backups_md5))
        return stored_md5 == new_md5
    except Exception as error:
        logger.primary_logger.error("* Unable to verify backup configurations MD5: " + str(error))
    return False
def _start_plotly_graph(graph_data):
    """
    Creates an Offline Plotly graph from a SQL database.

    Pulls the selected sensor columns from graph_data's database (between the
    requested start/end DateTimes, thinned by sql_queries_skip and capped at
    max_sql_queries), stores the results in graph_data.graph_data_dic, then
    hands graph_data to _plotly_graph() to render. Any failure is caught and
    logged as a warning; nothing is raised to the caller.
    """
    logger.primary_logger.debug("SQL Columns: " + str(graph_data.selected_sensors_list))
    logger.primary_logger.debug("SQL Table(s): " + graph_data.graph_db_table)
    logger.primary_logger.debug("SQL Start DateTime: " + graph_data.graph_datetime_start)
    logger.primary_logger.debug("SQL End DateTime: " + graph_data.graph_datetime_end)
    try:
        # Adjust dates to Database timezone in UTC 0
        # (offset is inverted: user-local -> DB time for the query window;
        #  results are shifted back with the positive offset below)
        new_time_offset = graph_data.datetime_offset * -1
        get_sql_graph_start = adjust_datetime(graph_data.graph_datetime_start, new_time_offset)
        get_sql_graph_end = adjust_datetime(graph_data.graph_datetime_end, new_time_offset)
        graph_data.sql_ip = get_one_db_entry(graph_data.graph_db_table, db_v.ip, database=graph_data.db_location)
        for var_column in graph_data.selected_sensors_list:
            # NOTE(review): queries are built by string concatenation. Column and
            # table names appear to come from internal configuration, not user
            # input -- confirm; otherwise this is an SQL-injection risk.
            # "ROWID % (skip+1) = 0" thins the result set; LIMIT caps total rows.
            var_sql_query = "SELECT " + var_column + \
                            " FROM " + graph_data.graph_db_table + \
                            " WHERE " + var_column + \
                            " IS NOT NULL AND DateTime BETWEEN datetime('" + get_sql_graph_start + \
                            "') AND datetime('" + get_sql_graph_end + \
                            "') AND ROWID % " + str(graph_data.sql_queries_skip + 1) + " = 0" + \
                            " ORDER BY " + db_v.all_tables_datetime + " DESC" + \
                            " LIMIT " + str(graph_data.max_sql_queries)
            # Companion query: the DateTime stamps for the same filtered rows,
            # so each data point can be paired with its timestamp.
            var_time_sql_query = "SELECT " + db_v.all_tables_datetime + \
                                 " FROM " + graph_data.graph_db_table + \
                                 " WHERE " + var_column + \
                                 " IS NOT NULL AND DateTime BETWEEN datetime('" + get_sql_graph_start + \
                                 "') AND datetime('" + get_sql_graph_end + \
                                 "') AND ROWID % " + str(graph_data.sql_queries_skip + 1) + " = 0" + \
                                 " ORDER BY " + db_v.all_tables_datetime + " DESC" + \
                                 " LIMIT " + str(graph_data.max_sql_queries)
            original_sql_column_date_time = sql_execute_get_data(var_time_sql_query, graph_data.db_location)
            # Shift timestamps from DB time back to the requested timezone offset.
            sql_column_date_time = []
            for var_d_time in original_sql_column_date_time:
                sql_column_date_time.append(adjust_datetime(var_d_time[0], graph_data.datetime_offset))
            if var_column == db_v.all_tables_datetime:
                # The DateTime column itself only contributes a row count.
                graph_data.datetime_entries_in_db = len(sql_column_date_time)
            elif var_column == db_v.sensor_name or var_column == db_v.ip:
                # Text columns: keep values as strings (data_to_float=False).
                graph_data.graph_data_dic[var_column][0] = _get_clean_sql_data(
                    var_sql_query, graph_data.db_location, data_to_float=False
                )
                graph_data.graph_data_dic[var_column][1] = sql_column_date_time
            else:
                # Numeric sensor columns: values converted to float by default.
                graph_data.graph_data_dic[var_column][0] = _get_clean_sql_data(var_sql_query, graph_data.db_location)
                graph_data.graph_data_dic[var_column][1] = sql_column_date_time
        _plotly_graph(graph_data)
    except Exception as error:
        logger.primary_logger.warning("Plotly Graph Generation Failed: " + str(error))
def get_one_db_entry_wrapper(table_name, column_name, order="DESC"):
    """
    Convenience wrapper around get_one_db_entry() that always targets the
    module-level database location (db_loc). Forwards table, column and
    sort order unchanged.
    """
    return get_one_db_entry(
        table_name=table_name,
        column_name=column_name,
        order=order,
        database=db_loc,
    )