def _update_cached_note_variables():
    """ Refreshes the cached note total count and cached note list from the SQL database. """
    try:
        count_query = "SELECT count(" + db_v.other_table_column_notes + ") FROM " + db_v.table_other
        app_cached_variables.notes_total_count = sql_execute_get_data(count_query)[0][0]

        notes_query = "SELECT " + db_v.other_table_column_notes + " FROM " + db_v.table_other
        note_rows = sql_execute_get_data(notes_query)
        # Each row is a 1-tuple; cache the note text as plain strings
        app_cached_variables.cached_notes_as_list = [str(row[0]) for row in note_rows]
    except Exception as error:
        logger.sensors_logger.error("Unable to update cached note variables: " + str(error))
# NOTE(review): removed scraped-example artifact ("示例#2" / "0") — the bare name would raise NameError at import
def get_db_note_user_dates():
    """ Returns a comma separated string of User Note Dates from the SQL Database. """
    user_dates_query = ("SELECT " + db_v.other_table_column_user_date_time +
                        " FROM " + db_v.table_other)
    user_date_rows = sqlite_database.sql_execute_get_data(user_dates_query)
    return _create_str_from_list(user_date_rows)
def _update_cached_sensor_reboot_count():
    """
    Updates app_cached_variables.reboot_count with the number of times the sensor has rebooted.

    Reboot count is calculated from uptime values stored in the Database: a reboot is
    counted each time an uptime entry is lower than the previous one.  Returns nothing;
    the result is stored in app_cached_variables.reboot_count.
    """
    # Only single-character uptime values are fetched — presumably entries recorded
    # shortly after a boot.  NOTE(review): confirm the intent of the length() filter.
    sql_query = "SELECT " + db_v.sensor_uptime + " FROM " + db_v.table_interval + \
                " WHERE length(" + db_v.sensor_uptime + ") < 2"
    sql_column_data = sql_execute_get_data(sql_query)

    reboot_count = 0
    previous_entry = 0
    bad_entries = 0
    for entry in sql_column_data:
        try:
            entry_int = int(entry[0])
        except Exception as error:
            # Log through the application logger (was a bare print) so bad entries
            # are recorded alongside the rest of the sensor logs
            logger.sensors_logger.debug(
                "Bad SQL Entry in System Uptime column: " + str(entry) +
                " : " + str(error))
            bad_entries += 1
            # Treat an unreadable entry as "no change" so it cannot fake a reboot
            entry_int = previous_entry

        # An uptime lower than the previous reading means the sensor restarted
        if entry_int < previous_entry:
            reboot_count += 1
        previous_entry = entry_int

    if bad_entries:
        logger.sensors_logger.warning(
            str(bad_entries) + " bad entries in DB reboot column")
    debug_message = "Linux System - " + str(
        len(sql_column_data)) + " entries in DB reboot column retrieved"
    logger.sensors_logger.debug(debug_message)
    app_cached_variables.reboot_count = reboot_count
# NOTE(review): removed scraped-example artifact ("示例#4" / "0") — the bare name would raise NameError at import
def _get_clean_sql_data(var_sql_query, db_location, data_to_float=True):
    """
    Runs the given SQL query against db_location and returns the first column as a list.

    When data_to_float is True, values are converted to float; values that fail
    conversion are logged at debug level and skipped.
    """
    cleaned_values = []
    for row in sql_execute_get_data(var_sql_query, db_location):
        if not data_to_float:
            cleaned_values.append(row[0])
            continue
        try:
            cleaned_values.append(float(row[0]))
        except Exception as error:
            logger.primary_logger.debug("Plotly Graph data to Float Failed: " + str(error))
    return cleaned_values
# NOTE(review): removed scraped-example artifact ("示例#5" / "0") — the bare name would raise NameError at import
def _start_plotly_graph(graph_data):
    """ Creates an offline Plotly graph from a SQL database.

    Pulls each selected sensor column (with its matching DateTime column) from the
    database described by graph_data, stores the cleaned results in
    graph_data.graph_data_dic, and hands graph_data to _plotly_graph() for rendering.
    Any failure is caught and logged as a warning.
    """
    logger.primary_logger.debug("SQL Columns: " + str(graph_data.selected_sensors_list))
    logger.primary_logger.debug("SQL Table(s): " + graph_data.graph_db_table)
    logger.primary_logger.debug("SQL Start DateTime: " + graph_data.graph_datetime_start)
    logger.primary_logger.debug("SQL End DateTime: " + graph_data.graph_datetime_end)

    try:
        # Adjust dates to Database timezone in UTC 0 (stored datetimes are UTC 0,
        # the requested range is in the user's local offset, hence the * -1)
        new_time_offset = graph_data.datetime_offset * -1
        get_sql_graph_start = adjust_datetime(graph_data.graph_datetime_start, new_time_offset)
        get_sql_graph_end = adjust_datetime(graph_data.graph_datetime_end, new_time_offset)
        graph_data.sql_ip = get_one_db_entry(graph_data.graph_db_table, db_v.ip, database=graph_data.db_location)

        for var_column in graph_data.selected_sensors_list:
            # Fetch the sensor values: non-NULL rows inside the datetime window,
            # thinned with ROWID modulo skipping and capped at max_sql_queries.
            # NOTE(review): columns come from the project's selection list, not raw
            # user text — confirm before trusting this string-built SQL further.
            var_sql_query = "SELECT " + var_column + \
                            " FROM " + graph_data.graph_db_table + \
                            " WHERE " + var_column + \
                            " IS NOT NULL AND DateTime BETWEEN datetime('" + get_sql_graph_start + \
                            "') AND datetime('" + get_sql_graph_end + \
                            "') AND ROWID % " + str(graph_data.sql_queries_skip + 1) + " = 0" + \
                            " ORDER BY " + db_v.all_tables_datetime + " DESC" + \
                            " LIMIT " + str(graph_data.max_sql_queries)

            # Fetch the matching DateTime values with the identical WHERE/skip/limit
            # clauses so both lists line up row-for-row
            var_time_sql_query = "SELECT " + db_v.all_tables_datetime + \
                                 " FROM " + graph_data.graph_db_table + \
                                 " WHERE " + var_column + \
                                 " IS NOT NULL AND DateTime BETWEEN datetime('" + get_sql_graph_start + \
                                 "') AND datetime('" + get_sql_graph_end + \
                                 "') AND ROWID % " + str(graph_data.sql_queries_skip + 1) + " = 0" + \
                                 " ORDER BY " + db_v.all_tables_datetime + " DESC" + \
                                 " LIMIT " + str(graph_data.max_sql_queries)

            # Convert the stored UTC 0 datetimes back to the user's offset for display
            original_sql_column_date_time = sql_execute_get_data(var_time_sql_query, graph_data.db_location)
            sql_column_date_time = []
            for var_d_time in original_sql_column_date_time:
                sql_column_date_time.append(adjust_datetime(var_d_time[0], graph_data.datetime_offset))

            if var_column == db_v.all_tables_datetime:
                # The DateTime column itself only contributes an entry count
                graph_data.datetime_entries_in_db = len(sql_column_date_time)
            elif var_column == db_v.sensor_name or var_column == db_v.ip:
                # Name/IP columns are text — keep them as-is (no float conversion)
                graph_data.graph_data_dic[var_column][0] = _get_clean_sql_data(
                    var_sql_query, graph_data.db_location, data_to_float=False
                )
                graph_data.graph_data_dic[var_column][1] = sql_column_date_time
            else:
                # All other sensor columns are numeric readings
                graph_data.graph_data_dic[var_column][0] = _get_clean_sql_data(var_sql_query, graph_data.db_location)
                graph_data.graph_data_dic[var_column][1] = sql_column_date_time
        _plotly_graph(graph_data)
    except Exception as error:
        logger.primary_logger.warning("Plotly Graph Generation Failed: " + str(error))
def _get_sensor_info_string(sensor_id):
    """ Renders the sensor check-in info HTML template for the given sensor ID. """
    # Each sensor has its own table named after its ID; count its check-in rows.
    # NOTE(review): sensor_id is concatenated into SQL — presumably validated upstream; confirm.
    checkin_count_query = "SELECT count('" + db_v.all_tables_datetime + "') FROM '" + sensor_id + "';"
    checkin_count = sql_execute_get_data(checkin_count_query, sql_database_location=db_loc)

    # Stored check-in datetimes are shifted to the configured local offset for display
    last_checkin_datetime = get_one_db_entry_wrapper(sensor_id, db_v.all_tables_datetime)
    adjusted_datetime = adjust_datetime(
        last_checkin_datetime, app_config_access.primary_config.utc0_hour_offset)
    return render_template(
        "ATPro_admin/page_templates/sensor_checkins/sensor-checkin-info-template.html",
        SensorID=sensor_id,
        SensorName=get_one_db_entry_wrapper(sensor_id, db_v.sensor_name),
        SensorIP=get_one_db_entry_wrapper(sensor_id, db_v.ip),
        LastCheckinDate=adjusted_datetime,
        TotalCheckins=get_sql_element(checkin_count),
        SoftwareVersion=get_one_db_entry_wrapper(sensor_id, db_v.kootnet_sensors_version),
        SensorUptime=get_one_db_entry_wrapper(sensor_id, db_v.sensor_uptime),
        InstalledSensors=get_one_db_entry_wrapper(sensor_id, db_v.sensor_check_in_installed_sensors))
# NOTE(review): removed scraped-example artifact ("示例#7" / "0") — the bare name would raise NameError at import
def get_db_note_dates():
    """ Returns a comma separated string of Note Dates from the SQL Database. """
    note_dates_query = ("SELECT " + db_v.all_tables_datetime +
                        " FROM " + db_v.table_other)
    note_date_rows = sqlite_database.sql_execute_get_data(note_dates_query)
    return _create_str_from_list(note_date_rows)