def get_rain_gauges():
    # active Senslope rain gauges only (deactivated gauges are excluded)
    query = "select gauge_name, rain_id from senslopedb.rainfall_gauges where data_source = 'senslope' and date_deactivated is null"
    return qdb.get_db_dataframe(query)
Example #2
def getLoggerList():
    # despite the name, this returns the active TSM sensors (tsm_id, tsm_name)
    query = "select tsm_id, tsm_name from senslopedb.tsm_sensors where date_deactivated is null"
    return qdb.get_db_dataframe(query)
def get_tsm_sensors():
    # active TSM sensors only (same query as getLoggerList)
    query = "select tsm_id, tsm_name from senslopedb.tsm_sensors where date_deactivated is null"
    return qdb.get_db_dataframe(query)
Example #4
def view_status():
    try:
        query = (
            "SELECT stat_id, accelerometer_status.accel_id, tsm_name, "
            "node_id, accel_number, ts_flag, date_identified, status, "
            "IF(status=1,'Ok', IF(status=2,'Use with Caution', "
            "IF(status=3,'Special Case', IF(status=4,'Not Ok', NULL)))) "
            "as accel_status, remarks FROM accelerometer_status "
            "inner join accelerometers on "
            "accelerometer_status.accel_id = accelerometers.accel_id "
            "inner join tsm_sensors on accelerometers.tsm_id = tsm_sensors.tsm_id "
            "order by tsm_name, node_id, accel_number")
        # qdb manages its own connection, so no separate cursor is needed
        rows = qdb.get_db_dataframe(query)
        rows = rows.to_dict('records')

        table = status(rows)
        table.border = True
        return render_template('summary.html', table=table, tsupdate='')
    except Exception as e:
        print(e)
def get_data_tsm(lgrname):
    # latest timestamp in the logger's tilt table between 2010 and 2019
    query = ("SELECT max(ts) FROM tilt_{} "
             "where ts between '2010-01-01' and '2019-01-01'".format(lgrname))
    localdf = qdb.get_db_dataframe(query)
    print(localdf)
    return localdf
Example #6
def update_in_use():
    try:
        tsm_id = request.form['input_tsm_id']
        node_id = request.form['input_node_id']
        accel_inuse = request.form['input_accel_inuse']

        # validate the received values
        if tsm_id and request.method == 'POST':
            query = ("SELECT * FROM accelerometers "
                     "where tsm_id = '{}' and node_id = '{}'".format(
                         tsm_id, node_id))
            accel = qdb.get_db_dataframe(query)

            # mark the chosen accelerometer as in use and the others as unused
            for i in accel.accel_number:
                if int(accel_inuse) == i:
                    in_use = 1
                else:
                    in_use = 0

                update_query = (
                    "UPDATE `accelerometers` "
                    "SET `in_use`='{}', `ts_updated` = NOW() WHERE `tsm_id`='{}' and "
                    "`node_id`='{}' and `accel_number` = '{}';".format(
                        in_use, tsm_id, node_id, i))
                qdb.execute_query(update_query)

            init.main()
            flash('accel switched successfully!')
            return redirect('/')
        else:
            return 'Error while updating status'
    except Exception as e:
        print(e)
        flash('accel switched ERROR!')
        return redirect('/')
def trending_alert_gen(pos_alert, tsm_id, end):

    if not qdb.does_table_exist('node_alerts'):
        # create a node_alerts table if it doesn't exist yet
        create_node_alerts()

    query = "SELECT EXISTS(SELECT * FROM node_alerts"
    query += " WHERE ts = '%s'" % end
    query += " and tsm_id = %s and node_id = %s)" % (
        tsm_id, pos_alert['node_id'].values[0])

    if qdb.get_db_dataframe(query).values[0][0] == 0:
        # insert the node alert if it isn't in the table yet
        node_alert = pos_alert[['disp_alert', 'vel_alert']].copy()
        node_alert['ts'] = end
        node_alert['tsm_id'] = tsm_id
        node_alert['node_id'] = pos_alert['node_id'].values[0]
        qdb.push_db_dataframe(node_alert, 'node_alerts', index=False)

    query = "SELECT * FROM node_alerts WHERE tsm_id = %s and node_id = %s and ts >= '%s'" % (
        tsm_id, pos_alert['node_id'].values[0], end - timedelta(hours=3))
    node_alert = qdb.get_db_dataframe(query)

    # node alert takes the higher perceived risk between vel alert and disp alert
    node_alert['node_alert'] = np.where(
        node_alert['vel_alert'].values >= node_alert['disp_alert'].values,
        node_alert['vel_alert'].values,
        node_alert['disp_alert'].values)

    if len(node_alert[node_alert.node_alert > 0]) > 3:
        trending_alert = pd.DataFrame({
            'node_id': [pos_alert['node_id'].values[0]],
            'TNL': [max(node_alert['node_alert'].values)]
        })
    else:
        trending_alert = pd.DataFrame({
            'node_id': [pos_alert['node_id'].values[0]],
            'TNL': [0]
        })

    return trending_alert
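
# Usage sketch (added for illustration; not part of the original source). The values
# below are hypothetical, and a working qdb connection to the senslopedb schema is
# assumed. pos_alert must carry 'node_id', 'disp_alert' and 'vel_alert' columns.
pos_alert = pd.DataFrame({'node_id': [5], 'disp_alert': [1], 'vel_alert': [2]})
trending = trending_alert_gen(pos_alert, tsm_id=24, end=pd.to_datetime('2019-01-01 12:00'))
print(trending)  # one-row frame with columns 'node_id' and 'TNL'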
def get_loggers_v2():
    # loggers of type arq/regular/router whose names match '___t_' or '___s_'
    query = """select lg.logger_name, lg.logger_id
    from (select * from loggers) as lg
    inner join senslopedb.logger_models as lm
    on lg.model_id = lm.model_id
    where lm.logger_type in ('arq', 'regular', 'router')
    and (logger_name like '%___t_%'
    or logger_name like '%___s_%')"""
    return qdb.get_db_dataframe(query)
def get_loggers_v3():
    # gateway/arq loggers whose names match '___r_' or '___g', excluding 'madg'
    query = """select lg.logger_name, lg.logger_id
    from (select * from loggers) as lg
    inner join senslopedb.logger_models as lm
    on lg.model_id = lm.model_id
    where lm.logger_type in ('gateway', 'arq')
    and (logger_name like '%___r_%'
    or logger_name like '%___g%')
    and lg.logger_name not in ('madg')"""
    return qdb.get_db_dataframe(query)
Example #10
def update_in_use_view():
    memc = memcache.Client(['127.0.0.1:11211'], debug=1)
    tsm_sensors = memc.get('DF_TSM_SENSORS')

    id = request.args.get('accel_id', default='', type=int)
    print(id)
    active_tsm = tsm_sensors[['tsm_id', 'tsm_name']][pd.isnull(
        tsm_sensors.date_deactivated)].sort_values(by='tsm_name')
    active_tsm = active_tsm.to_dict('records')

    if id:
        try:
            query = ("SELECT * FROM accelerometers "
                     "where tsm_id = (select tsm_id FROM accelerometers "
                     "where accel_id = {}) "
                     "and node_id = (select node_id FROM accelerometers "
                     "where accel_id = {})".format(id, id))
            row = qdb.get_db_dataframe(query)
            row = row.to_dict('records')
            if row:
                return render_template('update_inuse.html',
                                       row=row,
                                       active_tsm=active_tsm)
            else:
                return 'Error loading #{id}'.format(id=id)
        except Exception as e:
            print(e)
    else:
        return render_template('update_inuse.html',
                               row=[{
                                   'accel_number': '1',
                                   'in_use': '1'
                               }, {
                                   'accel_number': '2',
                                   'in_use': '0'
                               }],
                               active_tsm=active_tsm)
        print("bago")
Example #11
def main():
    memc = memcache.Client(['127.0.0.1:11211'], debug=1)
    tsm_sensors = memc.get('DF_TSM_SENSORS')

    dffc = pd.DataFrame(columns=[
        'accel_id', 'ts', 'percent_raw', 'percent_voltf', 'percent_rangef',
        'percent_orthof', 'percent_outlierf'
    ])
    # a single engine is reused for all inserts
    engine = create_engine(
        'mysql+pymysql://root:[email protected]:3306/senslopedb',
        echo=False)
    for i in tsm_sensors.tsm_id:
        print(i)
        dft = filter_counter(i)
        dft.to_sql(name='data_counter',
                   con=engine,
                   if_exists='append',
                   index=False)
        dffc = pd.concat([dffc, dft], ignore_index=True)

    # power / "no data" SMS from loggers over the last day (recent inbox only)
    query = (
        "SELECT l.logger_id, sms_msg FROM mia_comms_db.smsinbox_loggers "
        "inner join (SELECT mobile_id,logger_id from mia_comms_db.logger_mobile) as lm "
        "on lm.mobile_id = smsinbox_loggers.mobile_id "
        "inner join (SELECT logger_name, logger_id from mia_senslopedb.loggers) as l "
        "on lm.logger_id = l.logger_id "
        "where inbox_id >= (SELECT max(inbox_id)-10000 FROM mia_comms_db.smsinbox_loggers) "
        "and (sms_msg like '%no %' or sms_msg like 'pow%') "
        "and ts_sms >= DATE_SUB(Now(), interval 1 DAY)")
    sms_stat = qdb.get_db_dataframe(query)
    sms_stat = sms_stat.groupby(['logger_id']).first().reset_index()

    sms_summary = pd.merge(tsm_sensors[['tsm_id', 'tsm_name', 'logger_id']],
                           sms_stat,
                           how='left',
                           on='logger_id')
    print(sms_summary)
    sms_summary[['tsm_id', 'sms_msg']].to_sql(name='tsm_sms_stat',
                                              con=engine,
                                              if_exists='replace',
                                              index=False)
def get_valid_cotriggers(site_id, public_ts_start):
    query =  "SELECT "
    query += "  ot.trigger_id, ot.ts, ot.site_id, "
    query += "  ot.ts_updated, ot.trigger_sym_id, ots.alert_symbol, "
    query += "  ots.alert_level "
    query += "FROM operational_triggers AS ot "
    query += "JOIN operational_trigger_symbols AS ots"
    query += "  ON ot.trigger_sym_id = ots.trigger_sym_id "
    query += "JOIN alert_status AS als"
    query += "  ON ot.trigger_id = als.trigger_id "
    query += "WHERE "
    query += "  ts = '%s' " %(public_ts_start) 
    query += "  AND ot.site_id = %s " %(site_id)
    query += "  AND als.alert_status = 1 " #change this to 0 for validating
    query += "ORDER BY ts DESC"
    result = qdb.get_db_dataframe(query)
    
    return result
Example #13
def edit_view(id):
    try:
        query = "SELECT * FROM accelerometer_status WHERE stat_id={}".format(
            id)
        row = qdb.get_db_dataframe(query)
        if not row.empty:
            row = row.to_dict('records')[0]
            return render_template('edit.html',
                                   row=row,
                                   data=[{
                                       'status': 'Ok',
                                       'stat': 1
                                   }, {
                                       'status': 'Use with Caution',
                                       'stat': 2
                                   }, {
                                       'status': 'Special Case',
                                       'stat': 3
                                   }, {
                                       'status': 'Not Ok',
                                       'stat': 4
                                   }],
                                   name=[{
                                       'name': 'Kate Justine Flores'
                                   }, {
                                       'name': 'Brainerd Cruz'
                                   }, {
                                       'name': 'Kennex Razon'
                                   }])

        else:
            return 'Error loading #{id}'.format(id=id)
    except Exception as e:
        print(e)
Example #14
def deployment_form():
    query = "SELECT site_id, site_code FROM sites order by site_code asc"
    site = qdb.get_db_dataframe(query)
    site = site.to_dict('records')
    return render_template('form.html',
                           network=[{
                               'network': 'Globe'
                           }, {
                               'network': 'Smart'
                           }],
                           logger_type=[{
                               'logger': 'masterbox'
                           }, {
                               'logger': 'arq'
                           }, {
                               'logger': 'router'
                           }, {
                               'logger': 'gateway'
                           }],
                           site=site)
def get_surficial_trigger(start_ts, end_ts):
    query =  "SELECT "
    query += "  ot.trigger_id, ot.ts, ot.site_id, als.alert_status, "
    query += "  ot.ts_updated, ot.trigger_sym_id, ots.alert_symbol, "
    query += "  ots.alert_level, pas.pub_sym_id, sites.site_code "
    query += "FROM operational_triggers AS ot "
    query += "JOIN operational_trigger_symbols AS ots"
    query += "  ON ot.trigger_sym_id = ots.trigger_sym_id "
    query += "JOIN public_alert_symbols AS pas"
    query += "  ON ots.alert_level = pas.alert_level "
    query += "JOIN alert_status AS als"
    query += "  ON ot.trigger_id = als.trigger_id "
    query += "JOIN sites "
    query += "  ON ot.site_id = sites.site_id "
    query += "WHERE "
    query += "  (ts >= '%s' AND ts_updated <= '%s') " %(start_ts, end_ts) 
    query += "  AND source_id = 2 "
    #query += "  AND als.alert_status = -1 " #change this to 0 for validating
    query += "ORDER BY ts DESC"  
    result = qdb.get_db_dataframe(query)
        
    return result
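
# Usage sketch (added for illustration; the window below is hypothetical and a working
# qdb connection is assumed). Returns one row per surficial (source_id = 2) trigger.
surficial = get_surficial_trigger('2019-01-01 00:00:00', '2019-01-08 00:00:00')
print(surficial[['site_code', 'alert_symbol', 'alert_status', 'ts']])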
Example #16
def main(alert):

    site = alert.site_code
    ts = alert.ts_last_retrigger

    # output folder: <parent of this script>/node alert validation sandbox/<site ts>/
    OutputFP = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../'))
    OutputFP += '/node alert validation sandbox/' + '{} {}/'.format(
        site, ts.strftime("%Y-%m-%d %H%M"))
    OutputFP = OutputFP.replace("\\", "/")

    if not os.path.exists(OutputFP):
        os.makedirs(OutputFP)
    else:
        return False

    ts_before = ts.round('4H') - td(hours=4)

    queryalert = """SELECT na_id,ts,t.tsm_id,tsm_name,node_id,disp_alert,vel_alert 
                FROM senslopedb.node_alerts
                inner join tsm_sensors as t
                on t.tsm_id=node_alerts.tsm_id
                where site_id={} and (ts between '{}' and '{}')

                order by tsm_name, node_id, ts desc""".format(
        alert.site_id, ts_before, ts)
    dfalert = qdb.get_db_dataframe(queryalert).groupby(
        ['tsm_id', 'node_id']).first().reset_index()

    for i in dfalert.index:
        print(dfalert.tsm_name[i], dfalert.node_id[i], dfalert.ts[i])

        xyz.xyzplot(dfalert.tsm_id[i], dfalert.node_id[i], dfalert.ts[i],
                    OutputFP)
    return OutputFP
Example #17
def home():
    global df_summary, df_count, df_raw, df_filter, df_volt
    try:
        # deployment logs joined with their loggers
        query = (
            "SELECT * FROM deployment_logs "
            "inner join loggers on loggers.logger_id = deployment_logs.logger_id"
        )
        summary = qdb.get_db_dataframe(query)
        summary = summary.to_dict('records')
        table = Results(summary)
        table.border = True
        return render_template('summary.html', table=table)
    except Exception as e:
        print(e)
def xyzplot(tsm_id, nid, time, OutputFP=''):
    memc = memcache.Client(['127.0.0.1:11211'], debug=1)
    tsm_sensors = memc.get('DF_TSM_SENSORS')
    accelerometers = memc.get('DF_ACCELEROMETERS')
    accel = accelerometers[(accelerometers.tsm_id == tsm_id)
                           & (accelerometers.in_use == 1)]

    tsm_name = tsm_sensors.tsm_name[tsm_sensors.tsm_id == tsm_id].values[0]
    nid_up = nid - 1
    nid_down = nid + 1
    time = pd.to_datetime(time)
    from_time = time - td(days=6)
    to_time = time

    #dataframe
    df_node = qdb.get_raw_accel_data(tsm_id=tsm_id,
                                     from_time=time - td(weeks=1),
                                     to_time=time,
                                     analysis=True,
                                     batt=True)
    dff = fsd.apply_filters(df_node)

    #raw and filter counter
    raw_count = float(df_node.ts[(df_node.node_id == nid)
                                 & (df_node.ts >= time - td(days=3))].count())
    filter_count = dff.ts[(dff.node_id == nid)
                          & (dff.ts >= time - td(days=3))].count()
    try:
        percent = filter_count / raw_count * 100.0
    except ZeroDivisionError:
        percent = 0

    # percentage of validated node alerts (na_status = 1)
    query_na = ("SELECT COUNT(IF(na_status=1,1, NULL))/count(ts)*100.0 as "
                "'percent_valid' FROM senslopedb.node_alerts "
                "where tsm_id = {} and node_id = {} and na_status is not NULL "
                "group by tsm_id, node_id".format(tsm_id, nid))
    df_na = qdb.get_db_dataframe(query_na)
    if df_na.empty:
        validity = np.nan
    else:
        validity = df_na.percent_valid.values[0]

    fig = plt.figure()
    fig.suptitle("{}{} ({})".format(tsm_name, str(nid),
                                    time.strftime("%Y-%m-%d %H:%M")),
                 fontsize=11)

    #accelerometer status
    query = '''SELECT status,remarks FROM senslopedb.accelerometer_status as astat
            inner join 
            (select * from accelerometers where tsm_id={} and node_id={} 
            and in_use=1) as a
            on a.accel_id=astat.accel_id
            order by stat_id desc limit 1'''.format(tsm_id, nid)
    dfs = qdb.get_db_dataframe(query)
    if not dfs.empty:
        # map the numeric status code to its label (default 'Ok' if unrecognized)
        status_labels = {1: 'Ok', 2: 'Use with Caution',
                         3: 'Special Case', 4: 'Not Ok'}
        stat = status_labels.get(dfs.status[0], 'Ok')
        com = dfs.remarks[0]
    else:
        stat = 'Ok'
        com = ''

    fig.text(0.125,
             0.95,
             'Status: {}\nComment: {}'.format(stat, com),
             horizontalalignment='left',
             verticalalignment='top',
             fontsize=8,
             color='blue')
    # end of accelerometer status

    #filter/raw
    fig.text(0.900,
             0.95,
             '%%filter/raw = %.2f%%\n%%validity = %.2f%%' %
             (percent, validity),
             horizontalalignment='right',
             verticalalignment='top',
             fontsize=8,
             color='blue')

    df0 = dff[(dff.node_id == nid_up) & (dff.ts >= from_time) &
              (dff.ts <= to_time)]
    dfr0 = df_node[(df_node.node_id == nid_up) & (df_node.ts >= from_time) &
                   (df_node.ts <= to_time)]

    if not df0.empty:
        df0 = df0.set_index('ts')
        dfr0 = dfr0.set_index('ts')
        ax1 = plt.subplot(3, 4, 1)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df0['x'].plot(color='green')
        ax1.tick_params(axis='both', direction='in', labelsize=7)
        plt.ylabel(tsm_name + str(nid_up), color='green', fontsize=14)
        plt.title('x-axis', color='green', fontsize=8, verticalalignment='top')

        ax2 = plt.subplot(3, 4, 2, sharex=ax1)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df0['y'].plot(color='green')
        ax2.tick_params(axis='both', direction='in', labelsize=7)
        plt.title('y-axis', color='green', fontsize=8, verticalalignment='top')

        ax3 = plt.subplot(3, 4, 3, sharex=ax1)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df0['z'].plot(color='green')
        ax3.tick_params(axis='both', direction='in', labelsize=7)
        plt.title('z-axis', color='green', fontsize=8, verticalalignment='top')

        try:
            axb1 = plt.subplot(3, 4, 4, sharex=ax1)
            plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
            dfr0['batt'].plot(color='green')
            axb1.tick_params(axis='both', direction='in', labelsize=7)
            axb1.axhline(accel.voltage_max[accel.node_id == nid_up].values[0],
                         color='black',
                         linestyle='--',
                         linewidth=1)
            axb1.axhline(accel.voltage_min[accel.node_id == nid_up].values[0],
                         color='black',
                         linestyle='--',
                         linewidth=1)
            plt.title('batt',
                      color='green',
                      fontsize=8,
                      verticalalignment='top')
        except Exception:
            # battery data not available (v1 sensor)
            print("v1")

        plt.xlim([from_time, to_time])


    df = dff[(dff.node_id == nid) & (dff.ts >= from_time) &
             (dff.ts <= to_time)]
    dfr = df_node[(df_node.node_id == nid) & (df_node.ts >= from_time) &
                  (df_node.ts <= to_time)]
    if not df.empty:
        df = df.set_index('ts')
        dfr = dfr.set_index('ts')
        ax4 = plt.subplot(3, 4, 5)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df['x'].plot(color='blue')
        ax4.tick_params(axis='both', direction='in', labelsize=7)
        plt.ylabel(tsm_name + str(nid), color='blue', fontsize=14)
        plt.title('x-axis', color='blue', fontsize=8, verticalalignment='top')

        ax5 = plt.subplot(3, 4, 6, sharex=ax4)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df['y'].plot(color='blue')
        ax5.tick_params(axis='both', direction='in', labelsize=7)
        plt.title('y-axis', color='blue', fontsize=8, verticalalignment='top')

        ax6 = plt.subplot(3, 4, 7, sharex=ax4)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df['z'].plot(color='blue')
        ax6.tick_params(axis='both', direction='in', labelsize=7)
        plt.title('z-axis', color='blue', fontsize=8, verticalalignment='top')

        try:
            axb2 = plt.subplot(3, 4, 8, sharex=ax4)
            plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
            dfr['batt'].plot(color='blue')
            axb2.tick_params(axis='both', direction='in', labelsize=7)
            axb2.axhline(accel.voltage_max[accel.node_id == nid].values[0],
                         color='black',
                         linestyle='--',
                         linewidth=1)
            axb2.axhline(accel.voltage_min[accel.node_id == nid].values[0],
                         color='black',
                         linestyle='--',
                         linewidth=1)
            plt.title('batt',
                      color='blue',
                      fontsize=8,
                      verticalalignment='top')
        except Exception:
            # battery data not available (v1 sensor)
            print("v1")

        plt.xlim([from_time, to_time])


    df1 = dff[(dff.node_id == nid_down) & (dff.ts >= from_time) &
              (dff.ts <= to_time)]
    dfr1 = df_node[(df_node.node_id == nid_down) & (df_node.ts >= from_time) &
                   (df_node.ts <= to_time)]
    if not df1.empty:
        df1 = df1.set_index('ts')
        dfr1 = dfr1.set_index('ts')

        ax7 = plt.subplot(3, 4, 9)
        df1['x'].plot(color='red')
        ax7.tick_params(axis='both', direction='in', labelsize=7)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        plt.ylabel(tsm_name + str(nid_down), color='red', fontsize=14)
        plt.title('x-axis', color='red', fontsize=8, verticalalignment='top')

        ax8 = plt.subplot(3, 4, 10, sharex=ax7)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df1['y'].plot(color='red')
        ax8.tick_params(axis='both', direction='in', labelsize=7)
        plt.title('y-axis', color='red', fontsize=8, verticalalignment='top')

        ax9 = plt.subplot(3, 4, 11, sharex=ax7)
        plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
        df1['z'].plot(color='red')
        ax9.tick_params(axis='both', direction='in', labelsize=7)
        plt.title('z-axis', color='red', fontsize=8, verticalalignment='top')

        try:
            axb3 = plt.subplot(3, 4, 12, sharex=ax7)
            plt.axvspan(time - td(days=3), time, facecolor='yellow', alpha=0.4)
            dfr1['batt'].plot(color='red')
            axb3.tick_params(axis='both', direction='in', labelsize=7)
            axb3.axhline(
                accel.voltage_max[accel.node_id == nid_down].values[0],
                color='black',
                linestyle='--',
                linewidth=1)
            axb3.axhline(
                accel.voltage_min[accel.node_id == nid_down].values[0],
                color='black',
                linestyle='--',
                linewidth=1)
            plt.title('batt', color='red', fontsize=8, verticalalignment='top')
        except Exception:
            # battery data not available (v1 sensor)
            print("v1")
        plt.xlim([from_time, to_time])

#    plt.show()

    plt.savefig(OutputFP + tsm_name + str(nid) + '(' +
                time.strftime("%Y-%m-%d %H%M") + ')',
                dpi=400)
Example #19
def evaluate():
    memc = memcache.Client(['127.0.0.1:11211'], debug=1)
    accelerometers = memc.get('DF_ACCELEROMETERS')
    tsm_sensors = memc.get('DF_TSM_SENSORS')
    tsm_sensors = tsm_sensors[pd.isnull(tsm_sensors.date_deactivated)]

    query = "SELECT * FROM data_counter where ts = (SELECT max(ts) FROM data_counter)"
    #df_count = qdb.get_db_dataframe(query)
    #    engine=create_engine('mysql+pymysql://root:[email protected]:3306/senslopedb', echo = False)
    conn = mysql.connect()
    df_count = pd.read_sql(query, con=conn)

    query_sms_stat = "SELECT * FROM tsm_sms_stat"
    #df_count = qdb.get_db_dataframe(query)
    #    engine=create_engine('mysql+pymysql://root:[email protected]:3306/senslopedb', echo = False)
    conn = mysql.connect()
    df_sms = pd.read_sql(query_sms_stat, con=conn)

    # latest status entry per accelerometer
    query_stat = (
        "Select accel_id, status, remarks from "
        "(SELECT max(stat_id) as 'latest_stat_id' FROM "
        "accelerometer_status group by accel_id) as stat "
        "inner join accelerometer_status on latest_stat_id = stat_id")
    dfstat = qdb.get_db_dataframe(query_stat)

    df_count = pd.merge(accelerometers[[
        'tsm_id', 'accel_id', 'node_id', 'accel_number', 'in_use'
    ]],
                        df_count,
                        how='inner',
                        on='accel_id')
    df_count = pd.merge(df_count, dfstat, how='outer', on='accel_id')

    df_count = pd.merge(tsm_sensors[['tsm_id', 'tsm_name']],
                        df_count,
                        how='inner',
                        on='tsm_id')

    # flag accelerometers that pass each data-quality threshold
    df_count['good_raw'] = 0
    df_count.loc[df_count.percent_raw >= 75, 'good_raw'] = 1

    df_count['good_volt'] = np.nan
    df_count.loc[df_count.percent_voltf >= 75, 'good_volt'] = 1
    df_count.loc[df_count.percent_voltf < 75, 'good_volt'] = 0

    df_count['good_range'] = np.nan
    df_count.loc[df_count.percent_rangef >= 70, 'good_range'] = 1
    df_count.loc[df_count.percent_rangef < 70, 'good_range'] = 0

    percent_ortho = df_count.percent_orthof / df_count.percent_rangef * 100.0
    df_count['good_ortho'] = np.nan
    df_count.loc[percent_ortho >= 80, 'good_ortho'] = 1
    df_count.loc[percent_ortho < 80, 'good_ortho'] = 0

    percent_outlier = df_count.percent_outlierf / df_count.percent_orthof * 100.0
    df_count['good_outlier'] = np.nan
    df_count.loc[percent_outlier >= 80, 'good_outlier'] = 1
    df_count.loc[percent_outlier < 80, 'good_outlier'] = 0

    query_validity = (
        "SELECT tsm_id,node_id, COUNT(IF(na_status=1,1, NULL))/count(ts)*100.0 "
        "as 'percent_valid' FROM node_alerts group by tsm_id, node_id")
    df_validity = qdb.get_db_dataframe(query_validity)
    df_validity = pd.merge(accelerometers[['tsm_id', 'accel_id', 'node_id']],
                           df_validity,
                           how='outer',
                           on=['tsm_id', 'node_id'])
    df_validity = df_validity.drop(['tsm_id', 'node_id'], axis=1)

    df_count = pd.merge(df_count, df_validity, how='inner', on='accel_id')

    # recommendations: assignments lower in the list take precedence
    df_count['recommendation'] = np.nan
    df_count.loc[df_count.percent_valid < 50,
                 'recommendation'] = 'tag as use with caution - alert invalid'
    df_count.loc[df_count.good_outlier == 0,
                 'recommendation'] = 'tag as fluctuating data'
    df_count.loc[(df_count.good_ortho == 0) & (df_count.status != 4),
                 'recommendation'] = 'tag as not ok - invalid magnitude'
    df_count.loc[(df_count.good_range == 0) & (df_count.status != 4),
                 'recommendation'] = 'tag as not ok - out of range'

    #count good raw (with at least 75% of data)
    good_raw = df_count['ts'][(df_count.percent_raw >= 75)
                              & (df_count.status != 4)].groupby([
                                  df_count.tsm_id, df_count.tsm_name
                              ]).size().rename('good_raw')
    bad_raw = df_count['ts'][(df_count.percent_raw < 75)
                             & (df_count.status != 4)].groupby(
                                 [df_count.tsm_id,
                                  df_count.tsm_name]).size().rename('bad_raw')
    not_ok = df_count['ts'][df_count.status == 4].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('not_ok')
    df_raw = pd.concat([good_raw, bad_raw, not_ok], axis=1).fillna(0)
    df_raw = df_raw.reset_index()
    df_raw = df_raw.sort_values(by='tsm_name').reset_index(drop=True)
    df_raw['percent_good_raw'] = df_raw.good_raw / (df_raw.good_raw +
                                                    df_raw.bad_raw) * 100.0
    df_raw['raw_status'] = np.nan
    df_raw.loc[df_raw.percent_good_raw > 50, 'raw_status'] = 'Ok'
    df_raw.loc[df_raw.percent_good_raw <= 50, 'raw_status'] = 'Not Ok'

    #count good volt (with at least 75% of data)
    good_volt = df_count['ts'][df_count.percent_voltf >= 75].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('good_volt')
    bad_volt = df_count['ts'][df_count.percent_voltf < 75].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('bad_volt')
    nan_volt = df_count['ts'][np.isnan(df_count.percent_voltf)].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('nan_volt')
    df_volt = pd.concat([good_volt, bad_volt, nan_volt], axis=1).fillna(0)
    df_volt = df_volt.reset_index()
    df_volt['percent_good_volt'] = df_volt.good_volt / (
        df_volt.good_volt + df_volt.bad_volt) * 100.0
    df_volt['volt_status'] = 'no data'
    df_volt.loc[df_volt.percent_good_volt > 50, 'volt_status'] = 'Ok'
    df_volt.loc[df_volt.percent_good_volt <= 50, 'volt_status'] = 'Not Ok'

    #count good filtered (with at least 70% of data)
    good_filter = df_count['ts'][df_count.percent_outlierf >= 70].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('good_filtered')
    bad_filter = df_count['ts'][df_count.percent_outlierf < 70].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('bad_filtered')
    nan_filter = df_count['ts'][np.isnan(df_count.percent_outlierf)].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('nan_filtered')
    to_tag_filter = df_count['ts'][df_count.recommendation.notnull()].groupby(
        [df_count.tsm_id, df_count.tsm_name]).size().rename('to_tag_filtered')

    df_filter = pd.concat([good_filter, bad_filter, nan_filter, to_tag_filter],
                          axis=1).fillna(0)
    df_filter = df_filter.reset_index()
    df_filter['percent_good_filtered'] = df_filter.good_filtered / (
        df_filter.good_filtered + df_filter.bad_filtered) * 100.0
    df_filter['filter_status'] = 'no data'
    df_filter.loc[df_filter.percent_good_filtered > 10, 'filter_status'] = 'Ok'
    df_filter.loc[df_filter.to_tag_filtered > 0,
                  'filter_status'] = 'to tag accelerometers'
    df_filter.loc[df_filter.percent_good_filtered <= 10,
                  'filter_status'] = 'Not Ok'

    df_count = df_count.drop([
        'count_id', 'ts', 'good_raw', 'good_volt', 'good_range', 'good_ortho',
        'good_outlier'
    ],
                             axis=1)

    df_summary = pd.merge(df_raw[['tsm_id', 'tsm_name', 'raw_status']],
                          df_filter[['tsm_id', 'tsm_name', 'filter_status']],
                          how='inner',
                          on=['tsm_id', 'tsm_name'])
    df_summary = pd.merge(df_summary,
                          df_volt[['tsm_id', 'tsm_name', 'volt_status']],
                          how='inner',
                          on=['tsm_id', 'tsm_name'])
    df_summary = pd.merge(df_summary, df_sms, how='left', on='tsm_id')
    return df_summary, df_count, df_raw, df_filter, df_volt
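
# Usage sketch (added for illustration): evaluate() assumes memcached already holds
# DF_TSM_SENSORS and DF_ACCELEROMETERS and that the databases above are reachable.
df_summary, df_count, df_raw, df_filter, df_volt = evaluate()
print(df_summary[['tsm_name', 'raw_status', 'filter_status', 'volt_status']])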
Example #20
def main():

    # asks for tsm name
    while True:
        props = qdb.get_tsm_list(input('sensor name: '))
        if len(props) == 1:
            break
        else:
            qdb.print_out('sensor name is not in the list')
            continue

    tsm_props = props[0]

    # asks if to plot from date activated (or oldest data) to most recent data
    while True:
        input_text = 'plot from start to end of data? (Y/N): '
        plot_all_data = input(input_text).lower()
        if plot_all_data == 'y' or plot_all_data == 'n':
            break

    # asks if to specify end timestamp of monitoring window
    if plot_all_data == 'n':
        while True:
            input_text = 'specify end timestamp of monitoring window? (Y/N): '
            test_specific_time = input(input_text).lower()
            if test_specific_time == 'y' or test_specific_time == 'n':
                break

        # ask for timestamp of end of monitoring window defaults to datetime.now
        if test_specific_time == 'y':
            while True:
                try:
                    input_text = 'plot end timestamp (format: 2016-12-31 23:30): '
                    end = pd.to_datetime(input(input_text))
                    break
                except:
                    print ('invalid datetime format')
                    continue
        else:
            end = datetime.now()
            
        # monitoring window and server configurations
        window, sc = rtw.get_window(end)
        
        # asks if to plot with 3-day monitoring window
        while True:
            input_text = 'plot with 3-day monitoring window? (Y/N): '
            three_day_window = input(input_text).lower()
            if three_day_window == 'y' or three_day_window == 'n':
                break
        
        # asks start of monitoring window defaults to values in server config
        if three_day_window == 'n':
            while True:
                input_text = 'start of monitoring window (in days) '
                input_text += 'or datetime (format: 2016-12-31 23:30): '
                start = input(input_text)
                try:
                    window.start = window.end - timedelta(int(start))
                    break
                except:
                    try:
                        window.start = pd.to_datetime(start)
                        break
                    except:
                        print ('datetime format or integer only')
                        continue
                    
            # computes offsetstart from given start timestamp
            window.offsetstart = window.start - timedelta(
                days=(sc['subsurface']['num_roll_window_ops']*window.numpts-1)/48.)
            
    else:
        # check date of activation
        query = "SELECT date_activated FROM tsm_sensors"
        query += " WHERE tsm_name = '%s'" %tsm_props.tsm_name
        try:
            date_activated = qdb.get_db_dataframe(query).values[0][0]
        except:
            date_activated = pd.to_datetime('2010-01-01')
        #compute for start to end timestamp of data
        query = "(SELECT * FROM tilt_%s" %tsm_props.tsm_name
        query += " where ts > '%s' ORDER BY ts LIMIT 1)" %date_activated
        query += " UNION ALL"
        query += " (SELECT * FROM tilt_%s" %tsm_props.tsm_name
        query += " ORDER BY ts DESC LIMIT 1)"
        start_end = qdb.get_db_dataframe(query)
        
        end = pd.to_datetime(start_end['ts'].values[1])
        window, sc = rtw.get_window(end)
        
        start_dataTS = pd.to_datetime(start_end['ts'].values[0])
        # round the start of data down to the nearest half hour
        window.offsetstart = start_dataTS.floor('30min')

        # computes window.start from the offsetstart timestamp
        window.start = window.offsetstart + timedelta(
            days=(sc['subsurface']['num_roll_window_ops']*window.numpts-1)/48.)


    # asks if to plot velocity and asks for interval and legends to show in
    # column position plots if to plot all data or if monitoring window not
    # equal to 3 days    
    if plot_all_data == 'y' or three_day_window == 'n':
        # asks for interval between column position plots
        while True:
            try:
                input_text = 'interval between column position plots, in days: '
                col_pos_interval = int(input(input_text))
                break
            except:
                qdb.print_out('enter an integer')
                continue
            
        # computes for interval and number of column position plots
        sc['subsurface']['col_pos_interval'] = str(col_pos_interval) + 'D'
        sc['subsurface']['num_col_pos'] = int(
            (window.end - window.start).days/col_pos_interval + 1)
                                                    
        # asks if to plot all legends
        while True:
            input_text = 'show all legends in column position plot? (Y/N): '
            show_all_legend = input(input_text).lower()
            if show_all_legend == 'y' or show_all_legend == 'n':        
                break

        if show_all_legend == 'y':
            show_part_legend = False
        # asks which legends to show
        elif show_all_legend == 'n':
            while True:
                try:
                    show_part_legend = int(input('every nth legend to show: '))
                    if show_part_legend <= sc['subsurface']['num_col_pos']:
                        break
                    else:
                        input_text = 'integer should be less than '
                        input_text += 'the number of colpos dates to plot: '
                        input_text += '%s' %(sc['subsurface']['num_col_pos'])
                        qdb.print_out(input_text)
                        continue
                except:
                    qdb.print_out('enter an integer')
                    continue
                
        while True:
            plotvel = input('plot velocity? (Y/N): ').lower()
            if plotvel == 'y' or plotvel == 'n':        
                break
            
        if plotvel == 'y':
            plotvel = True
        else:
            plotvel = False
        
        three_day_window = False            
    else:
        plotvel = True
        show_part_legend = True
        three_day_window = True

    # asks which point to fix in column position plots
    while True:
        input_text = 'column fix for colpos (top/bottom). '
        input_text += 'press enter to skip; '
        input_text += 'default for monitoring is fix bottom: '
        column_fix = input(input_text).lower()
        if column_fix in ['top', 'bottom', '']:
            break
        
    if column_fix == '':
        column_fix = 'bottom'        
    sc['subsurface']['column_fix'] = column_fix
      
    # mirror xz and/or xy colpos
    while True:
        try:
            mirror_xz = bool(int(input('mirror image of xz colpos? (0/1): ')))
            break
        except:
            print ('Invalid. 1 for mirror image of xz colpos else 0')
            continue
    while True:
        try:
            mirror_xy = bool(int(input('mirror image of xy colpos? (0/1): ')))
            break
        except:
            print ('Invalid. 1 for mirror image of xy colpos else 0')
            continue

    data = proc.proc_data(tsm_props, window, sc, realtime=True, comp_vel=plotvel)
    plotter.main(data, tsm_props, window, sc, plotvel=plotvel,
                 show_part_legend = show_part_legend, realtime=True,
                 plot_inc=False, three_day_window=three_day_window,
                 mirror_xz=mirror_xz, mirror_xy=mirror_xy)
Example #21
query = (
    "SELECT stat_id, site_code,s.site_id, trigger_source, alert_symbol, "
    "ts_last_retrigger FROM "
    "(SELECT stat_id, ts_last_retrigger, site_id, trigger_source, "
    "alert_symbol FROM "
    "(SELECT stat_id, ts_last_retrigger, site_id, trigger_sym_id FROM "
    "(SELECT * FROM alert_status WHERE "
    "ts_set >= NOW()-interval 5 minute "
    "and ts_ack is NULL"
    #        "stat_id=1025 "
    ") AS stat "
    "INNER JOIN "
    "operational_triggers AS op "
    "ON stat.trigger_id = op.trigger_id) AS trig "
    "INNER JOIN "
    "(Select * from operational_trigger_symbols  where source_id=1) AS sym "
    "ON trig.trigger_sym_id = sym.trigger_sym_id "
    "inner join trigger_hierarchies as th "
    "on th.source_id=sym.source_id) AS alert "
    "INNER JOIN "
    "sites as s "
    "ON s.site_id = alert.site_id")

smsalert = qdb.get_db_dataframe(query)

for i in smsalert.index:
    OutputFP = main(smsalert.loc[i])
    if not OutputFP:
        # main() returns False when the output folder already exists,
        # i.e. this alert was already sent
        print("nasend na!")  # "already sent"
    else:
        send_messenger(OutputFP, smsalert.loc[i])
Example #22
def getPoints(lgrname):
    # latest timestamp in the logger's tilt table between 2010 and 2019
    query = ("SELECT max(ts) FROM tilt_{} "
             "where ts between '2010-01-01' and '2019-01-01'".format(lgrname))
    localdf = qdb.get_db_dataframe(query)
    print(localdf)
    return localdf