Example #1
def site_alerts(curr_trig, ts, release_data_ts, connection):
    df = curr_trig.drop_duplicates(
        ['site_id', 'trigger_source', 'alert_level'])
    site_id = df['site_id'].values[0]

    query = "SELECT trigger_id, MAX(ts_last_retrigger) ts_last_retrigger FROM alert_status"
    query += " WHERE trigger_id IN (%s)" %(','.join(map(lambda x: str(x), \
                                         set(df['trigger_id'].values))))
    query += " GROUP BY trigger_id"
    written = db.df_read(query, connection=connection)

    site_curr_trig = pd.merge(df, written, how='left')
    site_curr_trig = site_curr_trig.loc[
        (site_curr_trig.ts_last_retrigger +
         timedelta(1) < site_curr_trig.ts_updated) |
        (site_curr_trig.ts_last_retrigger.isnull()), :]

    if len(site_curr_trig) == 0:
        qdb.print_out('no new trigger for site_id %s' % site_id)
        return

    alert_status = site_curr_trig[['ts_updated', 'trigger_id']]
    alert_status = alert_status.rename(
        columns={'ts_updated': 'ts_last_retrigger'})
    alert_status['ts_set'] = datetime.now()
    data_table = sms.DataTable('alert_status', alert_status)
    db.df_write(data_table, connection=connection)
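A minimal call sketch (not from the source): the dataframe below carries only the columns site_alerts actually reads (site_id, trigger_source, alert_level, trigger_id, ts_updated); the values, timestamps, and connection name are illustrative assumptions.

# hypothetical input; real curr_trig frames come from the trigger pipeline
curr_trig = pd.DataFrame({
    'site_id': [27],
    'trigger_source': ['rainfall'],
    'alert_level': [1],
    'trigger_id': [101],
    'ts_updated': [pd.Timestamp('2020-01-01 12:00:00')]
})
site_alerts(curr_trig, ts=datetime(2020, 1, 1, 12, 0),
            release_data_ts=datetime(2020, 1, 1, 12, 30),
            connection='analysis')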
Example #2
def dftosql(df):
    v2df = get_loggers_v2()
    v3df = get_loggers_v3()
    loggers = v2df.append(v3df).reset_index()

    logger_active = pd.DataFrame()
    for i in range(0, len(v2df)):
        logger_active = logger_active.append(get_data_tsm(v2df.logger_name[i]))

    for i in range(0, len(v3df)):
        logger_active = logger_active.append(
            get_data_rain(v3df.logger_name[i]))

    logger_active = logger_active.reset_index()
    timeNow = datetime.today()
    df['last_data'] = logger_active['max(ts)']
    df['last_data'] = pd.to_datetime(df['last_data'])
    df['ts_updated'] = timeNow
    df['logger_id'] = loggers.logger_id
    # days elapsed since the logger last sent data
    df['diff_days'] = (df['ts_updated'] - df['last_data']).astype('timedelta64[D]')

    # presence flag: '1' if the logger sent data within the last 3 days, else '0'
    df['presence'] = df['diff_days'].apply(lambda x: '1' if x <= 3 else '0')

    data_table = sms.DataTable('data_presence_loggers', df)
    db.df_write(data_table, connection='analysis')

    return df
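The presence flag above can be illustrated in isolation. This standalone sketch uses made-up timestamps and pandas' dt.days (whole days) instead of the astype conversion, which is close enough to show how the 3-day cutoff behaves.

sample = pd.DataFrame({'last_data': pd.to_datetime(
    ['2020-01-01 00:00', '2019-12-20 00:00'])})
sample['ts_updated'] = pd.Timestamp('2020-01-02 00:00')
sample['diff_days'] = (sample['ts_updated'] - sample['last_data']).dt.days
sample['presence'] = sample['diff_days'].apply(lambda x: '1' if x <= 3 else '0')
print(sample)  # first logger '1' (active), second '0' (no data for ~13 days)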
Example #3
def trending_alert_gen(pos_alert, tsm_id, end):
    
    if not qdb.does_table_exist('node_alerts'):
        # create a node_alerts table if it doesn't exist yet
        create_node_alerts()
            
    query = "SELECT EXISTS(SELECT * FROM node_alerts"
    query += " WHERE ts = '%s'" %end
    query += " and tsm_id = %s and node_id = %s)" %(tsm_id, pos_alert['node_id'].values[0])
    
    if db.df_read(query, connection='local').values[0][0] == 0:
        node_alert = pos_alert[['disp_alert', 'vel_alert']].copy()
        node_alert['ts'] = end
        node_alert['tsm_id'] = tsm_id
        node_alert['node_id'] = pos_alert['node_id'].values[0]
        data_table = sms.DataTable('node_alerts', node_alert)
        db.df_write(data_table, connection='local')
    
    query = "SELECT * FROM node_alerts WHERE tsm_id = %s and node_id = %s and ts >= '%s'" %(tsm_id, pos_alert['node_id'].values[0], end-timedelta(hours=3))
    node_alert = db.df_read(query, connection='local')
    
    node_alert['node_alert'] = np.where(node_alert['vel_alert'].values >= node_alert['disp_alert'].values,

                             #node alert takes the higher perceive risk between vel alert and disp alert
                             node_alert['vel_alert'].values,                                

                             node_alert['disp_alert'].values)
    
    # trending alert: more than 3 positive node alerts within the 3-hour window
    if len(node_alert[node_alert.node_alert > 0]) > 3:
        trending_alert = pd.DataFrame({
            'node_id': [pos_alert['node_id'].values[0]],
            'TNL': [max(node_alert['node_alert'].values)]})
    else:
        trending_alert = pd.DataFrame({
            'node_id': [pos_alert['node_id'].values[0]],
            'TNL': [0]})
    
    return trending_alert
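A hedged call sketch (values are placeholders, and the project's 'local' database must be reachable): pos_alert only needs the node_id, disp_alert and vel_alert columns the function reads.

pos_alert = pd.DataFrame({'node_id': [5], 'disp_alert': [1], 'vel_alert': [2]})
tnl = trending_alert_gen(pos_alert, tsm_id=24, end=datetime(2020, 1, 1, 12, 0))
print(tnl)  # one-row dataframe with node_id and TNL columns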
Example #4
def main():
    URL = "https://earthquake.phivolcs.dost.gov.ph/"
    eq_table = get_eq(URL)
    eq_data = read_tbl(eq_table, get_eqtbl_headers(eq_table))

    query = "SELECT * FROM earthquake_events ORDER BY ts DESC LIMIT 1"
    start = pd.to_datetime(
        db.df_read(query, connection='analysis').ts.values[0]) - timedelta(1)
    eq_data = eq_data.loc[
        eq_data.ts >= start,
        ['ts', 'latitude', 'longitude', 'depth', 'magnitude', 'province']]
    eq_data.loc[:, 'issuer'] = 'PHIV'
    data_table = sms.DataTable('earthquake_events', eq_data)
    db.df_write(data_table, connection='analysis')
Example #5
def alert_to_db(df, table_name):
    """Summary of cumulative rainfall, threshold, alert and rain gauge used in
    analysis of rainfall.
    
    Args:
        df (dataframe): Dataframe to be written to database.
        table_name (str): Name of table df to be written to.
    
    """

    if not does_table_exist(table_name):
        # create the table if it doesn't exist yet
        if table_name == 'tsm_alerts':
            create_tsm_alerts()
        elif table_name == 'public_alerts':
            create_public_alerts()
        elif table_name == 'operational_triggers':
            create_operational_triggers()
        else:
            print_out('unrecognized table : ' + table_name)
            return

    if table_name == 'operational_triggers':
        # checks trigger source
        query = "SELECT * FROM "
        query += "  operational_trigger_symbols AS op "
        query += "INNER JOIN "
        query += "  trigger_hierarchies AS trig "
        query += "ON op.source_id = trig.source_id "
        all_trig = db.df_read(query)
        trigger_source = all_trig[all_trig.trigger_sym_id == \
                    df['trigger_sym_id'].values[0]]['trigger_source'].values[0]

        # does not write no-data (nd) subsurface alerts
        if trigger_source == 'subsurface':
            alert_level = all_trig[all_trig.trigger_sym_id == \
                    df['trigger_sym_id'].values[0]]['alert_level'].values[0]
            if alert_level == -1:
                return
        # if ts does not exist, writes alert; else: updates alert level
        elif trigger_source == 'surficial':

            query = "SELECT trigger_id, trig.trigger_sym_id FROM "
            query += "  (SELECT trigger_sym_id, alert_level, alert_symbol, "
            query += "  op.source_id, trigger_source FROM "
            query += "    operational_trigger_symbols AS op "
            query += "  INNER JOIN "
            query += "    (SELECT * FROM trigger_hierarchies "
            query += "    WHERE trigger_source = '%s' " % trigger_source
            query += "    ) AS trig "
            query += "  ON op.source_id = trig.source_id "
            query += "  ) AS sym "
            query += "INNER JOIN "
            query += "  (SELECT * FROM operational_triggers "
            query += "  WHERE site_id = %s " % df['site_id'].values[0]
            query += "  AND ts = '%s' " % df['ts'].values[0]
            query += "  ) AS trig "
            query += "ON trig.trigger_sym_id = sym.trigger_sym_id"
            surficial = db.df_read(query)

            if len(surficial) == 0:
                data_table = sms.DataTable(table_name, df)
                db.df_write(data_table)
            else:
                trigger_id = surficial['trigger_id'].values[0]
                trigger_sym_id = df['trigger_sym_id'].values[0]
                if trigger_sym_id != surficial['trigger_sym_id'].values[0]:
                    query = "UPDATE %s " % table_name
                    query += "SET trigger_sym_id = '%s' " % trigger_sym_id
                    query += "WHERE trigger_id = %s" % trigger_id
                    db.write(query)

            return

        query = "SELECT * FROM "
        query += "  (SELECT trigger_sym_id, alert_level, alert_symbol, "
        query += "    op.source_id, trigger_source FROM "
        query += "      operational_trigger_symbols AS op "
        query += "    INNER JOIN "
        query += "      (SELECT * FROM trigger_hierarchies "
        query += "      WHERE trigger_source = '%s' " % trigger_source
        query += "      ) AS trig "
        query += "    ON op.source_id = trig.source_id "
        query += "    ) AS sym "
        query += "INNER JOIN "
        query += "  ( "

    else:
        query = ""

    if table_name == 'tsm_alerts':
        where_id = 'tsm_id'
    else:
        where_id = 'site_id'

    ts_updated = pd.to_datetime(
        df['ts_updated'].values[0]) - timedelta(hours=0.5)

    # previous alert
    query += "  SELECT * FROM %s " % table_name
    query += "  WHERE %s = %s " % (where_id, df[where_id].values[0])
    query += "  AND ((ts <= '%s' " % df['ts_updated'].values[0]
    query += "    AND ts_updated >= '%s') " % df['ts_updated'].values[0]
    query += "  OR (ts_updated <= '%s' " % df['ts_updated'].values[0]
    query += "    AND ts_updated >= '%s')) " % ts_updated

    if table_name == 'operational_triggers':

        query += "  ) AS trig "
        query += "ON trig.trigger_sym_id = sym.trigger_sym_id "

    query += "ORDER BY ts DESC LIMIT 1"

    df2 = db.df_read(query)

    if table_name == 'public_alerts':
        query = "SELECT * FROM %s " % table_name
        query += "WHERE site_id = %s " % df['site_id'].values[0]
        query += "AND ts = '%s' " % df['ts'].values[0]
        query += "AND pub_sym_id = %s" % df['pub_sym_id'].values[0]

        df2 = df2.append(db.df_read(query))

    # writes alert if no alerts within the past 30mins
    if len(df2) == 0:
        data_table = sms.DataTable(table_name, df)
        db.df_write(data_table)
    # does not update ts_updated if it falls within the written ts to ts_updated range
    elif pd.to_datetime(df2['ts_updated'].values[0]) >= \
                  pd.to_datetime(df['ts_updated'].values[0]):
        pass
    # if different from the previous alert, writes to db; else updates ts_updated
    else:
        if table_name == 'tsm_alerts':
            alert_comp = 'alert_level'
            pk_id = 'ta_id'
        elif table_name == 'public_alerts':
            alert_comp = 'pub_sym_id'
            pk_id = 'public_id'
        else:
            alert_comp = 'trigger_sym_id'
            pk_id = 'trigger_id'

        same_alert = df2[alert_comp].values[0] == df[alert_comp].values[0]

        # same_alert may come back as a numpy array; reduce it to a scalar
        try:
            same_alert = same_alert[0]
        except (IndexError, TypeError):
            pass

        if not same_alert:
            data_table = sms.DataTable(table_name, df)
            db.df_write(data_table)
        else:
            query = "UPDATE %s " % table_name
            query += "SET ts_updated = '%s' " % df['ts_updated'].values[0]
            query += "WHERE %s = %s" % (pk_id, df2[pk_id].values[0])
            db.write(query)
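A minimal call sketch for the tsm_alerts path, with placeholder values; the columns mirror what that branch reads (tsm_id, alert_level, ts, ts_updated), and the real frames are produced upstream by the analysis code.

df = pd.DataFrame({
    'ts': [pd.Timestamp('2020-01-01 12:00:00')],
    'tsm_id': [24],
    'alert_level': [1],
    'ts_updated': [pd.Timestamp('2020-01-01 12:30:00')]
})
alert_to_db(df, 'tsm_alerts')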
Example #6
def summary_writer(site_id, site_code, gauge_name, rain_id, twoyrmax, halfmax,
                   rainfall, end, write_alert):
    """Summary of cumulative rainfall, threshold, alert and rain gauge used in
    analysis of rainfall.
    
    Args:
        site_id (int): ID per site.
        site_code (str): Three-letter code per site.
        gauge_name (str): Rain gauge used in rainfall analysis.
        rain_id (int): ID of gauge_name.
        twoyrmax (float): Threshold for 3-day cumulative rainfall per site.
        halfmax (float): Threshold for 1-day cumulative rainfall per site.
        rainfall (str): Data to compute cumulative rainfall from.
        end (datetime): End timestamp of alert to be computed.
        write_alert (bool): To write alert in database.

    Returns:
        dataframe: Summary of cumulative rainfall, threshold, alert and 
                   rain gauge used in analysis of rainfall.
    
    """

    one, three = one_three_val_writer(rainfall, end)

    #threshold is reached
    if one >= halfmax or three >= twoyrmax:
        ralert = 1
    #no data
    elif one is None or math.isnan(one):
        ralert = -1
    #rainfall below threshold
    else:
        ralert = 0

    if write_alert or ralert == 1:
        if not qdb.does_table_exist('rainfall_alerts'):
            # create a rainfall_alerts table if it doesn't exist yet
            qdb.create_rainfall_alerts()

        columns = ['rain_alert', 'cumulative', 'threshold']
        alerts = pd.DataFrame(columns=columns)

        if ralert == 0:
            if one >= halfmax * 0.75:
                temp_df = pd.Series(['x', one, halfmax], index=columns)
            elif three >= twoyrmax * 0.75:
                temp_df = pd.Series(['x', three, twoyrmax], index=columns)
            else:
                temp_df = pd.Series([False, np.nan, np.nan], index=columns)

            alerts = alerts.append(temp_df, ignore_index=True, sort=False)
        else:
            if ralert == -1:
                # no data: no cumulative value or threshold to record
                temp_df = pd.Series([False, np.nan, np.nan], index=columns)
                alerts = alerts.append(temp_df, ignore_index=True, sort=False)
            else:
                if one >= halfmax:
                    temp_df = pd.Series(['a', one, halfmax], index=columns)
                    alerts = alerts.append(temp_df, ignore_index=True,
                                           sort=False)
                if three >= twoyrmax:
                    temp_df = pd.Series(['b', three, twoyrmax], index=columns)
                    alerts = alerts.append(temp_df, ignore_index=True,
                                           sort=False)

        if alerts['rain_alert'][0] != False:
            for index, row in alerts.iterrows():
                rain_alert = row['rain_alert']
                cumulative = row['cumulative']
                threshold = row['threshold']
                if qdb.does_alert_exists(site_id, end,
                                         rain_alert).values[0][0] == 0:
                    df = pd.DataFrame({
                        'ts': [end],
                        'site_id': [site_id],
                        'rain_id': [rain_id],
                        'rain_alert': [rain_alert],
                        'cumulative': [cumulative],
                        'threshold': [threshold]
                    })
                    data_table = sms.DataTable('rainfall_alerts', df)
                    db.df_write(data_table)

    summary = pd.DataFrame({
        'site_id': [site_id],
        'site_code': [site_code],
        '1D cml': [one],
        'half of 2yr max': [round(halfmax, 2)],
        '3D cml': [three],
        '2yr max': [round(twoyrmax, 2)],
        'DataSource': [gauge_name],
        'rain_id': [rain_id],
        'alert': [ralert]
    })

    return summary
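An illustrative call with placeholder thresholds and identifiers; per the docstring, rainfall is the data handle used to compute the cumulative values, and write_alert=False limits database writes to threshold exceedances.

summary = summary_writer(site_id=27, site_code='agb', gauge_name='rain_gauge_1',
                         rain_id=5, twoyrmax=350.0, halfmax=75.0,
                         rainfall='rain_gauge_1', end=datetime(2020, 1, 1, 12, 0),
                         write_alert=False)
print(summary[['1D cml', '3D cml', 'alert']])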
Example #7
def write_marker_alerts(df, connection='analysis'):
    data_table = sms.DataTable('marker_alerts', df)
    db.df_write(data_table, connection=connection)
Example #8
def write_rain_data(gauge_name, df, connection='analysis'):
    data_table = sms.DataTable(gauge_name, df)
    db.df_write(data_table, connection=connection)
Example #9
def write_rain_gauges(df, connection='analysis'):
    data_table = sms.DataTable('rainfall_gauges', df)
    db.df_write(data_table, connection=connection)
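Examples #7 to #9 are thin wrappers around db.df_write. A hedged usage sketch for write_marker_alerts follows; the marker_alerts columns shown are guesses for illustration only, since the actual schema is defined elsewhere in the project.

marker_alerts = pd.DataFrame({'ts': [pd.Timestamp('2020-01-01 12:00:00')],
                              'marker_id': [1],       # hypothetical marker
                              'displacement': [2.5],  # hypothetical value
                              'alert_level': [0]})
write_marker_alerts(marker_alerts)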