Code example #1
def site_alerts(curr_trig, ts, release_data_ts, connection):
    df = curr_trig.drop_duplicates(
        ['site_id', 'trigger_source', 'alert_level'])
    site_id = df['site_id'].values[0]

    query = "SELECT trigger_id, MAX(ts_last_retrigger) ts_last_retrigger FROM alert_status"
    query += " WHERE trigger_id IN (%s)" %(','.join(map(lambda x: str(x), \
                                         set(df['trigger_id'].values))))
    query += " GROUP BY trigger_id"
    written = db.df_read(query, connection=connection)

    site_curr_trig = pd.merge(df, written, how='left')
    site_curr_trig = site_curr_trig.loc[
        (site_curr_trig.ts_last_retrigger +
         timedelta(1) < site_curr_trig.ts_updated) |
        (site_curr_trig.ts_last_retrigger.isnull()), :]

    if len(site_curr_trig) == 0:
        qdb.print_out('no new trigger for site_id %s' % site_id)
        return

    alert_status = site_curr_trig[['ts_updated', 'trigger_id']]
    alert_status = alert_status.rename(
        columns={'ts_updated': 'ts_last_retrigger'})
    alert_status['ts_set'] = datetime.now()
    data_table = sms.DataTable('alert_status', alert_status)
    db.df_write(data_table, connection=connection)
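
The merge-and-filter above keeps triggers that were never written before, or were last written more than a day before the current trigger. A minimal standalone sketch of that filter with toy data (pandas only; db, qdb, and sms are project modules):

from datetime import timedelta

import pandas as pd

curr = pd.DataFrame({'trigger_id': [1, 2, 3],
                     'ts_updated': pd.to_datetime(['2021-01-02'] * 3)})
written = pd.DataFrame({'trigger_id': [1, 2],
                        'ts_last_retrigger': pd.to_datetime(
                            ['2020-12-31', '2021-01-01 18:00'])})

merged = pd.merge(curr, written, how='left')  # joins on the shared trigger_id
# keep rows never written (NaT) or last written more than a day earlier
retrigger = merged.loc[(merged.ts_last_retrigger + timedelta(1) < merged.ts_updated)
                       | merged.ts_last_retrigger.isnull()]
print(retrigger.trigger_id.tolist())  # [1, 3]
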
Code example #2
def trending_alert_gen(pos_alert, tsm_id, end):
    
    if not qdb.does_table_exist('node_alerts'):
        #Create a node_alerts table if it doesn't exist yet
        create_node_alerts()
            
    query = "SELECT EXISTS(SELECT * FROM node_alerts"
    query += " WHERE ts = '%s'" %end
    query += " and tsm_id = %s and node_id = %s)" %(tsm_id, pos_alert['node_id'].values[0])
    
    if db.df_read(query, connection='local').values[0][0] == 0:
        # copy to avoid writing onto a slice of pos_alert
        node_alert = pos_alert[['disp_alert', 'vel_alert']].copy()
        node_alert['ts'] = end
        node_alert['tsm_id'] = tsm_id
        node_alert['node_id'] = pos_alert['node_id'].values[0]
        data_table = sms.DataTable('node_alerts', node_alert)
        db.df_write(data_table, connection='local')
    
    query = "SELECT * FROM node_alerts WHERE tsm_id = %s and node_id = %s and ts >= '%s'" %(tsm_id, pos_alert['node_id'].values[0], end-timedelta(hours=3))
    node_alert = db.df_read(query, connection='local')
    
    #node alert takes the higher perceived risk between vel alert and disp alert
    node_alert['node_alert'] = np.where(
        node_alert['vel_alert'].values >= node_alert['disp_alert'].values,
        node_alert['vel_alert'].values,
        node_alert['disp_alert'].values)
    
    if len(node_alert[node_alert.node_alert > 0]) > 3:        
        trending_alert = pd.DataFrame({'node_id': [pos_alert['node_id'].values[0]], 'TNL': [max(node_alert['node_alert'].values)]})
    else:
        trending_alert = pd.DataFrame({'node_id': [pos_alert['node_id'].values[0]], 'TNL': [0]})
    
    return trending_alert
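
The np.where call above is just an elementwise maximum of the two alert columns; a standalone illustration (numpy/pandas only):

import numpy as np
import pandas as pd

node_alert = pd.DataFrame({'disp_alert': [0, 2, 1], 'vel_alert': [1, 0, 3]})
node_alert['node_alert'] = np.where(
    node_alert['vel_alert'].values >= node_alert['disp_alert'].values,
    node_alert['vel_alert'].values,
    node_alert['disp_alert'].values)
# equivalent one-liner: np.maximum(node_alert['disp_alert'], node_alert['vel_alert'])
print(node_alert['node_alert'].tolist())  # [1, 2, 3]
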
Code example #3
File: optrig.py Project: dynatech/updews-pycodes
def earthquake(site_id, ts):
    """Insert values to earthquake_events, earthquake_alerts, and 
    operational_triggers to (re)trigger subsurface alert.
    
    Args:
        site_id (int): ID of site to compute earthquake analysis for.
        ts (datetime): Timestamp of alert trigger.
    """

    # writes to earthquake_events; defaults epicenter to site coordinates, depth to 0, and magnitude to 4.3
    sites = eq.get_sites()
    earthquake_events = sites.loc[sites.site_id == site_id, ['latitude', 'longitude', 'province']]
    earthquake_events.loc[:, 'ts'] = ts
    earthquake_events.loc[:, 'magnitude'] = 4.3
    earthquake_events.loc[:, 'depth'] = 0
    earthquake_events.loc[:, 'critical_distance'] = np.round(eq.get_crit_dist(4.3), decimals=2)
    earthquake_events.loc[:, 'issuer'] = 'TOPSSOFTWAREINFRA'
    earthquake_events.loc[:, 'processed'] = 1
    eq_id = int(db.df_write(data_table = sms.DataTable("earthquake_events", earthquake_events), resource='sensor_data', last_insert=True)[0][0])
    
    # writes to earthquake_alerts
    earthquake_alerts = pd.DataFrame({'eq_id': [eq_id], 'site_id': [site_id], 'distance': [0]})
    db.df_write(data_table = sms.DataTable("earthquake_alerts", earthquake_alerts), resource='sensor_data')
    
    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[(trigger_symbol.trigger_source == 'earthquake') & (trigger_symbol.alert_level == 1), 'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id], 'trigger_sym_id': [trigger_sym_id], 'ts': [ts], 'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    
    # details for trigger tech info
    earthquake_events.loc[:, 'distance'] = 0

    return earthquake_events
Code example #4
File: lib.py Project: dynatech/updews-pycodes
def send_unsent_notif(df, notif_type, curr_release, validation=False):
    ts = curr_release.strftime('%I%p %B %d, %Y')
    if len(df) != 0:
        site_notif = '\n'.join(list(map(lambda x: ': '.join(x), df.values)))
        if validation:
            sms_msg = 'Validate measurements with displacement of 1cm and more:\n\n' + site_notif
        else:
            sms_msg = 'Unsent ' + notif_type + ' (' + ts + '):\n\n' + site_notif
        smsoutbox_user_status = get_recipient(curr_release)
    else:
        if notif_type == 'gndmeas':
            return
        sms_msg = 'Sent all ' + notif_type + ' (' + ts + ')'
        smsoutbox_user_status = get_recipient(curr_release, unsent=False)
    smsoutbox_users = pd.DataFrame({
        'sms_msg': [sms_msg],
        'source': ['central']
    })
    data_table = sms.DataTable('smsoutbox_users', smsoutbox_users)
    outbox_id = db.df_write(data_table, connection='gsm_pi',
                            last_insert=True)[0][0]

    smsoutbox_user_status.loc[:, 'outbox_id'] = outbox_id
    data_table = sms.DataTable('smsoutbox_user_status', smsoutbox_user_status)
    db.df_write(data_table, connection='gsm_pi')
Code example #5
def dftosql(df):
    v2df = get_loggers_v2()
    v3df = get_loggers_v3()
    loggers = v2df.append(v3df).reset_index()

    logger_active = pd.DataFrame()
    for i in range(len(v2df)):
        logger_active = logger_active.append(get_data_tsm(v2df.logger_name[i]))

    for i in range(len(v3df)):
        logger_active = logger_active.append(
            get_data_rain(v3df.logger_name[i]))

    logger_active = logger_active.reset_index()
    timeNow = datetime.today()
    df['last_data'] = logger_active['max(ts)']
    df['last_data'] = pd.to_datetime(df['last_data'])
    df['ts_updated'] = timeNow
    df['logger_id'] = loggers.logger_id
    df['diff_days'] = (df['ts_updated'] - df['last_data']).astype('timedelta64[D]')

    # flag loggers with data within the last 3 days as present
    df['presence'] = df['diff_days'].apply(lambda x: '1' if x <= 3 else '0')

    data_table = sms.DataTable('data_presence_loggers', df)
    db.df_write(data_table, connection='analysis')

    return df
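
The diff_days/presence step reduces to a day difference floored to whole days; a standalone illustration using .dt.days, the version-stable equivalent of the snippet's astype('timedelta64[D]'):

import pandas as pd

df = pd.DataFrame({'last_data': pd.to_datetime(['2021-01-04', '2020-12-25'])})
df['ts_updated'] = pd.to_datetime('2021-01-05')
df['diff_days'] = (df['ts_updated'] - df['last_data']).dt.days  # 1 and 11
df['presence'] = df['diff_days'].apply(lambda x: '1' if x <= 3 else '0')
print(df['presence'].tolist())  # ['1', '0']
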
Code example #6
def main():
    """Writes in rainfall_gauges information on available rain gauges 
     for rainfall alert analysis

    """

    start = datetime.now()
    qdb.print_out(start)

    if not qdb.does_table_exist('rainfall_gauges'):
        #Create a rainfall_gauges table if it doesn't exist yet
        qdb.create_rainfall_gauges()

    senslope = mem.get('df_dyna_rain_gauges')
    senslope = senslope.loc[senslope.has_rain == 1, :]
    senslope.loc[:, 'data_source'] = 'senslope'

    noah = noah_gauges()

    all_gauges = senslope.append(noah, sort=False)
    all_gauges.loc[:, 'gauge_name'] = all_gauges.loc[:, 'gauge_name'].astype(str)
    all_gauges.loc[:, 'date_activated'] = pd.to_datetime(
        all_gauges.loc[:, 'date_activated'])
    written_gauges = mem.get('df_rain_gauges')
    not_written = set(all_gauges['gauge_name']) \
                     - set(written_gauges['gauge_name'])

    new_gauges = all_gauges.loc[all_gauges.gauge_name.isin(not_written), :]
    new_gauges = new_gauges.loc[new_gauges.date_deactivated.isnull(), :]
    new_gauges = new_gauges.loc[:, [
        'gauge_name', 'data_source', 'longitude', 'latitude', 'date_activated'
    ]]
    if len(new_gauges) != 0:
        data_table = sms.DataTable('rainfall_gauges', new_gauges)
        db.df_write(data_table)

    deactivated = written_gauges.loc[
        ~written_gauges.date_deactivated.isnull(), :]

    deactivated_gauges = all_gauges.loc[(~all_gauges.date_deactivated.isnull()) \
                                  & (~all_gauges.gauge_name.isin(not_written))\
                                  & (~all_gauges.gauge_name.isin(deactivated.gauge_name)), :]
    date_deactivated = pd.to_datetime(
        deactivated_gauges.loc[:, 'date_deactivated'])
    deactivated_gauges.loc[:, 'date_deactivated'] = date_deactivated
    deactivated_gauges = deactivated_gauges.loc[:, [
        'gauge_name', 'data_source', 'longitude', 'latitude', 'date_activated'
    ]]
    if len(deactivated_gauges) != 0:
        data_table = sms.DataTable('rainfall_gauges', deactivated_gauges)
        db.df_write(data_table)

    qdb.print_out('runtime = %s' % (datetime.now() - start))
Code example #7
def main(key):
    ts = datetime.now()
    sheet_name = ts.strftime('%B %Y')
    shift_sched = get_shift(key, sheet_name)
    try:
        sheet_name = (ts+timedelta(weeks=2)).strftime('%B %Y')
        shift_sched = shift_sched.append(get_shift(key, sheet_name))
    except Exception:
        print("no shift schedule for next month")
    data_table = sms.DataTable('monshiftsched', shift_sched)
    db.df_write(data_table)
Code example #8
def main():
    query = 'SELECT * FROM markers'
    markers = db.df_read(query)
    query = "SELECT * FROM marker_observations"
    mo = db.df_read(query)
    query = "SELECT * FROM marker_data"
    md = db.df_read(query)
    query = "SELECT ma_id, ts, marker_id FROM marker_alerts"
    ma = db.df_read(query)
    
    marker_alerts = pd.merge(ma, markers, on='marker_id', validate='m:1')
    marker_alerts = pd.merge(marker_alerts, mo, on=['site_id', 'ts'],
                             validate='m:1')
    marker_alerts = pd.merge(marker_alerts, md, on=['mo_id', 'marker_id'],
                             validate='m:1')
    marker_alerts = marker_alerts.drop_duplicates(['ts', 'marker_id'],
                                                  keep='last')
    
    # delete marker_alerts not in marker_observations and duplicated marker_alerts
    ma_id = set(ma['ma_id']) - set(marker_alerts['ma_id'])
    if len(ma_id) != 0:
        query = 'DELETE FROM marker_alerts WHERE ma_id IN (%s)' % ','.join(map(str, ma_id))
        qdb.execute_query(query)
    
    try:
        query = 'ALTER TABLE marker_alerts ADD UNIQUE INDEX uq_marker_alerts (marker_id ASC, ts ASC)'
        qdb.execute_query(query)
    except:
        pass
    
    try:
        query =  "ALTER TABLE marker_alerts "
        query += "ADD UNIQUE INDEX uq_marker_alerts1 (data_id ASC); "
        qdb.execute_query(query)
    except:
        pass
    
    try:
        query =  "ALTER TABLE marker_alerts "
        query += "ADD CONSTRAINT fk_marker_data "
        query += "  FOREIGN KEY (data_id) "
        query += "  REFERENCES marker_data (data_id) "
        query += "  ON DELETE CASCADE "
        query += "  ON UPDATE CASCADE; "
        qdb.execute_query(query)
    except:
        pass
    
    data_table = sms.DataTable('marker_alerts',
                               marker_alerts[['ts', 'marker_id', 'data_id']])
    db.df_write(data_table)
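
The validate='m:1' arguments above make the merges fail fast if the right-hand keys are unexpectedly duplicated; a toy demonstration:

import pandas as pd

ma = pd.DataFrame({'ma_id': [1, 2], 'marker_id': [10, 10]})
markers = pd.DataFrame({'marker_id': [10], 'site_id': [5]})
print(pd.merge(ma, markers, on='marker_id', validate='m:1'))  # ok: right side unique

dup = pd.DataFrame({'marker_id': [10, 10], 'site_id': [5, 6]})
try:
    pd.merge(ma, dup, on='marker_id', validate='m:1')
except pd.errors.MergeError as err:
    print('MergeError:', err)  # right side not unique
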
Code example #9
def main():
    URL = "https://earthquake.phivolcs.dost.gov.ph/"
    eq_table = get_eq(URL)
    eq_data = read_tbl(eq_table, get_eqtbl_headers(eq_table))

    query = "SELECT * FROM earthquake_events ORDER BY ts DESC LIMIT 1"
    start = pd.to_datetime(
        db.df_read(query, connection='analysis').ts.values[0]) - timedelta(1)
    eq_data = eq_data.loc[
        eq_data.ts >= start,
        ['ts', 'latitude', 'longitude', 'depth', 'magnitude', 'province']]
    eq_data.loc[:, 'issuer'] = 'PHIV'
    data_table = sms.DataTable('earthquake_events', eq_data)
    db.df_write(data_table, connection='analysis')
Code example #10
def generate_surficial_alert(site_id = None,ts = None):
    """
    Main alert-generating function for surficial alert for a site at a specified time
    
    Parameters
    ------------------
    site_id: int
        site_id of site of interest
    ts: timestamp
        timestamp of alert generation
        
    Returns
    -------------------
    Prints the generated alert and writes to marker_alerts database
    """
    #### Obtain system arguments from command prompt
    if site_id is None and ts is None:
        site_id, ts = sys.argv[1].lower(), sys.argv[2].lower()
    
    #### Config variables
    num_pts = int(sc['surficial']['surficial_num_pts'])
    ts_start = pd.to_datetime(ts) - timedelta(sc['surficial']['meas_plot_window'])

    #### Get latest ground data
    surficial_data_df = qdb.get_surficial_data(site_id, ts_start, ts, num_pts)
    
    #### Generate Marker alerts
    marker_data_df = surficial_data_df.groupby('marker_id',as_index = False)
    marker_alerts = marker_data_df.apply(evaluate_marker_alerts, ts)

    #### Write to marker_alerts table    
    data_table = sms.DataTable('marker_alerts', marker_alerts)
    db.df_write(data_table)

    #### Generate surficial alert for site
    surficial_alert = get_surficial_alert(marker_alerts,site_id)
    #### Write to db
    qdb.alert_to_db(surficial_alert,'operational_triggers')
    
    #### Plot current ground meas    
    if sc['surficial']['print_meas_plot']:
        ### Retrieve the surficial data to plot
        surficial_data_to_plot = surficial_data_df.loc[surficial_data_df.ts >= ts_start, :]
        ### Plot the surficial data
        plot_site_meas(surficial_data_to_plot, ts)
    
    return surficial_data_df
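
evaluate_marker_alerts is project code, so the sketch below substitutes a hypothetical per-marker rule just to show the groupby-apply shape used above:

import pandas as pd

surficial_data_df = pd.DataFrame({'marker_id': [1, 1, 2, 2],
                                  'measurement': [10.0, 13.0, 5.0, 5.2]})

def evaluate_marker_alerts(group, ts):
    # hypothetical rule: alert if total displacement reaches 1 cm
    disp = group['measurement'].iloc[-1] - group['measurement'].iloc[0]
    return pd.DataFrame({'marker_id': [group.name], 'ts': [ts],
                         'alert_level': [int(disp >= 1)]})

marker_data_df = surficial_data_df.groupby('marker_id', as_index=False)
marker_alerts = marker_data_df.apply(evaluate_marker_alerts, '2021-01-01 12:00')
print(marker_alerts.reset_index(drop=True))  # marker 1 alerts, marker 2 does not
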
Code example #11
def to_mysql(df):
    """Writes in rainfall_priorities the distance of 4 active nearby
    rain gauges from the site.
    
    Args:
        df (dataframe): Record of 4 nearby rain gauges with 
        its distance from the site.

    """
    written_df = mem.get('df_rain_priorities')
    combined = written_df.append(df, ignore_index=True, sort=False)
    combined = combined.append(written_df, ignore_index=True, sort=False)
    combined = combined.drop_duplicates(['site_id', 'rain_id'], keep=False)

    if len(combined) > 0:
        data_table = sms.DataTable('rainfall_priorities', combined)
        db.df_write(data_table)
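
The double append plus drop_duplicates(keep=False) above is a set-difference idiom: every already-written (site_id, rain_id) pair appears at least twice in the combined frame and is dropped, leaving only the new pairs. A toy sketch (pd.concat stands in for the deprecated DataFrame.append):

import pandas as pd

written_df = pd.DataFrame({'site_id': [1, 1], 'rain_id': [7, 8]})
df = pd.DataFrame({'site_id': [1, 1], 'rain_id': [8, 9]})

# same effect as written_df.append(df).append(written_df) in the snippet
combined = pd.concat([written_df, df, written_df], ignore_index=True)
combined = combined.drop_duplicates(['site_id', 'rain_id'], keep=False)
print(combined)  # only (1, 9) survives: the pair not yet written
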
Code example #12
File: optrig.py Project: dynatech/updews-pycodes
def subsurface(site_id, ts, alert_level):
    """Insert values to node_alerts, tsm_alerts, and operational_triggers
    to (re)trigger subsurface alert.
    
    Args:
        site_id (int): ID of site to compute subsurface analysis for.
        ts (datetime): Timestamp of alert trigger.
        alert_level (int: {0, 2, 3}): Subsurface alert level.
    """
    
    # get tsm_id
    query = "SELECT tsm_id FROM tsm_sensors "
    query += "where site_id = {} ".format(site_id)
    query += "and (date_deactivated is null or date_deactivated > '{}')".format(ts)
    tsm_id = db.df_read(query, resource='sensor_data').values.flatten()
    tsm_id = random.choice(tsm_id)
    
    # writes to node_alerts; defaults to node 1 and vel_alert
    ts_list = pd.date_range(end=ts, freq='30min', periods=4)
    node_alerts = pd.DataFrame({'ts': ts_list, 'node_id': [1]*len(ts_list),
                                'tsm_id': [tsm_id]*len(ts_list),
                                'disp_alert': [0]*len(ts_list),
                                'vel_alert': [alert_level]*len(ts_list)})
    db.df_write(data_table = sms.DataTable("node_alerts", node_alerts), resource='sensor_data')
    
    # writes to tsm_alerts
    tsm_alerts = pd.DataFrame({'ts': [ts], 'tsm_id': [tsm_id],
                               'alert_level': [alert_level],
                               'ts_updated': [ts]})
    db.df_write(data_table = sms.DataTable("tsm_alerts", tsm_alerts), resource='sensor_data')

    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[(trigger_symbol.trigger_source == 'subsurface') & (trigger_symbol.alert_level == alert_level), 'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id], 'trigger_sym_id': [trigger_sym_id], 'ts': [ts], 'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')

    # details for trigger tech info
    tsm_alerts.loc[:, 'node_id'] = 1
    tsm_alerts.loc[:, 'disp_alert'] = 0
    tsm_alerts.loc[:, 'vel_alert'] = alert_level
    
    return tsm_alerts
Code example #13
def send_unsent_notif(df, curr_release):
    ts = curr_release.strftime('%I%p %B %d, %Y')
    if len(df) != 0:
        unsent_ewi = '\n'.join(list(map(lambda x: ': '.join(x), df.values)))
        sms_msg = 'Unsent EWI SMS (' + ts + '):\n\n' + unsent_ewi
        smsoutbox_user_status = get_recipient(curr_release)
    else:
        sms_msg = 'Sent all EWI SMS (' + ts + ')'
        smsoutbox_user_status = get_recipient(curr_release, unsent=False)
    smsoutbox_users = pd.DataFrame({
        'sms_msg': [sms_msg],
        'source': ['central']
    })
    data_table = sms.DataTable('smsoutbox_users', smsoutbox_users)
    outbox_id = db.df_write(data_table, connection='gsm_pi',
                            last_insert=True)[0][0]

    smsoutbox_user_status.loc[:, 'outbox_id'] = outbox_id
    data_table = sms.DataTable('smsoutbox_user_status', smsoutbox_user_status)
    db.df_write(data_table, connection='gsm_pi')
Code example #14
def send_reminder(ts=None):
    # None sentinel: a datetime.now() default is evaluated once at definition time
    if ts is None:
        ts = datetime.now()
    shift_ts = release_time(ts)+timedelta(0.5)
    
    query = """SELECT * FROM monshiftsched
            WHERE ts = '{}'""".format(shift_ts)
    df = db.df_read(query)
    df = df.rename(columns={'iompmt': 'MT', 'iompct': 'CT'})
    
    sched = (shift_ts-timedelta(hours=0.25)).strftime("%B %d, %Y %I:%M%p")
    greeting = ts.strftime("%p")
    if greeting == 'AM':
        greeting = 'morning'
    else:
        greeting = 'evening'
        
    query = """SELECT site_code FROM commons_db.sites WHERE active = 1 
            AND site_id NOT IN (SELECT site_id FROM analysis_db.markers WHERE in_use = 1)"""
    sites = ', '.join(db.df_read(query, resource= "sensor_analysis").values.flatten()).upper()
    sites = ', and'.join(sites.rsplit(',', 1))
    
    IOMP_dict = df.loc[:, ['MT', 'CT']].to_dict(orient='records')[0]
    IOMP_num = get_mobile()
    for IOMP, name in IOMP_dict.items():
        sms_msg = ("Monitoring shift reminder:\n\n"
                "Good {} {}, you are assigned to be the IOMP-{} for {}.\n\n"
                "Please be reminded that sites {} currently do not have markers installed. "
                "Instead of asking for ground measurement, please ask for "
                "ground observation.").format(greeting, name, IOMP, sched, sites)
        print(sms_msg, '\n')
        outbox = pd.DataFrame({'sms_msg': [sms_msg], 'source': ['central']})
        mobile_id = IOMP_num.loc[IOMP_num.nickname == name, 'mobile_id'].values
        gsm_id = IOMP_num.loc[IOMP_num.nickname == name, 'gsm_id'].values
        if len(mobile_id) != 0 and len(gsm_id) != 0:
            data_table = sms.DataTable('smsoutbox_users', outbox)
            outbox_id = db.df_write(data_table, resource='sms_data', last_insert=True)[0][0]
            status = pd.DataFrame({'outbox_id': [outbox_id]*len(mobile_id), 'mobile_id': mobile_id,
                                   'gsm_id': gsm_id})
            data_table = sms.DataTable('smsoutbox_user_status', status)
            db.df_write(data_table, resource='sms_data')
        else:
            print("No mobile number")
Code example #15
def send_notif(ts=None):
    # None sentinel: a datetime.now() default is evaluated once at definition time
    if ts is None:
        ts = datetime.now()
    start = lib.release_time(ts) - timedelta(hours=4)
    if (ts - start).total_seconds() / 3600 < 1.5:
        notif = olivia.main(ts)
        sms_msg = '\n'.join(
            list(filter(lambda x: x.startswith('Un'), notif.split('\nNo '))))
    else:
        sms_msg = gndmeas.main(ts)
    if sms_msg != '':
        smsoutbox_user_status = get_recipient(ts)
        smsoutbox_users = pd.DataFrame({
            'sms_msg': [sms_msg],
            'source': ['central']
        })
        data_table = sms.DataTable('smsoutbox_users', smsoutbox_users)
        outbox_id = db.df_write(data_table,
                                connection='gsm_pi',
                                last_insert=True)[0][0]

        smsoutbox_user_status.loc[:, 'outbox_id'] = outbox_id
        data_table = sms.DataTable('smsoutbox_user_status',
                                   smsoutbox_user_status)
        db.df_write(data_table, connection='gsm_pi')
Code example #16
def update_table_data(noah_id, gauge_name, fdate, tdate, noah_gauges):
    """Updates data of table gauge_name from fdate to tdate.
    
    Args:
        noah_id (int): Device id of noah data.
        gauge_name (str): Name of table containing rainfall data of noah_id.
        fdate (timestamp): Timestamp start of data to be downloaded.
        tdate (timestamp): Timestamp end of data to be downloaded.
        noah_gauges (dataframe): Rain gauge properties: id, name, data source.

    """

    noah_data = download_rainfall_noah(noah_id, fdate, tdate)
    cur_ts = datetime.now()

    if noah_data.empty:
        qdb.print_out("    no data...")

        #Insert an entry with values: [timestamp, -1] as a marker
        #-1 values should not be included in computation of cml rainfall
        if pd.to_datetime(tdate) <= cur_ts:
            place_holder_data = pd.DataFrame({"ts": [tdate], "rain": [-1.0]})
            data_table = sms.DataTable(gauge_name, place_holder_data)
            db.df_write(data_table)

    else:
        #Insert the new data on the noahid table
        data_table = sms.DataTable(gauge_name, noah_data)
        db.df_write(data_table)

    #The table is already up to date
    if pd.to_datetime(tdate) > cur_ts:
        return
    else:
        #run another update pass via update_single_table until the most
        #recent timestamp is hit
        update_single_table(noah_gauges)
Code example #17
File: eqevents.py Project: dynatech/updews-pycodes
def main(hours=''):
    auth_api = get_auth()
    username = '******'
    
    if hours == '':
        try:
            hours = int(sys.argv[1])
        except (IndexError, ValueError):
            hours = 0.25
    end_date = datetime.utcnow() - timedelta(hours=hours)

    for status in Cursor(auth_api.user_timeline, id=username).items():
        text = auth_api.get_status(status.id, tweet_mode="extended").full_text
        if 'earthquake' in text.lower():
            try:
                print(status.created_at)
                df = get_eq_events(text)
                data_table = sms.DataTable('earthquake_events', df)
                db.df_write(data_table)
            except:
                pass
    
        if status.created_at < end_date:
            break
Code example #18
def write_observation(surf_df, site_id):
    mo_df = surf_df.loc[:, ['site_id', 'ts', 'meas_type', 'observer_name']]
    mo_df.loc[:, 'data_source'] = 'ops'
    mo_df.loc[:, 'reliability'] = 1
    mo_df.loc[:, 'weather'] = 'maaraw'
    mo_id = dbio.df_write(data_table=smsclass.DataTable(
        "marker_observations", mo_df),
                          resource='sensor_data',
                          last_insert=True)[0][0]
    if mo_id == 0:
        query = "SELECT marker_observations.mo_id FROM marker_observations "
        query += "WHERE ts = '{ts}' and site_id = '{site_id}'"
        query = query.format(ts=surf_df['ts'].values[0], site_id=site_id)
        mo_id = dbio.read(query, resource='sensor_data')[0][0]
    surf_df = surf_df.dropna(axis=1)
    md_df = surf_df.loc[:, surf_df.columns.astype(str).str.isnumeric()]
    md_df = md_df.transpose().reset_index()
    md_df.columns = ['marker_id', 'measurement']
    md_df.loc[:, 'mo_id'] = mo_id
    dbio.df_write(data_table=smsclass.DataTable("marker_data", md_df),
                  resource='sensor_data')
    ma.generate_surficial_alert(site_id, ts=mo_df.ts.values[0])
Code example #19
def drift_detection(acc_id="", f_time=pd.to_datetime(dt.now() - td(weeks=12))):
    accelerometers = memory.get('DF_ACCELEROMETERS')
    acc_det = accelerometers[accelerometers.accel_id == acc_id].iloc[0]

    try:
        df = q.get_raw_accel_data(tsm_id=acc_det.tsm_id,
                                  node_id=acc_det.node_id,
                                  accel_number=acc_det.accel_number,
                                  from_time=f_time)
    #node_id is out of range
    except ValueError:
        return 0
    #no tilt_***** table in the db
    except AttributeError:
        return 0

    #df is empty
    if df.empty:
        return 0

    #Resample 30min
    df = df.set_index('ts').resample('30min').first()

    #Integer index
    N = len(df.index)
    df['i'] = range(1, N + 1, 1)

    # Compute accelerometer raw value
    df.loc[df.x < -2048, 'x'] += 4096
    df.loc[df.y < -2048, 'y'] += 4096
    df.loc[df.z < -2048, 'z'] += 4096

    # Compute accelerometer magnitude
    df['mag'] = ((df.x / 1024) * (df.x / 1024) + (df.y / 1024) *
                 (df.y / 1024) + (df.z / 1024) * (df.z / 1024)).apply(np.sqrt)

    #count number of data
    dfw = pd.DataFrame()
    dfw['count'] = df.mag.resample('1W').count()

    # Filter data with very big/small magnitude
    df[df.mag > 3.0] = np.nan
    df[df.mag < 0.5] = np.nan

    # Compute mean and standard deviation in time frame
    df['ave'] = df.mag.rolling(window=12, center=True).mean()
    df['stdev'] = df.mag.rolling(window=12, center=True).std()

    # Filter data with outlier values in time frame
    df[(df.mag > df.ave + 3 * df.stdev) & (df.stdev != 0)] = np.nan
    df[(df.mag < df.ave - 3 * df.stdev) & (df.stdev != 0)] = np.nan

    #interpolate missing data
    df = df.interpolate()

    # Resample every six hours
    df = df.resample('6H').mean()

    # Recompute standard deviation after resampling
    df.stdev = df.mag.rolling(window=2, center=False).std()
    df.stdev = df.stdev.shift(-1)
    df.stdev = df.stdev.rolling(window=2, center=False).mean()

    # Filter data with large standard deviation
    df[df.stdev > 0.05] = np.nan

    # Compute velocity and acceleration of magnitude
    df['vel'] = df.mag - df.mag.shift(1)
    df['acc'] = df.vel - df.vel.shift(1)

    #Resample 1week
    dfw['vel_week'] = df.vel.resample('1W').mean()
    dfw['acc_week'] = df.acc.resample('1W').mean()
    dfw['corr'] = df.resample('1W').mag.corr(df.i)
    dfw['corr'] = dfw['corr']**2

    # Get the data that exceeds the threshold value
    dfw = dfw[(abs(dfw['acc_week']) > 0.000003) & (dfw['corr'] > 0.7) &
              (dfw['count'] >= 84)]

    #Compute the difference for each threshold data
    if len(dfw) > 0:
        dfw = dfw.reset_index()
        dfw['diff_TS'] = dfw.ts - dfw.ts.shift(1)
        dfw['sign'] = dfw.vel_week * dfw.vel_week.shift(1)

    #Check if there are 4 weeks consecutive threshold data
    week = 1
    days = td(days=0)
    while days < td(days=28) and week < len(dfw.index):
        if ((dfw.loc[week]['diff_TS'] <= td(days=14)) &
            (dfw.loc[week]['sign'] > 0)):
            days = days + dfw.loc[week]['diff_TS']
        else:
            days = td(days=0)
        week = week + 1

    if days >= td(days=28):
        print(acc_id, dfw.ts[week - 1])

        #    df['mag'].plot()
        #    plt.savefig(OutputFP+col+nids+a+"-mag")
        #    plt.close()

        dft = pd.DataFrame(columns=['accel_id', 'ts_identified'])
        dft.loc[0] = [acc_id, dfw.ts[week - 1]]

        #save to db
        db.df_write(smsclass.DataTable("drift_detection", dft))

        print("very nice!")
Code example #20
def main():

    eq_events = get_unprocessed()
    sym = get_alert_symbol()
    sites = get_sites()
    dfg = sites.groupby('site_id')
    eq_a = pd.DataFrame(columns=['site_id', 'eq_id', 'distance'])
    EVENTS_TABLE = 'earthquake_events'

    for i in eq_events.index:
        cur = eq_events.loc[i]

        mag = cur.magnitude
        eq_lat = cur.latitude
        eq_lon = cur.longitude
        ts = cur.ts

        critdist = get_crit_dist(mag)
        print(critdist)
        if False in np.isfinite([mag, eq_lat,
                                 eq_lon]):  #has NaN value in mag, lat, or lon
            query = "UPDATE %s SET processed = -1 where eq_id = %s" % (
                EVENTS_TABLE, i)
            dynadb.write(query=query, resource="sensor_data")
            continue

        if mag < 4:
            print("> Magnitude too small: %.1f" % mag)
            query = "UPDATE %s SET processed = 1 where eq_id = %s" % (
                EVENTS_TABLE, i)
            dynadb.write(query=query, resource="sensor_data")
            continue
        else:
            print("> Magnitude reached threshold: %.1f" % mag)

        # magnitude is big enough to consider
        sites = dfg.apply(get_distance_to_eq, eq_lat=eq_lat, eq_lon=eq_lon)
        print(sites)
        crits = sites.loc[sites.distance <= critdist, :]

        if len(crits) == 0:
            print("> No affected sites. ")
            query = "UPDATE %s SET processed = 1, critical_distance = %s where eq_id = %s" % (
                EVENTS_TABLE, critdist, i)
            dynadb.write(query=query, resource="sensor_data")
            continue
        else:
            #some sites have a trigger
            print(">> Possible sites affected: %d" %
                  (len(crits.site_id.values)))

        crits.loc[:, 'ts'] = ts
        crits.loc[:, 'source'] = 'earthquake'
        crits.loc[:, 'trigger_sym_id'] = sym
        crits.loc[:, 'ts_updated'] = ts
        crits.loc[:, 'eq_id'] = i

        eq_a = crits.loc[:, ['eq_id', 'site_id', 'distance']]
        op_trig = crits.loc[:,
                            ['ts', 'site_id', 'trigger_sym_id', 'ts_updated']]

        # write to tables
        data_table = sms.DataTable("operational_triggers", op_trig)
        dynadb.df_write(data_table)
        data_table = sms.DataTable("earthquake_alerts", eq_a)
        dynadb.df_write(data_table)

        query = "UPDATE %s SET processed = 1, critical_distance = %s where eq_id = %s " % (
            EVENTS_TABLE, critdist, i)
        dynadb.write(query=query, resource="sensor_data")

        print(">> Alert iniated.\n")
Code example #21
File: optrig.py Project: dynatech/updews-pycodes
def surficial(site_id, ts, alert_level):
    """Insert values to marker_observations, marker_data, marker_alerts, and 
    operational_triggers to (re)trigger surficial alert.
    
    Args:
        site_id (int): ID of site to compute surficial analysis for.
        ts (datetime): Timestamp of alert trigger.
        alert_level (int: {0, 1, 2, 3}): Surficial alert level.
    """

    # get last data for site_id
    conn = mem.get('DICT_DB_CONNECTIONS')
    query = "SELECT ts, marker_id, marker_name, measurement "
    query += "FROM {analysis}.marker_observations "
    query += "INNER JOIN {common}.sites USING (site_id) "
    query += "INNER JOIN {analysis}.marker_data using (mo_id) "
    query += "INNER JOIN (SELECT data_id, displacement, time_delta, alert_level, processed FROM {analysis}.marker_alerts) sub1 USING (data_id) "
    query += "INNER JOIN (SELECT marker_id, marker_name FROM {analysis}.view_marker_history) sub2 USING (marker_id) "
    query += "WHERE site_id = {site_id} "
    query += "AND ts IN ( "
    query += "  SELECT MAX(ts) FROM {analysis}.marker_observations "
    query += "  WHERE ts < '{ts}' "
    query += "    AND site_id = {site_id})"
    query = query.format(analysis=conn['analysis']['schema'], common=conn['common']['schema'], site_id=site_id, ts=ts)
    df = db.df_read(query, resource='sensor_analysis')
        
    # compute diff in measurements to reach threshold
    if alert_level == 3:
        rate = 1.8
    elif alert_level in (1,2):
        rate = 0.25
    else:
        rate = 0
    meas_diff = np.ceil(rate * (ts-df.ts[0]).total_seconds()/3600)

    # input measurements in inbox
    gndmeas = df.loc[:, ['marker_id', 'marker_name', 'measurement']]
    gndmeas.loc[:, 'ts'] = ts
    gndmeas.loc[:, 'measurement'] += meas_diff
    if alert_level == 1:
        temp_gndmeas = gndmeas.copy()
        temp_gndmeas.loc[:, 'ts'] -= (ts - df.ts[0])/2
        temp_gndmeas.loc[:, 'measurement'] += meas_diff
        # filler measurement for alert level 1
        df_obv = pd.DataFrame({'meas_type': ['ROUTINE'], 'site_id': [site_id],
                               'weather': ['MAARAW'], 'observer_name':['TOPSSOFTWAREINFRA'],
                               'reliability': [1], 'data_source': ['SMS'],
                               'ts': [temp_gndmeas.ts[0]]})
        mo_id = int(db.df_write(data_table=sms.DataTable("marker_observations", 
            df_obv), resource='sensor_data', last_insert=True)[0][0])
        temp_gndmeas.loc[:, 'mo_id'] = mo_id
        df_data = temp_gndmeas.loc[:, ['mo_id', 'marker_id', 'measurement']]
        db.df_write(data_table = sms.DataTable("marker_data", df_data), resource='sensor_data')
        surf.generate_surficial_alert(site_id = site_id, ts = temp_gndmeas.ts[0])
    # measurement for ts given
    df_obv = pd.DataFrame({'meas_type': ['ROUTINE'], 'site_id': [site_id],
                           'weather': ['MAARAW'], 'observer_name':['TOPSSOFTWAREINFRA'],
                           'reliability': [1], 'data_source': ['SMS'],
                           'ts': [ts]})
    mo_id = int(db.df_write(data_table=sms.DataTable("marker_observations", 
        df_obv), resource='sensor_data', last_insert=True)[0][0])
    gndmeas.loc[:, 'mo_id'] = mo_id
    df_data = gndmeas.loc[:, ['mo_id', 'marker_id', 'measurement']]
    db.df_write(data_table = sms.DataTable("marker_data", df_data), resource='sensor_data')
    surf.generate_surficial_alert(site_id = site_id, ts = ts)
    
    # details for trigger tech info
    time_delta = np.round((ts - df.ts[0]).total_seconds()/3600, 2)
    if alert_level == 1:
        time_delta /= 2
    gndmeas.loc[:, 'displacement'] = meas_diff
    gndmeas.loc[:, 'time_delta'] = time_delta
    gndmeas.loc[:, 'alert_level'] = alert_level

    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[(trigger_symbol.trigger_source == 'surficial') & (trigger_symbol.alert_level == alert_level), 'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id], 'trigger_sym_id': [trigger_sym_id], 'ts': [ts], 'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    
    return gndmeas
Code example #22
File: querydb.py Project: dynatech/dynaslope3
def alert_to_db(df, table_name):
    """Writes alert to database. If an equivalent alert was written within
    the past 30 minutes, updates its ts_updated instead of writing a new row.
    
    Args:
        df (dataframe): Dataframe to be written to database.
        table_name (str): Name of table df to be written to.
    
    """

    if not does_table_exist(table_name):
        #Create a tsm_alerts table if it doesn't exist yet
        if table_name == 'tsm_alerts':
            create_tsm_alerts()
        #Create a public_alerts table if it doesn't exist yet
        elif table_name == 'public_alerts':
            create_public_alerts()
        #Create a operational_triggers table if it doesn't exist yet
        elif table_name == 'operational_triggers':
            create_operational_triggers()
        else:
            print_out('unrecognized table : ' + table_name)
            return

    if table_name == 'operational_triggers':
        # checks trigger source
        query = "SELECT * FROM "
        query += "  operational_trigger_symbols AS op "
        query += "INNER JOIN "
        query += "  trigger_hierarchies AS trig "
        query += "ON op.source_id = trig.source_id "
        all_trig = db.df_read(query)
        trigger_source = all_trig[all_trig.trigger_sym_id == \
                    df['trigger_sym_id'].values[0]]['trigger_source'].values[0]

        # does not write ND (no data, alert level -1) subsurface alerts
        if trigger_source == 'subsurface':
            alert_level = all_trig[all_trig.trigger_sym_id == \
                    df['trigger_sym_id'].values[0]]['alert_level'].values[0]
            if alert_level == -1:
                return
        # if ts does not exist, writes alert; else: updates alert level
        elif trigger_source == 'surficial':

            query = "SELECT trigger_id, trig.trigger_sym_id FROM "
            query += "  (SELECT trigger_sym_id, alert_level, alert_symbol, "
            query += "  op.source_id, trigger_source FROM "
            query += "    operational_trigger_symbols AS op "
            query += "  INNER JOIN "
            query += "    (SELECT * FROM trigger_hierarchies "
            query += "    WHERE trigger_source = '%s' " % trigger_source
            query += "    ) AS trig "
            query += "  ON op.source_id = trig.source_id "
            query += "  ) AS sym "
            query += "INNER JOIN "
            query += "  (SELECT * FROM operational_triggers "
            query += "  WHERE site_id = %s " % df['site_id'].values[0]
            query += "  AND ts = '%s' " % df['ts'].values[0]
            query += "  ) AS trig "
            query += "ON trig.trigger_sym_id = sym.trigger_sym_id"
            surficial = db.df_read(query)

            if len(surficial) == 0:
                data_table = sms.DataTable(table_name, df)
                db.df_write(data_table)
            else:
                trigger_id = surficial['trigger_id'].values[0]
                trigger_sym_id = df['trigger_sym_id'].values[0]
                if trigger_sym_id != surficial['trigger_sym_id'].values[0]:
                    query = "UPDATE %s " % table_name
                    query += "SET trigger_sym_id = '%s' " % trigger_sym_id
                    query += "WHERE trigger_id = %s" % trigger_id
                    db.write(query)

            return

        query = "SELECT * FROM "
        query += "  (SELECT trigger_sym_id, alert_level, alert_symbol, "
        query += "    op.source_id, trigger_source FROM "
        query += "      operational_trigger_symbols AS op "
        query += "    INNER JOIN "
        query += "      (SELECT * FROM trigger_hierarchies "
        query += "      WHERE trigger_source = '%s' " % trigger_source
        query += "      ) AS trig "
        query += "    ON op.source_id = trig.source_id "
        query += "    ) AS sym "
        query += "INNER JOIN "
        query += "  ( "

    else:
        query = ""

    if table_name == 'tsm_alerts':
        where_id = 'tsm_id'
    else:
        where_id = 'site_id'

    ts_updated = pd.to_datetime(
        df['ts_updated'].values[0]) - timedelta(hours=0.5)

    # previous alert
    query += "  SELECT * FROM %s " % table_name
    query += "  WHERE %s = %s " % (where_id, df[where_id].values[0])
    query += "  AND ((ts <= '%s' " % df['ts_updated'].values[0]
    query += "    AND ts_updated >= '%s') " % df['ts_updated'].values[0]
    query += "  OR (ts_updated <= '%s' " % df['ts_updated'].values[0]
    query += "    AND ts_updated >= '%s')) " % ts_updated

    if table_name == 'operational_triggers':

        query += "  ) AS trig "
        query += "ON trig.trigger_sym_id = sym.trigger_sym_id "

    query += "ORDER BY ts DESC LIMIT 1"

    df2 = db.df_read(query)

    if table_name == 'public_alerts':
        query = "SELECT * FROM %s " % table_name
        query += "WHERE site_id = %s " % df['site_id'].values[0]
        query += "AND ts = '%s' " % df['ts'].values[0]
        query += "AND pub_sym_id = %s" % df['pub_sym_id'].values[0]

        df2 = df2.append(db.df_read(query))

    # writes alert if no alerts within the past 30mins
    if len(df2) == 0:
        data_table = sms.DataTable(table_name, df)
        db.df_write(data_table)
    # does not update ts_updated if ts in written ts to ts_updated range
    elif pd.to_datetime(df2['ts_updated'].values[0]) >= \
                  pd.to_datetime(df['ts_updated'].values[0]):
        pass
    # if diff prev alert, writes to db; else: updates ts_updated
    else:
        if table_name == 'tsm_alerts':
            alert_comp = 'alert_level'
            pk_id = 'ta_id'
        elif table_name == 'public_alerts':
            alert_comp = 'pub_sym_id'
            pk_id = 'public_id'
        else:
            alert_comp = 'trigger_sym_id'
            pk_id = 'trigger_id'

        same_alert = df2[alert_comp].values[0] == df[alert_comp].values[0]

        try:
            same_alert = same_alert[0]
        except:
            pass

        if not same_alert:
            data_table = sms.DataTable(table_name, df)
            db.df_write(data_table)
        else:
            query = "UPDATE %s " % table_name
            query += "SET ts_updated = '%s' " % df['ts_updated'].values[0]
            query += "WHERE %s = %s" % (pk_id, df2[pk_id].values[0])
            db.write(query)
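
Stripped of the SQL, the decision at the end of alert_to_db is a small three-way branch. A hypothetical standalone restatement that is easy to unit-test (names are illustrative, not project API):

import pandas as pd

def decide(prev, new, alert_comp='trigger_sym_id'):
    """prev: latest alert row within the 30-min lookback window, or None."""
    if prev is None:
        return 'write new row'
    if pd.to_datetime(prev['ts_updated']) >= pd.to_datetime(new['ts_updated']):
        return 'no-op (existing row already covers this timestamp)'
    if prev[alert_comp] != new[alert_comp]:
        return 'write new row'
    return 'update ts_updated of existing row'

new = {'trigger_sym_id': 14, 'ts_updated': '2021-01-01 12:00'}
print(decide(None, new))                                                       # write new row
print(decide({'trigger_sym_id': 14, 'ts_updated': '2021-01-01 11:30'}, new))  # update
print(decide({'trigger_sym_id': 15, 'ts_updated': '2021-01-01 11:30'}, new))  # write new row
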
Code example #23
File: smsparser.py Project: dynatech/dynaslope3
def parse_all_messages(args, allmsgs=None):
    """
    - Processes all messages that came from smsinbox_(loggers/users) and
      selects the parsing method depending on the sms message.

    :param args: argument list of modes and criteria of sms message.
    :param allmsgs: list of all messages selected from the loggers and users tables.
    :type args: obj
    :type allmsgs: obj
    
    Returns:
        bool: True if parsing succeeds, False if it fails.

    """
    read_success_list = []
    read_fail_list = []

    print("table:", args.table)

    ref_count = 0

    if not allmsgs:
        print('Error: No message to Parse')
        sys.exit()


#    total_msgs = len(all_msgs)
#
#    sc = mem.server_config()
#    mc = mem.get_handle()
#    table_sim_nums = mc.get('%s_mobile_sim_nums' % args.table[:-1])

    resource = "sensor_data"

    while allmsgs:
        is_msg_proc_success = True
        print('\n\n*******************************************************')

        sms = allmsgs.pop(0)
        ref_count += 1

        if args.table == 'loggers':
            # start of sms parsing

            if re.search("^[A-Z]{3}X[A-Z]{1}\*U\*", sms.msg):
                df_data = extenso.uts(sms)
                if df_data:
                    print(df_data.data)
                    dbio.df_write(df_data, resource=resource)
                else:
                    is_msg_proc_success = False

            if re.search("^[A-Z]{3}L[A-Z]{1}\*L\*", sms.msg):
                df_data = lidarparser.lidar(sms)
                if df_data:
                    print(df_data.data)
                    dbio.df_write(df_data, resource=resource)
                else:
                    is_msg_proc_success = False
            elif re.search("\*FF", sms.msg) or re.search("PZ\*", sms.msg):
                is_msg_proc_success = process_piezometer(sms)
            # elif re.search("[A-Z]{4}DUE\*[A-F0-9]+\*\d+T?$",sms.msg):
            elif re.search("[A-Z]{4}DUE\*[A-F0-9]+\*.*", sms.msg):
                df_data = subsurface.v1(sms)
                if df_data:
                    print(df_data[0].data, df_data[1].data)
                    dbio.df_write(df_data[0], resource=resource)
                    dbio.df_write(df_data[1], resource=resource)
                    tsm_name = df_data[0].name.split("_")
                    tsm_name = str(tsm_name[1])
                    timestamp = df_data[0].data.reset_index()
                    timestamp = str(timestamp['ts'][0])
                    spawn_alert_gen(tsm_name, timestamp)
                else:
                    print('>> Value Error')
                    is_msg_proc_success = False

            elif re.search("^[A-Z]{4,5}\*[xyabcdXYABCD]\*[A-F0-9]+\*[0-9]+T?$",
                           sms.msg):
                try:
                    df_data = subsurface.v2(sms)
                    if df_data:
                        try:
                            print(df_data.data)
                            dbio.df_write(df_data, resource=resource)
                            tsm_name = df_data.name.split("_")
                            tsm_name = str(tsm_name[1])
                            timestamp = df_data.data.reset_index()
                            timestamp = str(timestamp['ts'][0])
                            spawn_alert_gen(tsm_name, timestamp)
                        except:
                            print('>> SQL Error')
                    else:
                        print('>> Value Error')
                        is_msg_proc_success = False

                except IndexError:
                    print("\n\n>> Error: Possible data type error")
                    print(sms.msg)
                    is_msg_proc_success = False
                except ValueError:
                    print(">> Value error detected")
                    is_msg_proc_success = False
                except MySQLdb.ProgrammingError:
                    print(">> Error writing data to DB")
                    is_msg_proc_success = False

            elif re.search(
                    r"^[A-Z]{5}\*[A-Za-z0-9/+]{2}\*[A-Za-z0-9/+]+\*[0-9]{12}$",
                    sms.msg):
                try:
                    df_data = subsurface.b64Parser(sms)
                    if df_data:
                        print(df_data.data)
                        dbio.df_write(df_data, resource=resource)
                        tsm_name = df_data.name.split("_")
                        tsm_name = str(tsm_name[1])
                        timestamp = df_data.data.reset_index()
                        timestamp = str(timestamp['ts'][0])
                        spawn_alert_gen(tsm_name, timestamp)
                    else:
                        print('>>b64 Value Error')
                        is_msg_proc_success = False

                except IndexError:
                    print("\n\n>> Error: Possible data type error")
                    print(sms.msg)
                    is_msg_proc_success = False
                except ValueError:
                    print(">> Value error detected")
                    is_msg_proc_success = False
                except MySQLdb.ProgrammingError:
                    print(">> Error writing data to DB")
                    is_msg_proc_success = False

            elif re.search("[A-Z]{4}\*[A-F0-9]+\*[0-9]+$", sms.msg):
                df_data = subsurface.v1(sms)
                if df_data:
                    print(df_data[0].data, df_data[1].data)
                    dbio.df_write(df_data[0], resource=resource)
                    dbio.df_write(df_data[1], resource=resource)
                    tsm_name = df_data[0].name.split("_")
                    tsm_name = str(tsm_name[1])
                    timestamp = df_data[0].data.reset_index()
                    timestamp = str(timestamp['ts'][0])
                    spawn_alert_gen(tsm_name, timestamp)
                else:
                    print('>> Value Error')
                    is_msg_proc_success = False
            #check if message is from rain gauge
            elif re.search("^\w{4},[\d\/:,]+", sms.msg):
                df_data = rain.v3(sms)
                if df_data:
                    print(df_data.data)
                    dbio.df_write(df_data, resource=resource)
                else:
                    print('>> Value Error')
            elif re.search("ARQ\+[0-9\.\+/\- ]+$", sms.msg):
                try:
                    df_data = rain.rain_arq(sms)
                    if df_data:
                        print(df_data.data)
                        dbio.df_write(df_data, resource=resource)
                    else:
                        print('>> Value Error')
                except:
                    print("Kennex temp fix")
                    pass

            elif (sms.msg.split('*')[0] == 'COORDINATOR'
                  or sms.msg.split('*')[0] == 'GATEWAY'):
                is_msg_proc_success = process_gateway_msg(sms)
            elif common_logger_sms(sms) > 0:
                print('inbox_id: ', sms.inbox_id)
                print('match')
            else:
                print('>> Unrecognized message format: ')
                print('NUM: ', sms.sim_num)
                print('MSG: ', sms.msg)
                is_msg_proc_success = False

        elif args.table == 'users':
            if re.search("EQINFO", sms.msg.upper()):
                data_table = parser.earthquake.eq(sms)
                if data_table:
                    dbio.df_write(data_table, resource=resource)
                else:
                    is_msg_proc_success = False
            elif re.search("ACK \d+ .+", sms.msg.upper()):
                is_msg_proc_success = amsg.process_ack_to_alert(sms)
            elif re.search("^ *(R(O|0)*U*TI*N*E )|(EVE*NT )", sms.msg.upper()):
                is_msg_proc_success = process_surficial_observation(sms)
            else:
                print("User SMS not in known template.", sms.msg)
                is_msg_proc_success = True

        else:
            raise ValueError("Table value not recognized (%s)" % (args.table))

        if is_msg_proc_success:
            read_success_list.append(sms.inbox_id)
        else:
            read_fail_list.append(sms.inbox_id)

        print(">> SMS count processed:", ref_count)

        # method for updating the read_status all messages that have been processed
        # so that they will not be processed again in another run
        if ref_count % 200 == 0:
            smstables.set_read_status(read_success_list,
                                      read_status=1,
                                      table=args.table,
                                      host=args.dbhost)
            smstables.set_read_status(read_fail_list,
                                      read_status=-1,
                                      table=args.table,
                                      host=args.dbhost)

            read_success_list = []
            read_fail_list = []

    smstables.set_read_status(sms_id_list=read_success_list,
                              read_status=1,
                              table=args.table,
                              host=args.dbhost)
    smstables.set_read_status(sms_id_list=read_fail_list,
                              read_status=-1,
                              table=args.table,
                              host=args.dbhost)
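
The loggers branch above is regex dispatch on the message body; a stripped-down standalone sketch of the same pattern (patterns abbreviated from the snippet, handler names illustrative):

import re

ROUTES = [
    (re.compile(r"^[A-Z]{3}X[A-Z]\*U\*"), 'extenso.uts'),
    (re.compile(r"^\w{4},[\d/:,]+"), 'rain.v3'),
    (re.compile(r"ARQ\+[0-9.+/\- ]+$"), 'rain.rain_arq'),
]

def route(msg):
    # first matching pattern wins, mirroring the elif chain in the snippet
    for pattern, handler in ROUTES:
        if pattern.search(msg):
            return handler
    return None  # unrecognized message format

print(route('ABCXA*U*payload'))           # extenso.uts
print(route('ABCD,21/01/01,06:30,12.5'))  # rain.v3
print(route('hello'))                     # None
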
Code example #24
File: smsparser.py Project: dynatech/dynaslope3
def process_surficial_observation(sms):
    """
    - Processes an sms message that fits the surficial observation format and
      saves the parsed message to the database.

    :param sms: list data info of sms message.
    :type sms: list
    Returns:
        bool: True if processing succeeds, False if it fails.

    """
    mc = mem.get_handle()
    surf_mark = mc.get("DF_SURFICIAL_MARKERS")
    reply_msgs = mc.get("surficial_parser_reply_messages")
    sc = mem.server_config()
    ct_sim_num = str(sc["surficial"]["ct_sim_num"])
    enable_analysis = sc["surficial"]["enable_analysis"]
    SEND_REPLY_TO_COMMUNITY = sc["surficial"]["send_reply_to_community"]
    SEND_ACK_TO_CT_PHONE = sc["surficial"]["send_ack_to_ct_phone"]

    resource = "sensor_data"

    obv = []
    try:
        obv = parser.surficial.observation(sms.msg)

    except ValueError as err_val:
        err_val = int(str(err_val))
        mc = mem.get_handle()
        messages = mc.get("surficial_parser_reply_messages")

        # print messages.iloc[err_val - 1].internal_msg
        # print messages.iloc[err_val - 1].external_msg
        sms_msg_for_operations = "{}\n\n{}".format(
            messages.iloc[err_val - 1].internal_msg, sms.msg)
        smstables.write_outbox(sms_msg_for_operations, ct_sim_num)

        return False

    site_surf_mark = surf_mark[surf_mark["site_id"] == obv["obv"]["site_id"]]

    df_meas = pd.DataFrame()
    df_meas = df_meas.from_dict(obv["markers"]["measurements"], orient='index')

    df_meas.columns = ["measurement"]
    markers = site_surf_mark.join(df_meas, on="marker_name", how="outer")

    # send message for unknown marker names
    markers_unk = markers[~(markers["marker_id"] > 0)]
    markers_unk = markers_unk[["marker_name", "measurement"]]
    markers_unk = markers_unk.set_index(["marker_name"])
    markers_unk = markers_unk.to_dict()
    internal_msg = "DEWSL Beta:\n\n%s\n\n" % (sms.msg)
    if len(markers_unk["measurement"].keys()) > 0:
        internal_msg += "%s\n%s\n\n" % (reply_msgs.iloc[13]["internal_msg"],
            "\n".join(["%s = %s" % (key, value) for (key, value) in \
                markers_unk["measurement"].items()]))

    # send message for unreported marker measurements
    markers_nd = markers[~(markers["measurement"] > 0)]
    markers_nd = markers_nd[["marker_name", "measurement"]].to_dict()
    if len(markers_nd["marker_name"].keys()) > 0:
        internal_msg += "%s\n%s" % (reply_msgs.iloc[14]["internal_msg"],
            ", ".join(["%s" % name for name in \
            markers_nd["marker_name"].values()]))

        internal_msg += "\n\n"

    print(">> Updating observations")

    df_obv = pd.DataFrame(obv["obv"], index=[0])

    mo_id = dbio.df_write(data_table=smsclass.DataTable(
        "marker_observations", df_obv),
                          resource=resource,
                          last_insert=True)

    try:
        mo_id = int(mo_id[0][0])
    except (ValueError, TypeError):
        print(
            "Error: conversion of measurement observation id during last insert"
        )
        internal_msg += "\n\nERROR: Resultset conversion"
        smstables.write_outbox(internal_msg, ct_sim_num)
        return False

    print(">> Updating marker measurements")
    if mo_id == 0:
        # Duplicate entry
        query = ("SELECT marker_observations.mo_id FROM marker_observations "
                 "WHERE ts = '{}' and site_id = '{}'".format(
                     obv["obv"]['ts'], obv["obv"]['site_id']))
        mo_id = dbio.read(query, resource=resource)[0][0]

    markers_ok = markers[markers["marker_id"] > 0]
    markers_ok = markers_ok[markers_ok["measurement"] > 0]
    markers_ok_for_report = markers_ok[["marker_name", "measurement"]]
    markers_ok = markers_ok[["marker_id", "measurement"]]
    markers_ok["mo_id"] = mo_id

    markers_ok.columns = ["%s" % (str(col)) for col in markers_ok.columns]

    dbio.df_write(data_table=smsclass.DataTable("marker_data", markers_ok),
                  resource=resource)

    # send success messages
    markers_ok_for_report = markers_ok_for_report.set_index(["marker_name"])
    markers_ok_for_report = markers_ok_for_report.to_dict()

    updated_measurements_str = "\n".join(["%s = %0.2f CM" % (name, meas) \
        for name, meas in markers_ok_for_report["measurement"].items()])

    success_msg = "%s\n%s\n%s" % (
        reply_msgs.iloc[12]["external_msg"],
        dt.strptime(obv["obv"]["ts"], "%Y-%m-%d %H:%M:%S").strftime("%c"),
        updated_measurements_str)

    internal_msg += "Updated measurements:\n%s" % (updated_measurements_str)

    # for ct phone c/o iomp-ct
    if SEND_ACK_TO_CT_PHONE:
        smstables.write_outbox(internal_msg, ct_sim_num)
    # for community who sent the data
    if SEND_REPLY_TO_COMMUNITY:
        smstables.write_outbox(success_msg, sms.sim_num)

    # spawn surficial measurement analysis
    if enable_analysis:
        obv = obv["obv"]
        surf_cmd_line = "python %s %d '%s' > %s 2>&1" % (
            sc['fileio']['gndalert1'], obv['site_id'], obv['ts'],
            sc['fileio']['surfscriptlogs'])
        subprocess.Popen(surf_cmd_line,
                         stdout=subprocess.PIPE,
                         shell=True,
                         stderr=subprocess.STDOUT)

    return True
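
The outer join above splits the reported measurements three ways: valid markers, unknown marker names, and known markers without a reading. A toy sketch of that split (pandas only):

import pandas as pd

site_surf_mark = pd.DataFrame({'marker_id': [1, 2], 'marker_name': ['A', 'B']})
df_meas = pd.DataFrame({'measurement': [10.5, 3.0]}, index=['A', 'C'])

markers = site_surf_mark.join(df_meas, on='marker_name', how='outer')
print(markers[~(markers['marker_id'] > 0)])    # unknown name reported: C
print(markers[~(markers['measurement'] > 0)])  # known marker, no reading: B
print(markers[(markers['marker_id'] > 0) & (markers['measurement'] > 0)])  # valid: A
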
Code example #25
def summary_writer(site_id, site_code, gauge_name, rain_id, twoyrmax, halfmax,
                   rainfall, end, write_alert):
    """Summary of cumulative rainfall, threshold, alert and rain gauge used in
    analysis of rainfall.
    
    Args:
        site_id (int): ID per site.
        site_code (str): Three-letter code per site.
        gauge_name (str): Rain gauge used in rainfall analysis.
        rain_id (int): ID of gauge_name.
        twoyrmax (float): Threshold for 3-day cumulative rainfall per site.
        halfmax (float): Threshold for 1-day cumulative rainfall per site.
        rainfall (str): Data to compute cumulative rainfall from.
        end (datetime): End timestamp of alert to be computed.
        write_alert (bool): To write alert in database.

    Returns:
        dataframe: Summary of cumulative rainfall, threshold, alert and 
                   rain gauge used in analysis of rainfall.
    
    """

    one, three = one_three_val_writer(rainfall, end)

    #threshold is reached
    if one >= halfmax or three >= twoyrmax:
        ralert = 1
    #no data
    elif one is None or math.isnan(one):
        ralert = -1
    #rainfall below threshold
    else:
        ralert = 0

    if write_alert or ralert == 1:
        if qdb.does_table_exist('rainfall_alerts') == False:
            #Create a site_alerts table if it doesn't exist yet
            qdb.create_rainfall_alerts()

        columns = ['rain_alert', 'cumulative', 'threshold']
        alerts = pd.DataFrame(columns=columns)

        if ralert == 0:
            if one >= halfmax * 0.75:
                temp_df = pd.Series(['x', one, halfmax], index=columns)
            elif three >= twoyrmax * 0.75:
                temp_df = pd.Series(['x', three, twoyrmax], index=columns)
            else:
                temp_df = pd.Series([False, np.nan, np.nan], index=columns)

            alerts = alerts.append(temp_df, ignore_index=True, sort=False)
        else:
            if one >= halfmax:
                temp_df = pd.Series(['a', one, halfmax], index=columns)
                alerts = alerts.append(temp_df, ignore_index=True, sort=False)
            if three >= twoyrmax:
                temp_df = pd.Series(['b', three, twoyrmax], index=columns)
                alerts = alerts.append(temp_df, ignore_index=True, sort=False)
            if ralert == -1:
                temp_df = pd.Series([False, np.nan, np.nan], index=columns)
                alerts = alerts.append(temp_df, ignore_index=True, sort=False)

        if alerts['rain_alert'][0] != False:
            for index, row in alerts.iterrows():
                rain_alert = row['rain_alert']
                cumulative = row['cumulative']
                threshold = row['threshold']
                if qdb.does_alert_exists(site_id, end,
                                         rain_alert).values[0][0] == 0:
                    df = pd.DataFrame({
                        'ts': [end],
                        'site_id': [site_id],
                        'rain_id': [rain_id],
                        'rain_alert': [rain_alert],
                        'cumulative': [cumulative],
                        'threshold': [threshold]
                    })
                    data_table = sms.DataTable('rainfall_alerts', df)
                    db.df_write(data_table)

    summary = pd.DataFrame({
        'site_id': [site_id],
        'site_code': [site_code],
        '1D cml': [one],
        'half of 2yr max': [round(halfmax, 2)],
        '3D cml': [three],
        '2yr max': [round(twoyrmax, 2)],
        'DataSource': [gauge_name],
        'rain_id': [rain_id],
        'alert': [ralert]
    })

    return summary
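
The ralert branch at the top of summary_writer is a pure function of the 1-day and 3-day cumulative values; a hypothetical standalone restatement with the same branch order (so a nan 1-day value with a 3-day exceedance still alerts), easy to test:

import math

def rain_alert_level(one, three, halfmax, twoyrmax):
    """Return 1 (threshold reached), -1 (no data), 0 (below threshold)."""
    if one >= halfmax or three >= twoyrmax:  # nan >= x evaluates False
        return 1
    if math.isnan(one):
        return -1
    return 0

assert rain_alert_level(55.0, 60.0, 50.0, 120.0) == 1           # 1-day threshold reached
assert rain_alert_level(float('nan'), 10.0, 50.0, 120.0) == -1  # no data
assert rain_alert_level(10.0, 30.0, 50.0, 120.0) == 0           # below thresholds
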
Code example #26
File: querydb.py Project: dynatech/updews-pycodes
def write_marker_alerts(df, connection='analysis'):
    data_table = sms.DataTable('marker_alerts', df)
    db.df_write(data_table, connection=connection)
Code example #27
File: querydb.py Project: dynatech/updews-pycodes
def write_rain_data(gauge_name, df, connection='analysis'):
    data_table = sms.DataTable(gauge_name, df)
    db.df_write(data_table, connection=connection)
Code example #28
File: querydb.py Project: dynatech/updews-pycodes
def write_rain_gauges(df, connection='analysis'):
    data_table = sms.DataTable('rainfall_gauges', df)
    db.df_write(data_table, connection=connection)