Example #1
def send_unsent_notif(df, notif_type, curr_release, validation=False):
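    """Queue an SMS summarizing unsent `notif_type` messages for the
    `curr_release` timestamp (or reporting that all were sent)."""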
    ts = curr_release.strftime('%I%p %B %d, %Y')
    if len(df) != 0:
        site_notif = '\n'.join(list(map(lambda x: ': '.join(x), df.values)))
        if validation:
            sms_msg = 'Validate measurements with displacement of 1cm and more:\n\n' + site_notif
        else:
            sms_msg = 'Unsent ' + notif_type + ' (' + ts + '):\n\n' + site_notif
        smsoutbox_user_status = get_recipient(curr_release)
    else:
        if notif_type == 'gndmeas':
            return
        sms_msg = 'Sent all ' + notif_type + ' (' + ts + ')'
        smsoutbox_user_status = get_recipient(curr_release, unsent=False)
    smsoutbox_users = pd.DataFrame({
        'sms_msg': [sms_msg],
        'source': ['central']
    })
    data_table = sms.DataTable('smsoutbox_users', smsoutbox_users)
    outbox_id = db.df_write(data_table, connection='gsm_pi',
                            last_insert=True)[0][0]

    smsoutbox_user_status.loc[:, 'outbox_id'] = outbox_id
    data_table = sms.DataTable('smsoutbox_user_status', smsoutbox_user_status)
    db.df_write(data_table, connection='gsm_pi')
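
A minimal usage sketch, assuming `df` holds string columns (the message builder joins each row with ': ') and that `get_recipient`, `sms`, and `db` are available in the module; all values are hypothetical:

import pandas as pd
from datetime import datetime

# hypothetical unsent-notification rows; column names are assumptions
unsent = pd.DataFrame({'site_code': ['AGB', 'UMI'],
                       'event': ['event monitoring', 'routine']})
send_unsent_notif(unsent, 'EWI SMS', datetime(2021, 3, 1, 12, 0))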
Example #2
def earthquake(site_id, ts):
    """Insert values to earthquake_events, earthquake_alerts, and 
    operational_triggers to (re)trigger subsurface alert.
    
    Args:
        site_id (int): ID of site to compute earthquake analysis for.
        ts (datetime): Timestamp of alert trigger.
    """

    # writes to earthquake_events; defaults epicenter to site coordinates, depth to 0, and magnitude to 4.3
    sites = eq.get_sites()
    earthquake_events = sites.loc[sites.site_id == site_id, ['latitude', 'longitude', 'province']]
    earthquake_events.loc[:, 'ts'] = ts
    earthquake_events.loc[:, 'magnitude'] = 4.3
    earthquake_events.loc[:, 'depth'] = 0
    earthquake_events.loc[:, 'critical_distance'] = np.round(eq.get_crit_dist(4.3), decimals=2)
    earthquake_events.loc[:, 'issuer'] = 'TOPSSOFTWAREINFRA'
    earthquake_events.loc[:, 'processed'] = 1
    eq_id = int(db.df_write(
        data_table=sms.DataTable("earthquake_events", earthquake_events),
        resource='sensor_data', last_insert=True)[0][0])
    
    # writes to earthquake_alerts
    earthquake_alerts = pd.DataFrame({'eq_id': [eq_id], 'site_id': [site_id], 'distance': [0]})
    db.df_write(data_table = sms.DataTable("earthquake_alerts", earthquake_alerts), resource='sensor_data')
    
    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[
        (trigger_symbol.trigger_source == 'earthquake')
        & (trigger_symbol.alert_level == 1),
        'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id], 'trigger_sym_id': [trigger_sym_id], 'ts': [ts], 'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    
    # details for trigger tech info
    earthquake_events.loc[:, 'distance'] = 0

    return earthquake_events
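
A usage sketch; the site_id and timestamp are assumptions, and the synthetic event defaults noted above (epicenter at the site, depth 0, magnitude 4.3) are what get written:

from datetime import datetime

# hypothetical invocation: (re)trigger an earthquake alert for site 1
event_details = earthquake(site_id=1, ts=datetime(2021, 3, 1, 12, 0))
print(event_details[['ts', 'magnitude', 'critical_distance', 'distance']])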
Example #3
def main():
    """Writes in rainfall_gauges information on available rain gauges 
     for rainfall alert analysis

    """

    start = datetime.now()
    qdb.print_out(start)

    if not qdb.does_table_exist('rainfall_gauges'):
        #Create a rainfall_gauges table if it doesn't exist yet
        qdb.create_rainfall_gauges()

    senslope = mem.get('df_dyna_rain_gauges')
    senslope = senslope.loc[senslope.has_rain == 1, :]
    senslope.loc[:, 'data_source'] = 'senslope'

    noah = noah_gauges()

    all_gauges = senslope.append(noah, sort=False)
    all_gauges.loc[:, 'gauge_name'] = all_gauges.loc[:, 'gauge_name'].astype(str)
    all_gauges.loc[:, 'date_activated'] = pd.to_datetime(
        all_gauges.loc[:, 'date_activated'])
    written_gauges = mem.get('df_rain_gauges')
    not_written = set(all_gauges['gauge_name']) \
                     - set(written_gauges['gauge_name'])

    new_gauges = all_gauges.loc[all_gauges.gauge_name.isin(not_written), :]
    new_gauges = new_gauges.loc[new_gauges.date_deactivated.isnull(), :]
    new_gauges = new_gauges.loc[:, [
        'gauge_name', 'data_source', 'longitude', 'latitude', 'date_activated'
    ]]
    if len(new_gauges) != 0:
        data_table = sms.DataTable('rainfall_gauges', new_gauges)
        db.df_write(data_table)

    deactivated = written_gauges.loc[
        ~written_gauges.date_deactivated.isnull(), :]

    deactivated_gauges = all_gauges.loc[
        (~all_gauges.date_deactivated.isnull())
        & (~all_gauges.gauge_name.isin(not_written))
        & (~all_gauges.gauge_name.isin(deactivated.gauge_name)), :]
    date_deactivated = pd.to_datetime(
        deactivated_gauges.loc[:, 'date_deactivated'])
    deactivated_gauges.loc[:, 'date_deactivated'] = date_deactivated
    # include date_deactivated in the written columns; otherwise the
    # datetime conversion above is dead code and the deactivation
    # never reaches rainfall_gauges
    deactivated_gauges = deactivated_gauges.loc[:, [
        'gauge_name', 'data_source', 'longitude', 'latitude',
        'date_activated', 'date_deactivated'
    ]]
    if len(deactivated_gauges) != 0:
        data_table = sms.DataTable('rainfall_gauges', deactivated_gauges)
        db.df_write(data_table)

    qdb.print_out('runtime = %s' % (datetime.now() - start))
Example #4
def subsurface(site_id, ts, alert_level):
    """Insert values to node_alerts, tsm_alerts, and operational_triggers
    to (re)trigger subsurface alert.
    
    Args:
        site_id (int): ID of site to compute subsurface analysis for.
        ts (datetime): Timestamp of alert trigger.
        alert_level (int: {0, 2, 3}): Subsurface alert level.
    """
    
    # get tsm_id
    query = "SELECT tsm_id FROM tsm_sensors "
    query += "where site_id = {} ".format(site_id)
    query += "and (date_deactivated is null or date_deactivated > '{}')".format(ts)
    tsm_id = db.df_read(query, resource='sensor_data').values.flatten()
    tsm_id = random.choice(tsm_id)
    
    # writes to node_alerts; defaults to node 1 and vel_alert
    ts_list = pd.date_range(end=ts, freq='30min', periods=4)
    node_alerts = pd.DataFrame({'ts': ts_list, 'node_id': [1]*len(ts_list),
                                'tsm_id': [tsm_id]*len(ts_list),
                                'disp_alert': [0]*len(ts_list),
                                'vel_alert': [alert_level]*len(ts_list)})
    db.df_write(data_table = sms.DataTable("node_alerts", node_alerts), resource='sensor_data')
    
    # writes to tsm_alerts
    tsm_alerts = pd.DataFrame({'ts': [ts], 'tsm_id': [tsm_id],
                               'alert_level': [alert_level],
                               'ts_updated': [ts]})
    db.df_write(data_table = sms.DataTable("tsm_alerts", tsm_alerts), resource='sensor_data')

    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[
        (trigger_symbol.trigger_source == 'subsurface')
        & (trigger_symbol.alert_level == alert_level),
        'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id], 'trigger_sym_id': [trigger_sym_id], 'ts': [ts], 'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')

    # details for trigger tech info
    tsm_alerts.loc[:, 'node_id'] = 1
    tsm_alerts.loc[:, 'disp_alert'] = 0
    tsm_alerts.loc[:, 'vel_alert'] = alert_level
    
    return tsm_alerts
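
A usage sketch; the site_id and timestamp are assumptions, and the site must have an active row in tsm_sensors since tsm_id is sampled from that table:

from datetime import datetime

# hypothetical invocation: (re)trigger a subsurface alert level 2 for site 1
tech_info = subsurface(site_id=1, ts=datetime(2021, 3, 1, 12, 0), alert_level=2)
print(tech_info[['tsm_id', 'node_id', 'vel_alert']])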
Example #5
def main(key):
    ts = datetime.now()
    sheet_name = ts.strftime('%B %Y')
    shift_sched = get_shift(key, sheet_name)
    try:
        sheet_name = (ts + timedelta(weeks=2)).strftime('%B %Y')
        shift_sched = shift_sched.append(get_shift(key, sheet_name))
    except Exception:
        print("no shift schedule for next month")
    data_table = sms.DataTable('monshiftsched', shift_sched)
    db.df_write(data_table)
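
A usage sketch; the key value is a placeholder for whatever credential or spreadsheet key `get_shift` expects:

# hypothetical call: sync this month's (and, if available, next month's)
# shift schedule into monshiftsched
main('hypothetical-shift-sheet-key')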
Example #6
def send_unsent_notif(df, curr_release):
    ts = curr_release.strftime('%I%p %B %d, %Y')
    if len(df) != 0:
        unsent_ewi = '\n'.join(list(map(lambda x: ': '.join(x), df.values)))
        sms_msg = 'Unsent EWI SMS (' + ts + '):\n\n' + unsent_ewi
        smsoutbox_user_status = get_recipient(curr_release)
    else:
        sms_msg = 'Sent all EWI SMS (' + ts + ')'
        smsoutbox_user_status = get_recipient(curr_release, unsent=False)
    smsoutbox_users = pd.DataFrame({
        'sms_msg': [sms_msg],
        'source': ['central']
    })
    data_table = sms.DataTable('smsoutbox_users', smsoutbox_users)
    outbox_id = db.df_write(data_table, connection='gsm_pi',
                            last_insert=True)[0][0]

    smsoutbox_user_status.loc[:, 'outbox_id'] = outbox_id
    data_table = sms.DataTable('smsoutbox_user_status', smsoutbox_user_status)
    db.df_write(data_table, connection='gsm_pi')
Example #7
def send_reminder(ts=None):
    # avoid datetime.now() as a default argument: it is evaluated once at
    # import time, not on every call
    if ts is None:
        ts = datetime.now()
    shift_ts = release_time(ts) + timedelta(0.5)
    
    query = """SELECT * FROM monshiftsched
            WHERE ts = '{}'""".format(shift_ts)
    df = db.df_read(query)
    df = df.rename(columns={'iompmt': 'MT', 'iompct': 'CT'})
    
    sched = (shift_ts-timedelta(hours=0.25)).strftime("%B %d, %Y %I:%M%p")
    greeting = ts.strftime("%p")
    if greeting == 'AM':
        greeting = 'morning'
    else:
        greeting = 'evening'
        
    query = """SELECT site_code FROM commons_db.sites WHERE active = 1 
            AND site_id NOT IN (SELECT site_id FROM analysis_db.markers WHERE in_use = 1)"""
    sites = ', '.join(db.df_read(query, resource= "sensor_analysis").values.flatten()).upper()
    sites = ', and'.join(sites.rsplit(',', 1))
    
    IOMP_dict = df.loc[:, ['MT', 'CT']].to_dict(orient='records')[0]
    IOMP_num = get_mobile()
    for IOMP, name in IOMP_dict.items():
        sms_msg = ("Monitoring shift reminder:\n\n"
                "Good {} {}, you are assigned to be the IOMP-{} for {}.\n\n"
                "Please be reminded that sites {} currently do not have markers installed. "
                "Instead of asking for ground measurement, please ask for "
                "ground observation.").format(greeting, name, IOMP, sched, sites)
        print(sms_msg, '\n')
        outbox = pd.DataFrame({'sms_msg': [sms_msg], 'source': ['central']})
        mobile_id = IOMP_num.loc[IOMP_num.nickname == name, 'mobile_id'].values
        gsm_id = IOMP_num.loc[IOMP_num.nickname == name, 'gsm_id'].values
        if len(mobile_id) != 0 and len(gsm_id) != 0:
            data_table = sms.DataTable('smsoutbox_users', outbox)
            outbox_id = db.df_write(data_table, resource='sms_data', last_insert=True)[0][0]
            status = pd.DataFrame({'outbox_id': [outbox_id]*len(mobile_id), 'mobile_id': mobile_id,
                                   'gsm_id': gsm_id})
            data_table = sms.DataTable('smsoutbox_user_status', status)
            db.df_write(data_table, resource='sms_data')
        else:
            print("No mobile number")
Example #8
def main():
    query = 'SELECT * FROM markers'
    markers = db.df_read(query)
    query = "SELECT * FROM marker_observations"
    mo = db.df_read(query)
    query = "SELECT * FROM marker_data"
    md = db.df_read(query)
    query = "SELECT ma_id, ts, marker_id FROM marker_alerts"
    ma = db.df_read(query)
    
    marker_alerts = pd.merge(ma, markers, on='marker_id', validate='m:1')
    marker_alerts = pd.merge(marker_alerts, mo, on=['site_id', 'ts'],
                             validate='m:1')
    marker_alerts = pd.merge(marker_alerts, md, on=['mo_id', 'marker_id'],
                             validate='m:1')
    marker_alerts = marker_alerts.drop_duplicates(['ts', 'marker_id'],
                                                  keep='last')
    
    # delete marker_alerts not in marker_observations and duplicated marker_alerts
    ma_id = set(ma['ma_id']) - set(marker_alerts['ma_id'])
    if len(ma_id) != 0:
        # str(tuple(...)) yields invalid SQL for 1-element sets, e.g. "(1,)"
        query = 'DELETE FROM marker_alerts WHERE ma_id IN ({})'.format(
            ', '.join(map(str, ma_id)))
        qdb.execute_query(query)
    
    try:
        query = 'ALTER TABLE marker_alerts ADD UNIQUE INDEX uq_marker_alerts (marker_id ASC, ts ASC)'
        qdb.execute_query(query)
    except Exception:
        # the unique index may already exist
        pass
    
    try:
        query =  "ALTER TABLE marker_alerts "
        query += "ADD UNIQUE INDEX uq_marker_alerts1 (data_id ASC); "
        qdb.execute_query(query)
    except Exception:
        # the unique index may already exist
        pass
    
    try:
        query =  "ALTER TABLE marker_alerts "
        query += "ADD CONSTRAINT fk_marker_data "
        query += "  FOREIGN KEY (data_id) "
        query += "  REFERENCES marker_data (data_id) "
        query += "  ON DELETE CASCADE "
        query += "  ON UPDATE CASCADE; "
        qdb.execute_query(query)
    except Exception:
        # the foreign key may already exist
        pass
    
    data_table = sms.DataTable('marker_alerts',
                               marker_alerts[['ts', 'marker_id', 'data_id']])
    db.df_write(data_table)
Example #9
def send_notif(ts=None):
    # avoid datetime.now() as a default argument: it is evaluated once at
    # import time, not on every call
    if ts is None:
        ts = datetime.now()
    start = lib.release_time(ts) - timedelta(hours=4)
    if (ts - start).total_seconds() / 3600 < 1.5:
        notif = olivia.main(ts)
        sms_msg = '\n'.join(
            list(filter(lambda x: x.startswith('Un'), notif.split('\nNo '))))
    else:
        sms_msg = gndmeas.main(ts)
    if sms_msg != '':
        smsoutbox_user_status = get_recipient(ts)
        smsoutbox_users = pd.DataFrame({
            'sms_msg': [sms_msg],
            'source': ['central']
        })
        data_table = sms.DataTable('smsoutbox_users', smsoutbox_users)
        outbox_id = db.df_write(data_table,
                                connection='gsm_pi',
                                last_insert=True)[0][0]

        smsoutbox_user_status.loc[:, 'outbox_id'] = outbox_id
        data_table = sms.DataTable('smsoutbox_user_status',
                                   smsoutbox_user_status)
        db.df_write(data_table, connection='gsm_pi')
Example #10
def write_observation(surf_df, site_id):
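    """Write a surficial marker observation (marker_observations) and its
    per-marker measurements (marker_data), then regenerate the surficial
    alert for the site."""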
    mo_df = surf_df.loc[:, ['site_id', 'ts', 'meas_type', 'observer_name']]
    mo_df.loc[:, 'data_source'] = 'ops'
    mo_df.loc[:, 'reliability'] = 1
    mo_df.loc[:, 'weather'] = 'maaraw'
    data_table = smsclass.DataTable("marker_observations", mo_df)
    mo_id = dbio.df_write(data_table=data_table, resource='sensor_data',
                          last_insert=True)[0][0]
    if mo_id == 0:
        query = "SELECT marker_observations.mo_id FROM marker_observations "
        query += "WHERE ts = '{ts}' and site_id = '{site_id}'"
        query = query.format(ts=surf_df['ts'].values[0], site_id=site_id)
        mo_id = dbio.read(query, resource='sensor_data')[0][0]
    surf_df = surf_df.dropna(axis=1)
    md_df = surf_df.loc[:, surf_df.columns.astype(str).str.isnumeric()]
    md_df = md_df.transpose()
    md_df = md_df.reset_index()
    md_df.columns = ['marker_id', 'measurement']
    md_df.loc[:, 'mo_id'] = mo_id
    dbio.df_write(data_table=smsclass.DataTable("marker_data", md_df),
                  resource='sensor_data')
    ma.generate_surficial_alert(site_id, ts=mo_df.ts.values[0])
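
A usage sketch; the input layout (numeric column names holding marker_ids, one observation row) is inferred from the str.isnumeric() column filter above, and all values are hypothetical:

import pandas as pd

surf_df = pd.DataFrame({'site_id': [1], 'ts': ['2021-03-01 07:30:00'],
                        'meas_type': ['ROUTINE'], 'observer_name': ['JUAN'],
                        '12': [150.0], '13': [98.5]})
write_observation(surf_df, site_id=1)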
Example #11
def update_table_data(noah_id, gauge_name, fdate, tdate, noah_gauges):
    """Updates data of table gauge_name from fdate to tdate.
    
    Args:
        noah_id (int): Device id of noah data.
        gauge_name (str): Name of table containing rainfall data of noah_id.
        fdate (timestamp): Timestamp start of data to be downloaded.
        tdate (timestamp): Timestamp end of data to be downloaded.
        noah_gauges (dataframe): Rain gauge properties: id, name, data source.

    """

    noah_data = download_rainfall_noah(noah_id, fdate, tdate)
    cur_ts = datetime.now()

    if noah_data.empty:
        qdb.print_out("    no data...")

        #Insert an entry with values: [timestamp, -1] as a marker
        #-1 values should not be included in computation of cml rainfall
        if pd.to_datetime(tdate) <= cur_ts:
            place_holder_data = pd.DataFrame({"ts": [tdate], "rain": [-1.0]})
            data_table = sms.DataTable(gauge_name, place_holder_data)
            db.df_write(data_table)

    else:
        #Insert the new data on the noahid table
        data_table = sms.DataTable(gauge_name, noah_data)
        db.df_write(data_table)

    #The table is already up to date
    if pd.to_datetime(tdate) > cur_ts:
        return
    else:
        #call this function again until the maximum recent timestamp is hit
        update_single_table(noah_gauges)
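
A usage sketch with placeholder arguments; noah_row stands for the matching record from the rain gauge properties DataFrame that update_single_table would normally pass along:

# hypothetical backfill of one day of data for one NOAH gauge
update_table_data(noah_id=1234, gauge_name='rain_noah_1234',
                  fdate='2021-03-01 00:00:00', tdate='2021-03-02 00:00:00',
                  noah_gauges=noah_row)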
Example #12
def generate_surficial_alert(site_id=None, ts=None):
    """
    Main alert generation function for the surficial alert of a site
    at a specified time
    
    Parameters
    ------------------
    site_id: int
        site_id of site of interest
    ts: timestamp
        timestamp of alert generation
        
    Returns
    -------------------
    Prints the generated alert and writes to marker_alerts database
    """
    #### Obtain system arguments from command prompt
    if site_id is None and ts is None:
        site_id, ts = sys.argv[1].lower(), sys.argv[2].lower()
    
    #### Config variables
    num_pts = int(sc['surficial']['surficial_num_pts'])
    ts_start = pd.to_datetime(ts) - timedelta(sc['surficial']['meas_plot_window'])

    #### Get latest ground data
    surficial_data_df = qdb.get_surficial_data(site_id, ts_start, ts, num_pts)
    
    #### Generate Marker alerts
    marker_data_df = surficial_data_df.groupby('marker_id', as_index=False)
    marker_alerts = marker_data_df.apply(evaluate_marker_alerts, ts)

    #### Write to marker_alerts table    
    data_table = sms.DataTable('marker_alerts', marker_alerts)
    db.df_write(data_table)

    #### Generate surficial alert for site
    surficial_alert = get_surficial_alert(marker_alerts, site_id)
    #### Write to db
    qdb.alert_to_db(surficial_alert, 'operational_triggers')
    
    #### Plot current ground meas    
    if sc['surficial']['print_meas_plot']:
        ### Retrieve the surficial data to plot
        surficial_data_to_plot = surficial_data_df.loc[surficial_data_df.ts >= ts_start, :]
        ### Plot the surficial data
        plot_site_meas(surficial_data_to_plot, ts)
    
    return surficial_data_df
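
A usage sketch; the function can also be driven by command-line arguments as handled at the top, but a direct call with hypothetical values looks like this:

surficial_data = generate_surficial_alert(site_id=1, ts='2021-03-01 07:30:00')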
Example #13
def to_mysql(df):
    """Writes in rainfall_priorities the distance of 4 active nearby
    rain gauges from the site.
    
    Args:
        df (dataframe): Record of 4 nearby rain gauges with
            their distances from the site.

    """
    written_df = mem.get('df_rain_priorities')
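    # written_df is appended twice so every already-written (site_id, rain_id)
    # pair appears at least twice; drop_duplicates(keep=False) then removes
    # them all, leaving only pairs not yet in rainfall_priorities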
    combined = written_df.append(df, ignore_index=True, sort=False)
    combined = combined.append(written_df, ignore_index=True, sort=False)
    combined = combined.drop_duplicates(['site_id', 'rain_id'], keep=False)

    if len(combined) > 0:
        data_table = sms.DataTable('rainfall_priorities', combined)
        db.df_write(data_table)
Example #14
def main(hours=''):
    auth_api = get_auth()
    username = '******'
    
    if hours == '':
        try:
            hours = int(sys.argv[1])
        except (IndexError, ValueError):
            hours = 0.25
    end_date = datetime.utcnow() - timedelta(hours=hours)

    for status in Cursor(auth_api.user_timeline, id=username).items():
        text = auth_api.get_status(status.id, tweet_mode="extended").full_text
        if 'earthquake' in text.lower():
            try:
                print(status.created_at)
                df = get_eq_events(text)
                data_table = sms.DataTable('earthquake_events', df)
                db.df_write(data_table)
            except Exception:
                # tweet could not be parsed into an earthquake event
                pass
    
        if status.created_at < end_date:
            break
Example #15
def drift_detection(acc_id="", f_time=pd.to_datetime(dt.now() - td(weeks=12))):
    accelerometers = memory.get('DF_ACCELEROMETERS')
    acc_det = accelerometers[accelerometers.accel_id == acc_id].iloc[0]

    try:
        df = q.get_raw_accel_data(tsm_id=acc_det.tsm_id,
                                  node_id=acc_det.node_id,
                                  accel_number=acc_det.accel_number,
                                  from_time=f_time)
    # node_id out of range
    except ValueError:
        return 0
    # no tilt_***** table in the db
    except AttributeError:
        return 0

    # df is empty
    if df.empty:
        return 0

    #Resample 30min
    df = df.set_index('ts').resample('30min').first()

    #Integer index
    N = len(df.index)
    df['i'] = range(1, N + 1, 1)

    # Compute accelerometer raw value
    df.x[df.x < -2048] = df.x[df.x < -2048] + 4096
    df.y[df.y < -2048] = df.y[df.y < -2048] + 4096
    df.z[df.z < -2048] = df.z[df.z < -2048] + 4096

    # Compute accelerometer magnitude
    df['mag'] = ((df.x / 1024) * (df.x / 1024) + (df.y / 1024) *
                 (df.y / 1024) + (df.z / 1024) * (df.z / 1024)).apply(np.sqrt)

    #count number of data
    dfw = pd.DataFrame()
    dfw['count'] = df.mag.resample('1W').count()

    # Filter data with very big/small magnitude
    df[df.mag > 3.0] = np.nan
    df[df.mag < 0.5] = np.nan

    # Compute mean and standard deviation in time frame
    df['ave'] = df.mag.rolling(window=12, center=True).mean()
    df['stdev'] = df.mag.rolling(window=12, center=True).std()

    # Filter data with outlier values in time frame
    df[(df.mag > df.ave + 3 * df.stdev) & (df.stdev != 0)] = np.nan
    df[(df.mag < df.ave - 3 * df.stdev) & (df.stdev != 0)] = np.nan

    #interpolate missing data
    df = df.interpolate()

    # Resample every six hours
    df = df.resample('6H').mean()

    # Recompute standard deviation after resampling
    df.stdev = df.mag.rolling(window=2, center=False).std()
    df.stdev = df.stdev.shift(-1)
    df.stdev = df.stdev.rolling(window=2, center=False).mean()

    # Filter data with large standard deviation
    df[df.stdev > 0.05] = np.nan

    # Compute velocity and acceleration of magnitude
    df['vel'] = df.mag - df.mag.shift(1)
    df['acc'] = df.vel - df.vel.shift(1)

    #Resample 1week
    dfw['vel_week'] = df.vel.resample('1W').mean()
    dfw['acc_week'] = df.acc.resample('1W').mean()
    dfw['corr'] = df.resample('1W').mag.corr(df.i)
    dfw['corr'] = dfw['corr']**2

    # Get the data that exceeds the threshold value
    dfw = dfw[(abs(dfw['acc_week']) > 0.000003) & (dfw['corr'] > 0.7) &
              (dfw['count'] >= 84)]

    #Compute the difference for each threshold data
    if len(dfw) > 0:
        dfw = dfw.reset_index()
        dfw['diff_TS'] = dfw.ts - dfw.ts.shift(1)
        dfw['sign'] = dfw.vel_week * dfw.vel_week.shift(1)

    #Check if there are 4 weeks consecutive threshold data
    week = 1
    days = td(days=0)
    while days < td(days=28) and week < len(dfw.index):
        if ((dfw.loc[week]['diff_TS'] <= td(days=14)) &
            (dfw.loc[week]['sign'] > 0)):
            days = days + dfw.loc[week]['diff_TS']
        else:
            days = td(days=0)
        week = week + 1

    if days >= td(days=28):
        print(acc_id, dfw.ts[week - 1])

        #    df['mag'].plot()
        #    plt.savefig(OutputFP+col+nids+a+"-mag")
        #    plt.close()

        dft = pd.DataFrame(columns=['accel_id', 'ts_identified'])
        dft.loc[0] = [acc_id, dfw.ts[week - 1]]

        #save to db
        db.df_write(smsclass.DataTable("drift_detection", dft))

        print("very nice!")
Example #16
def main():

    eq_events = get_unprocessed()
    sym = get_alert_symbol()
    sites = get_sites()
    dfg = sites.groupby('site_id')
    eq_a = pd.DataFrame(columns=['site_id', 'eq_id', 'distance'])
    EVENTS_TABLE = 'earthquake_events'

    for i in eq_events.index:
        cur = eq_events.loc[i]

        mag = cur.magnitude
        eq_lat = cur.latitude
        eq_lon = cur.longitude
        ts = cur.ts

        critdist = get_crit_dist(mag)
        print(critdist)
        if not np.isfinite([mag, eq_lat, eq_lon]).all():
            # NaN value in mag, lat, or lon
            query = "UPDATE %s SET processed = -1 where eq_id = %s" % (
                EVENTS_TABLE, i)
            dynadb.write(query=query, resource="sensor_data")
            continue

        if mag < 4:
            print("> Magnitude too small: %.1f" % mag)
            query = "UPDATE %s SET processed = 1 where eq_id = %s" % (
                EVENTS_TABLE, i)
            dynadb.write(query=query, resource="sensor_data")
            continue
        else:
            print("> Magnitude reached threshold: %.1f" % mag)

        # magnitude is big enough to consider
        sites = dfg.apply(get_distance_to_eq, eq_lat=eq_lat, eq_lon=eq_lon)
        print(sites)
        # .copy() so the assignments below do not write to a slice view
        crits = sites.loc[sites.distance <= critdist, :].copy()

        if len(crits) == 0:
            print("> No affected sites. ")
            query = "UPDATE %s SET processed = 1, critical_distance = %s where eq_id = %s" % (
                EVENTS_TABLE, critdist, i)
            dynadb.write(query=query, resource="sensor_data")
            continue
        else:
            # at least one site may trigger
            print(">> Possible sites affected: %d" %
                  (len(crits.site_id.values)))

        crits.loc[:, 'ts'] = ts
        crits.loc[:, 'source'] = 'earthquake'
        crits.loc[:, 'trigger_sym_id'] = sym
        crits.loc[:, 'ts_updated'] = ts
        crits.loc[:, 'eq_id'] = i

        eq_a = crits.loc[:, ['eq_id', 'site_id', 'distance']]
        op_trig = crits.loc[:,
                            ['ts', 'site_id', 'trigger_sym_id', 'ts_updated']]

        # write to tables
        data_table = sms.DataTable("operational_triggers", op_trig)
        dynadb.df_write(data_table)
        data_table = sms.DataTable("earthquake_alerts", eq_a)
        dynadb.df_write(data_table)

        query = "UPDATE %s SET processed = 1, critical_distance = %s where eq_id = %s " % (
            EVENTS_TABLE, critdist, i)
        dynadb.write(query=query, resource="sensor_data")

        print(">> Alert iniated.\n")
Example #17
def surficial(site_id, ts, alert_level):
    """Insert values to marker_observations, marker_data, marker_alerts, and 
    operational_triggers to (re)trigger surficial alert.
    
    Args:
        site_id (int): ID of site to compute surficial analysis for.
        ts (datetime): Timestamp of alert trigger.
        alert_level (int: {0, 1, 2, 3}): Surficial alert level.
    """

    # get last data for site_id
    conn = mem.get('DICT_DB_CONNECTIONS')
    query = "SELECT ts, marker_id, marker_name, measurement "
    query += "FROM {analysis}.marker_observations "
    query += "INNER JOIN {common}.sites USING (site_id) "
    query += "INNER JOIN {analysis}.marker_data using (mo_id) "
    query += "INNER JOIN (SELECT data_id, displacement, time_delta, alert_level, processed FROM {analysis}.marker_alerts) sub1 USING (data_id) "
    query += "INNER JOIN (SELECT marker_id, marker_name FROM {analysis}.view_marker_history) sub2 USING (marker_id) "
    query += "WHERE site_id = {site_id} "
    query += "AND ts IN ( "
    query += "  SELECT MAX(ts) FROM {analysis}.marker_observations "
    query += "  WHERE ts < '{ts}' "
    query += "    AND site_id = {site_id})"
    query = query.format(analysis=conn['analysis']['schema'], common=conn['common']['schema'], site_id=site_id, ts=ts)
    df = db.df_read(query, resource='sensor_analysis')
        
    # compute diff in measurements to reach threshold
    if alert_level == 3:
        rate = 1.8
    elif alert_level in (1, 2):
        rate = 0.25
    else:
        rate = 0
    meas_diff = np.ceil(rate * (ts-df.ts[0]).total_seconds()/3600)

    # input measurements in inbox
    gndmeas = df.loc[:, ['marker_id', 'marker_name', 'measurement']]
    gndmeas.loc[:, 'ts'] = ts
    gndmeas.loc[:, 'measurement'] += meas_diff
    if alert_level == 1:
        temp_gndmeas = gndmeas.copy()
        temp_gndmeas.loc[:, 'ts'] -= (ts - df.ts[0])/2
        temp_gndmeas.loc[:, 'measurement'] += meas_diff
        # filler measurement for alert level 1
        df_obv = pd.DataFrame({'meas_type': ['ROUTINE'], 'site_id': [site_id],
                               'weather': ['MAARAW'], 'observer_name':['TOPSSOFTWAREINFRA'],
                               'reliability': [1], 'data_source': ['SMS'],
                               'ts': [temp_gndmeas.ts[0]]})
        mo_id = int(db.df_write(data_table=sms.DataTable("marker_observations", 
            df_obv), resource='sensor_data', last_insert=True)[0][0])
        temp_gndmeas.loc[:, 'mo_id'] = mo_id
        df_data = temp_gndmeas.loc[:, ['mo_id', 'marker_id', 'measurement']]
        db.df_write(data_table = sms.DataTable("marker_data", df_data), resource='sensor_data')
        surf.generate_surficial_alert(site_id = site_id, ts = temp_gndmeas.ts[0])
    # measurement for ts given
    df_obv = pd.DataFrame({'meas_type': ['ROUTINE'], 'site_id': [site_id],
                           'weather': ['MAARAW'], 'observer_name':['TOPSSOFTWAREINFRA'],
                           'reliability': [1], 'data_source': ['SMS'],
                           'ts': [ts]})
    mo_id = int(db.df_write(data_table=sms.DataTable("marker_observations", 
        df_obv), resource='sensor_data', last_insert=True)[0][0])
    gndmeas.loc[:, 'mo_id'] = mo_id
    df_data = gndmeas.loc[:, ['mo_id', 'marker_id', 'measurement']]
    db.df_write(data_table = sms.DataTable("marker_data", df_data), resource='sensor_data')
    surf.generate_surficial_alert(site_id = site_id, ts = ts)
    
    # details for trigger tech info
    time_delta = np.round((ts - df.ts[0]).total_seconds()/3600, 2)
    if alert_level == 1:
        time_delta /= 2
    gndmeas.loc[:, 'displacement'] = meas_diff
    gndmeas.loc[:, 'time_delta'] = time_delta
    gndmeas.loc[:, 'alert_level'] = alert_level

    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[
        (trigger_symbol.trigger_source == 'surficial')
        & (trigger_symbol.alert_level == alert_level),
        'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id], 'trigger_sym_id': [trigger_sym_id], 'ts': [ts], 'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    
    return gndmeas
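
A usage sketch; hypothetical site and timestamp, requiring prior marker observations for the site since the last measurement is read back from the database:

from datetime import datetime

# hypothetical invocation: fabricate measurements that (re)trigger a
# level-2 surficial alert for site 1
trigger_info = surficial(site_id=1, ts=datetime(2021, 3, 1, 7, 30),
                         alert_level=2)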