def earthquake(site_id, ts, magnitude=4.3):
    """Insert values to earthquake_events, earthquake_alerts, and
    operational_triggers to (re)trigger earthquake alert.

    Args:
        site_id (int): ID of site to compute earthquake analysis for.
        ts (datetime): Timestamp of alert trigger.
        magnitude (float, default 4.3): Magnitude assigned to the synthetic
            earthquake event; also drives the critical-distance computation.

    Returns:
        DataFrame: Earthquake event details (with 'distance' set to 0)
        for trigger tech info.
    """
    # writes to earthquake_events; defaults epicenter to site coordinates
    # and depth to 0
    sites = eq.get_sites()
    earthquake_events = sites.loc[sites.site_id == site_id,
                                  ['latitude', 'longitude', 'province']]
    earthquake_events.loc[:, 'ts'] = ts
    earthquake_events.loc[:, 'magnitude'] = magnitude
    earthquake_events.loc[:, 'depth'] = 0
    earthquake_events.loc[:, 'critical_distance'] = np.round(
        eq.get_crit_dist(magnitude), decimals=2)
    earthquake_events.loc[:, 'issuer'] = 'TOPSSOFTWAREINFRA'
    earthquake_events.loc[:, 'processed'] = 1
    # last_insert=True returns the auto-generated eq_id of the inserted row
    eq_id = int(db.df_write(data_table=sms.DataTable("earthquake_events",
                                                     earthquake_events),
                            resource='sensor_data',
                            last_insert=True)[0][0])
    # writes to earthquake_alerts
    earthquake_alerts = pd.DataFrame({'eq_id': [eq_id],
                                      'site_id': [site_id],
                                      'distance': [0]})
    db.df_write(data_table=sms.DataTable("earthquake_alerts",
                                         earthquake_alerts),
                resource='sensor_data')
    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[
        (trigger_symbol.trigger_source == 'earthquake')
        & (trigger_symbol.alert_level == 1), 'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id],
                                        'trigger_sym_id': [trigger_sym_id],
                                        'ts': [ts],
                                        'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    # details for trigger tech info
    earthquake_events.loc[:, 'distance'] = 0
    return earthquake_events
def main(end_ts=None):
    """Process 'lt' surficial triggers of the last two days: escalate
    valid ones to l2 operational triggers, and delete public alerts for
    invalidated ones (unless a co-trigger of equal/greater level exists).

    Args:
        end_ts (datetime, optional): End of the processing window.
            Defaults to the current time at call.
    """
    # FIX: the original default was end_ts=datetime.now(), which is
    # evaluated ONCE at import time — every later no-arg call reused the
    # stale import-time timestamp. Use a None sentinel instead.
    if end_ts is None:
        end_ts = datetime.now()
    start_ts = pd.to_datetime(end_ts) - timedelta(2)
    print(start_ts)
    surficial_triggers = qdb.get_surficial_trigger(start_ts, end_ts)
    if len(surficial_triggers) == 0:
        qdb.print_out("No surficial trigger to process")
    for index, surficial in surficial_triggers.iterrows():
        ts_updated = surficial['ts_updated']
        public_ts_start = round_data_ts(ts_updated)
        alert_level = surficial['alert_level']
        alert_symbol = surficial['alert_symbol']
        alert_status = surficial['alert_status']
        site_id = surficial['site_id']
        site_code = surficial['site_code']
        # only 'lt' (significant movement needing validation) is handled
        if (alert_symbol == 'lt'):
            if (alert_status == 1):
                qdb.print_out("Found valid lt surficial trigger for " + \
                              "%s at %s" % (site_code.upper(), ts_updated))
                trigger_sym_id = qdb.get_trigger_sym_id(2, 'surficial')
                df = pd.DataFrame({
                    'ts': [ts_updated],
                    'site_id': [site_id],
                    'trigger_sym_id': [trigger_sym_id],
                    'ts_updated': [ts_updated]
                })
                qdb.alert_to_db(df, 'operational_triggers',
                                lt_overwrite=False)
                qdb.print_out(" > Added l2 trigger on operational triggers")
            # Process only l2 and l3 with alert status of -1 (invalid)
            elif (alert_status == -1):
                valid_cotriggers = qdb.get_valid_cotriggers(
                    site_id, public_ts_start)
                dont_delete = False
                # Check if it has co-triggers on start of event
                # tho highly unlikely
                if len(valid_cotriggers) != 0:
                    for index, valid in valid_cotriggers.iterrows():
                        # Don't delete public alert entry if there
                        # is a co-trigger that's equal or
                        # greater of alert level
                        if (valid['alert_level'] >= alert_level):
                            qdb.print_out(
                                "%s has valid co-trigger: deleting will NOT commence"
                                % (site_code.upper()))
                            dont_delete = True
                            break
                if dont_delete == False:
                    qdb.delete_public_alert(site_id, public_ts_start)
                    qdb.print_out("Deleted {} public alert of {}".format(
                        public_ts_start, site_code))
def rainfall(site_id, ts, rain_id, rain_alert='both'):
    """Insert values to rainfall_alerts and operational_triggers tables
    to (re)trigger rainfall alert.

    Args:
        site_id (int): ID of site to compute rainfall analysis for.
        ts (datetime): Timestamp of alert trigger.
        rain_id (int): ID of rain gauge to use as data source.
        rain_alert (str: {'a', 'b', 'x', '0', 'both'}, default 'both'):
            Type of rainfall alert.
            a: 1-day cumulative rainfall threshold exceeded
            b: 3-day cumulative rainfall threshold exceeded
            x: rainfall near (80% of) threshold
            0: no rainfall alert (writes a level-0 operational trigger)
            both: 1-day and 3-day cumulative rainfall threshold exceeded

    Returns:
        DataFrame: Rainfall alert rows used in the analysis.
    """
    # 4 nearest rain gauges of each site with threshold and distance from site
    gauges = rain.rainfall_gauges()
    df = gauges.loc[(gauges.site_id == site_id) & (gauges.rain_id == rain_id),
                    ['site_id', 'rain_id', 'threshold_value']]
    df = df.rename(columns={'threshold_value': 'threshold'})
    df.loc[:, 'ts'] = ts
    # rainfall cumulative based on alert level
    if str(rain_alert) == '0':
        # level 0: no rainfall_alerts row is written
        df.loc[:, 'rain_alert'] = 0
        df.loc[:, 'cumulative'] = 0
    else:
        if rain_alert != 'x':
            # exceed threshold by 20% to guarantee a 3-day ('b') alert
            df.loc[:, 'rain_alert'] = 'b'
            df.loc[:, 'cumulative'] = 1.2 * df.loc[:, 'threshold']
        else:
            # 80% of threshold: near-threshold advisory ('x')
            df.loc[:, 'rain_alert'] = 'x'
            df.loc[:, 'cumulative'] = 0.80 * df.loc[:, 'threshold']
        if rain_alert == 'a' or rain_alert == 'both':
            # 1-day ('a') alert uses half the 3-day threshold/cumulative
            dfa = df.copy()
            dfa.loc[:, ['cumulative', 'threshold']] = \
                dfa.loc[:, ['cumulative', 'threshold']].div(2)
            dfa.loc[:, 'rain_alert'] = 'a'
            if rain_alert == 'a':
                df = dfa.copy()
            else:
                # FIX: DataFrame.append was removed in pandas 2.0;
                # pd.concat is the supported, behaviorally-equivalent call.
                df = pd.concat([df, dfa], ignore_index=True)
        qdb.write_rain_alert(df)
    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    if str(rain_alert) in ['0', 'x']:
        alert_level = 0
    else:
        alert_level = 1
    trigger_sym_id = trigger_symbol.loc[
        (trigger_symbol.trigger_source == 'rainfall')
        & (trigger_symbol.alert_level == alert_level),
        'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id],
                                        'trigger_sym_id': [trigger_sym_id],
                                        'ts': [ts],
                                        'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    return df
def generate_surficial_alert(site_id=None, ts=None):
    """Main alert generating function for surficial alert for a site at
    specified time.

    Parameters
    ------------------
    site_id: int
        site_id of site of interest
    ts: timestamp
        timestamp of alert generation

    Returns
    -------------------
    Prints the generated alert and writes to marker_alerts database
    """
    #### Obtain system arguments from command prompt
    # FIX: use identity comparison for None (PEP 8) instead of `== None`
    if site_id is None and ts is None:
        site_id, ts = sys.argv[1].lower(), sys.argv[2].lower()

    #### Config variables
    num_pts = int(sc['surficial']['surficial_num_pts'])
    ts_start = pd.to_datetime(ts) - timedelta(
        sc['surficial']['meas_plot_window'])

    #### Get latest ground data
    surficial_data_df = qdb.get_surficial_data(site_id, ts_start, ts, num_pts)

    #### Generate Marker alerts
    marker_data_df = surficial_data_df.groupby('marker_id', as_index=False)
    marker_alerts = marker_data_df.apply(evaluate_marker_alerts, ts)

    #### Write to marker_alerts table
    data_table = sms.DataTable('marker_alerts', marker_alerts)
    db.df_write(data_table)

    #### Generate surficial alert for site
    surficial_alert = get_surficial_alert(marker_alerts, site_id)

    #### Write to db
    qdb.alert_to_db(surficial_alert, 'operational_triggers')

    #### Plot current ground meas
    if sc['surficial']['print_meas_plot']:
        ### Retrieve the surficial data to plot
        surficial_data_to_plot = surficial_data_df.loc[
            surficial_data_df.ts >= ts_start, :]
        ### Plot the surficial data
        plot_site_meas(surficial_data_to_plot, ts)
    return surficial_data_df
def subsurface(site_id, ts, alert_level):
    """Insert values to node_alerts, tsm_alerts, and operational_triggers
    to (re)trigger subsurface alert.

    Args:
        site_id (int): ID of site to compute subsurface analysis for.
        ts (datetime): Timestamp of alert trigger.
        alert_level (int: {0, 2, 3}, default None): Subsurface alert level.
    """
    # pick one active TSM sensor of the site at random
    query = ("SELECT tsm_id FROM tsm_sensors "
             "where site_id = {} "
             "and (date_deactivated is null or date_deactivated > '{}')"
             ).format(site_id, ts)
    sensor_ids = db.df_read(query, resource='sensor_data').values.flatten()
    chosen_tsm = random.choice(sensor_ids)

    # writes to node_alerts; defaults to node 1 and vel_alert,
    # backfilled over the last four 30-minute timestamps
    ts_list = pd.date_range(end=ts, freq='30min', periods=4)
    n = len(ts_list)
    node_alerts = pd.DataFrame({'ts': ts_list,
                                'node_id': [1] * n,
                                'tsm_id': [chosen_tsm] * n,
                                'disp_alert': [0] * n,
                                'vel_alert': [alert_level] * n})
    db.df_write(data_table=sms.DataTable("node_alerts", node_alerts),
                resource='sensor_data')

    # writes to tsm_alerts
    tsm_alerts = pd.DataFrame({'ts': [ts],
                               'tsm_id': [chosen_tsm],
                               'alert_level': [alert_level],
                               'ts_updated': [ts]})
    db.df_write(data_table=sms.DataTable("tsm_alerts", tsm_alerts),
                resource='sensor_data')

    # writes to operational_triggers
    symbols = mem.get('df_trigger_symbols')
    is_subsurface = symbols.trigger_source == 'subsurface'
    is_level = symbols.alert_level == alert_level
    trigger_sym_id = symbols.loc[is_subsurface & is_level,
                                 'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id],
                                        'trigger_sym_id': [trigger_sym_id],
                                        'ts': [ts],
                                        'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')

    # details for trigger tech info
    tsm_alerts.loc[:, 'node_id'] = 1
    tsm_alerts.loc[:, 'disp_alert'] = 0
    tsm_alerts.loc[:, 'vel_alert'] = alert_level
    return tsm_alerts
def generate_surficial_alert(site_id=None, ts=None, marker_id=None,
                             to_json=False, plot=False):
    """Main alert generating function for surficial alert for a site at
    specified time.

    Parameters
    ------------------
    site_id: int
        site_id of site of interest
    ts: timestamp
        timestamp of alert generation
    marker_id: int, optional
        restrict processing to a single marker when given
    to_json: bool, default False
        when True, return the first marker's trend_alert instead of the
        surficial data frame
    plot: bool, default False
        when True (and config allows), plot the current ground measurement

    Returns
    -------------------
    Prints the generated alert and writes to marker_alerts database
    """
    #### Obtain system arguments from command prompt
    # FIX: use identity comparison for None (PEP 8) instead of `== None`
    if site_id is None and ts is None:
        site_id, ts = sys.argv[1].lower(), sys.argv[2].lower()
    ts = pd.to_datetime(ts)

    #### Config variables
    num_pts = int(sc['surficial']['surficial_num_pts'])
    ts_start = pd.to_datetime(ts) - timedelta(
        sc['surficial']['meas_plot_window'])

    #### Get latest ground data
    surficial_data_df = qdb.get_surficial_data(site_id, ts_start, ts, num_pts)
    surficial_data_df.loc[:, 'ts'] = surficial_data_df.loc[:, 'ts'].apply(
        lambda x: pd.to_datetime(x))

    #### Generate Marker alerts
    if marker_id is not None:
        surficial_data_df = surficial_data_df.loc[
            surficial_data_df.marker_id == marker_id, :]
    marker_data_df = surficial_data_df.groupby('marker_id', as_index=False)
    marker_alerts = marker_data_df.apply(evaluate_marker_alerts, ts, to_json)

    #### Write to marker_alerts table
    qdb.write_marker_alerts(marker_alerts[[
        'ts', 'marker_id', 'data_id', 'displacement', 'time_delta',
        'alert_level', 'processed'
    ]])

    #### Generate surficial alert for site
    surficial_alert = get_surficial_alert(marker_alerts, site_id)

    #### Write to db
    qdb.alert_to_db(surficial_alert, 'operational_triggers')

    #### Plot current ground meas
    if sc['surficial']['print_meas_plot'] and plot:
        ### Retrieve the surficial data to plot
        surficial_data_to_plot = surficial_data_df.loc[
            surficial_data_df.ts >= ts_start, :]
        ### Plot the surficial data
        plot_site_meas(surficial_data_to_plot, ts)

    if to_json:
        return marker_alerts['trend_alert'][0]
    return surficial_data_df
def main(rain_props, end, sc, trigger_symbol, write_to_db=True):
    """Computes rainfall alert.

    Args:
        rain_props (dataframe): Contains rain gauges that can be used in
                                rainfall analysis.
        end (datetime): Timestamp of alert to be computed.
        sc (dict): Server configuration.
        trigger_symbol (dataframe): Alert symbol per alert level.
        write_to_db (bool, default True): Whether to write the computed
                                          operational trigger to the db.

    Returns:
        dataframe: Summary of cumulative rainfall, threshold, alert and
                   rain gauge used in analysis of rainfall.
    """
    # rainfall properties
    site_id = rain_props['site_id'].values[0]
    site_code = rain_props['site_code'].values[0]
    twoyrmax = rain_props['threshold_value'].values[0]
    halfmax = twoyrmax / 2
    start = end - timedelta(float(sc['rainfall']['roll_window_length']))
    offsetstart = start - timedelta(hours=0.5)
    # only write (lower) the rainfall alert when the site is under an
    # ongoing event whose validity window has lapsed
    try:
        if qdb.get_alert_level(site_id, end)['alert_level'].values[0] > 0:
            start_monitor = pub.event_start(site_id, end)
            op_trig = pub.get_operational_trigger(site_id, start_monitor, end)
            op_trig = op_trig[op_trig.alert_level > 0]
            validity = max(op_trig['ts_updated'].values)
            validity = pub.release_time(pd.to_datetime(validity)) \
                + timedelta(1)
            if 3 in op_trig['alert_level'].values:
                # alert level 3 events have an extended validity
                validity += timedelta(1)
            write_alert = end + timedelta(hours=0.5) >= validity
        else:
            write_alert = False
    # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrow to Exception while keeping the best-effort fallback
    except Exception:
        write_alert = False
    # data is gathered from nearest rain gauge
    rainfall, gauge_name, rain_id = get_unempty_rg_data(
        rain_props, offsetstart, start, end)
    summary = summary_writer(site_id, site_code, gauge_name, rain_id,
                             twoyrmax, halfmax, rainfall, end, write_alert)
    # .copy() so mapping/assignment below never warns about (or writes
    # through to) a view of `summary`
    operational_trigger = summary[['site_id', 'alert']].copy()
    operational_trigger['alert'] = operational_trigger['alert'].map({
        -1: trigger_symbol[trigger_symbol.alert_level == -1]
        ['trigger_sym_id'].values[0],
        0: trigger_symbol[trigger_symbol.alert_level == 0]
        ['trigger_sym_id'].values[0],
        1: trigger_symbol[trigger_symbol.alert_level == 1]
        ['trigger_sym_id'].values[0]
    })
    operational_trigger['ts'] = str(end)
    operational_trigger['ts_updated'] = str(end)
    operational_trigger = operational_trigger.rename(
        columns={'alert': 'trigger_sym_id'})
    if write_to_db:
        qdb.alert_to_db(operational_trigger, 'operational_triggers')
    return summary
def main(tsm_name='', end='', end_mon=False):
    """Compute the subsurface (tilt) alert of one TSM sensor column and
    write the resulting tsm_alert and operational trigger to the db.

    Args:
        tsm_name (str): Name of the TSM sensor; read from sys.argv[1]
            when empty.
        end (str/datetime): End of the analysis window; read from
            sys.argv[2] (falling back to now) when empty.
        end_mon (bool): Unused here; kept for caller compatibility.

    Returns:
        dataframe: Processed tilt data, or None when no data is available.
    """
    run_start = datetime.now()
    qdb.print_out(run_start)
    qdb.print_out(tsm_name)

    if tsm_name == '':
        tsm_name = sys.argv[1].lower()
    if end == '':
        # FIX: was a bare `except:`; narrow to Exception so
        # SystemExit/KeyboardInterrupt still propagate
        try:
            end = pd.to_datetime(sys.argv[2])
        except Exception:
            end = datetime.now()
    else:
        end = pd.to_datetime(end)

    window, sc = rtw.get_window(end)
    tsm_props = qdb.get_tsm_list(tsm_name)[0]
    data = proc.proc_data(tsm_props, window, sc)
    tilt = data.tilt[window.start:window.end]
    lgd = data.lgd
    tilt = tilt.reset_index().sort_values('ts', ascending=True)
    if lgd.empty:
        qdb.print_out('%s: no data' % tsm_name)
        return

    # per-node displacement/velocity alerts
    nodal_tilt = tilt.groupby('node_id', as_index=False)
    alert = nodal_tilt.apply(
        lib.node_alert, colname=tsm_props.tsm_name,
        num_nodes=tsm_props.nos,
        disp=float(sc['subsurface']['disp']),
        vel2=float(sc['subsurface']['vel2']),
        vel3=float(sc['subsurface']['vel3']),
        k_ac_ax=float(sc['subsurface']['k_ac_ax']),
        lastgooddata=lgd, window=window, sc=sc).reset_index(drop=True)
    alert.loc[:, 'col_alert'] = -1
    col_alert = pd.DataFrame({'node_id': range(1, tsm_props.nos + 1),
                              'col_alert': [-1] * tsm_props.nos})
    node_col_alert = col_alert.groupby('node_id', as_index=False)
    # column_alert mutates `alert` in place (fills col_alert per node)
    node_col_alert.apply(
        lib.column_alert, alert=alert,
        num_nodes_to_check=int(sc['subsurface']['num_nodes_to_check']),
        k_ac_ax=float(sc['subsurface']['k_ac_ax']),
        vel2=float(sc['subsurface']['vel2']),
        vel3=float(sc['subsurface']['vel3']))

    # ignore alerts from invalid nodes (data.inv)
    valid_nodes_alert = alert.loc[~alert.node_id.isin(data.inv)]
    if max(valid_nodes_alert['col_alert'].values) > 0:
        pos_alert = valid_nodes_alert[valid_nodes_alert.col_alert > 0]
        site_alert = trend.main(pos_alert, tsm_props.tsm_id, window.end,
                                data.inv)
    else:
        site_alert = max(
            lib.get_mode(list(valid_nodes_alert['col_alert'].values)))

    tsm_alert = pd.DataFrame({'ts': [window.end],
                              'tsm_id': [tsm_props.tsm_id],
                              'alert_level': [site_alert],
                              'ts_updated': [window.end]})
    qdb.alert_to_db(tsm_alert, 'tsm_alerts')
    qdb.write_op_trig(tsm_props.site_id, window.end)
    qdb.print_out(tsm_alert)
    qdb.print_out('run time = ' + str(datetime.now() - run_start))
    return tilt
def surficial(site_id, ts, alert_level):
    """Insert values to marker_observations, marker_data, marker_alerts,
    and operational_triggers to (re)trigger surficial alert.

    Args:
        site_id (int): ID of site to compute surficial analysis for.
        ts (datetime): Timestamp of alert trigger.
        alert_level (int: {0, 1, 2, 3}, default None): Surficial alert level.

    Returns:
        DataFrame: Ground measurement rows with displacement/time_delta/
        alert_level details for trigger tech info.
    """
    # get last data for site_id
    conn = mem.get('DICT_DB_CONNECTIONS')
    query = "SELECT ts, marker_id, marker_name, measurement "
    query += "FROM {analysis}.marker_observations "
    query += "INNER JOIN {common}.sites USING (site_id) "
    query += "INNER JOIN {analysis}.marker_data using (mo_id) "
    query += "INNER JOIN (SELECT data_id, displacement, time_delta, alert_level, processed FROM {analysis}.marker_alerts) sub1 USING (data_id) "
    query += "INNER JOIN (SELECT marker_id, marker_name FROM {analysis}.view_marker_history) sub2 USING (marker_id) "
    query += "WHERE site_id = {site_id} "
    query += "AND ts IN ( "
    query += "    SELECT MAX(ts) FROM {analysis}.marker_observations "
    query += "    WHERE ts < '{ts}' "
    query += "    AND site_id = {site_id})"
    query = query.format(analysis=conn['analysis']['schema'],
                         common=conn['common']['schema'],
                         site_id=site_id, ts=ts)
    df = db.df_read(query, resource='sensor_analysis')
    # compute diff in measurements to reach threshold
    # rate (cm/hr) chosen per target alert level; meas_diff is the total
    # displacement to add since the last observation
    if alert_level == 3:
        rate = 1.8
    elif alert_level in (1, 2):
        rate = 0.25
    else:
        rate = 0
    meas_diff = np.ceil(rate * (ts - df.ts[0]).total_seconds() / 3600)
    # input measurements in inbox
    gndmeas = df.loc[:, ['marker_id', 'marker_name', 'measurement']]
    gndmeas.loc[:, 'ts'] = ts
    gndmeas.loc[:, 'measurement'] += meas_diff
    if alert_level == 1:
        # alert level 1 needs an intermediate ("filler") observation
        # halfway between the last data and ts, displaced by meas_diff
        temp_gndmeas = gndmeas.copy()
        temp_gndmeas.loc[:, 'ts'] -= (ts - df.ts[0]) / 2
        temp_gndmeas.loc[:, 'measurement'] += meas_diff
        # filler measurement for alert level 1
        df_obv = pd.DataFrame({'meas_type': ['ROUTINE'],
                               'site_id': [site_id],
                               'weather': ['MAARAW'],
                               'observer_name': ['TOPSSOFTWAREINFRA'],
                               'reliability': [1],
                               'data_source': ['SMS'],
                               'ts': [temp_gndmeas.ts[0]]})
        # last_insert=True returns the auto-generated mo_id
        mo_id = int(db.df_write(data_table=sms.DataTable("marker_observations", df_obv),
                                resource='sensor_data',
                                last_insert=True)[0][0])
        temp_gndmeas.loc[:, 'mo_id'] = mo_id
        df_data = temp_gndmeas.loc[:, ['mo_id', 'marker_id', 'measurement']]
        db.df_write(data_table=sms.DataTable("marker_data", df_data),
                    resource='sensor_data')
        # run the surficial analysis on the filler observation
        surf.generate_surficial_alert(site_id=site_id,
                                      ts=temp_gndmeas.ts[0])
    # measurement for ts given
    df_obv = pd.DataFrame({'meas_type': ['ROUTINE'],
                           'site_id': [site_id],
                           'weather': ['MAARAW'],
                           'observer_name': ['TOPSSOFTWAREINFRA'],
                           'reliability': [1],
                           'data_source': ['SMS'],
                           'ts': [ts]})
    mo_id = int(db.df_write(data_table=sms.DataTable("marker_observations", df_obv),
                            resource='sensor_data',
                            last_insert=True)[0][0])
    gndmeas.loc[:, 'mo_id'] = mo_id
    df_data = gndmeas.loc[:, ['mo_id', 'marker_id', 'measurement']]
    db.df_write(data_table=sms.DataTable("marker_data", df_data),
                resource='sensor_data')
    surf.generate_surficial_alert(site_id=site_id, ts=ts)
    # details for trigger tech info
    time_delta = np.round((ts - df.ts[0]).total_seconds() / 3600, 2)
    if alert_level == 1:
        # the filler observation halved the effective measurement interval
        time_delta /= 2
    gndmeas.loc[:, 'displacement'] = meas_diff
    gndmeas.loc[:, 'time_delta'] = time_delta
    gndmeas.loc[:, 'alert_level'] = alert_level
    # writes to operational_triggers
    trigger_symbol = mem.get('df_trigger_symbols')
    trigger_sym_id = trigger_symbol.loc[(trigger_symbol.trigger_source == 'surficial') & (trigger_symbol.alert_level == alert_level), 'trigger_sym_id'].values[0]
    operational_trigger = pd.DataFrame({'site_id': [site_id],
                                        'trigger_sym_id': [trigger_sym_id],
                                        'ts': [ts],
                                        'ts_updated': [ts]})
    qdb.alert_to_db(operational_trigger, 'operational_triggers')
    return gndmeas