def main_l0t(ts=None):
    """Downgrade sites whose acknowledged 'l0t' SMS alerts are still valid:
    each such site's latest 'l0t' site_level_alert row is re-written as 'l2'.

    ts : datetime, optional
        Reference time; defaults to the current time *at call time*.
        (The previous signature evaluated datetime.now() once at import
        time, so every later call reused that stale timestamp.)
    """
    if ts is None:
        ts = datetime.now()
    # acknowledged, still-valid 'l0t' alerts within the past hour
    query = "SELECT * FROM smsalerts where ts_ack >= '%s' and alertstat = 'valid' and alertmsg like '%s'" % (
        pd.to_datetime(ts) - timedelta(hours=1), '%l0t%')
    df = q.GetDBDataFrame(query)
    if len(df) != 0:
        dfid = df.groupby('alert_id')
        alertdf = dfid.apply(alertmsg)
        alertdf = alertdf.reset_index(drop=True)
        if len(alertdf) != 0:
            # render the site list as a SQL tuple, e.g. ('abc', 'def')
            sites = str(list(alertdf.site.values)).replace('[', '(').replace(
                ']', ')')
            # latest 'l0t' site_level_alert row per affected site
            query = "SELECT * FROM (SELECT * FROM site_level_alert WHERE site in %s AND alert = 'l0t' ORDER BY timestamp DESC) AS SUB GROUP BY site" % sites
            df = q.GetDBDataFrame(query)
            df['alert'] = 'l2'
            engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb +
                                   '@' + q.Hostdb + ':3306/' + q.Namedb)
            for i in range(len(df)):
                try:
                    df[i:i + 1].to_sql(name='site_level_alert', con=engine,
                                       if_exists='append', schema=q.Namedb,
                                       index=False)
                except Exception:
                    # best-effort insert: duplicate rows are skipped
                    pass
def SiteCoord():
    """Return Senslope rain-gauge coordinates.

    Output DataFrame columns: dev_id, lat, lon, location, province and
    type (constant 'SenslopeRG').
    """
    RGdf = q.GetRainProps('rain_props')
    RGdf = RGdf.loc[RGdf.name != 'msl']  # msl is handled elsewhere
    # gauges reporting via ARQ: strip the trailing character, match
    # site_column names by regex, and tag device ids with a 'w' suffix
    RG = list(RGdf.rain_arq.dropna().apply(lambda x: x[:len(x) - 1]))
    RG = '|'.join(RG)
    query = "SELECT * FROM senslopedb.site_column where name REGEXP '%s'" % RG
    RGCoord = q.GetDBDataFrame(query)
    RGCoord['name'] = RGCoord.name.apply(lambda x: x + 'w')
    # gauges reporting via Senslope columns
    RG = list(RGdf.rain_senslope.dropna().apply(lambda x: x[:len(x) - 1]))
    RG = '|'.join(RG)
    query = "SELECT * FROM senslopedb.site_column where name REGEXP '%s'" % RG
    df = q.GetDBDataFrame(query)
    df['name'] = df.name.apply(lambda x: x[0:3] + 'w')
    RGCoord = RGCoord.append(df)
    RGCoord = RGCoord.drop_duplicates(
        ['sitio', 'barangay', 'municipality', 'province'])
    RGCoord = RGCoord[['name', 'lat', 'lon', 'barangay', 'province']]
    RGCoord = RGCoord.rename(columns={
        'name': 'dev_id',
        'barangay': 'location'
    })
    RGCoord['type'] = 'SenslopeRG'
    # sort_values replaces the removed DataFrame.sort (consistent with the
    # rest of the file, e.g. the column-alert main())
    RGCoord = RGCoord.sort_values('dev_id')
    return RGCoord
def write_site_alert(site, window):
    """Roll the column-level alerts of a site up into one site-level alert.

    site : sensor/site name; 'messb' and 'mesta' are matched exactly and
    later mapped to 'msl'/'msu'; every other name is matched by its
    3-letter prefix.
    window : object whose .end timestamp bounds the lookup and stamps the
    output row.
    Returns the one-row site-level alert DataFrame after pushing it to the
    DB via alert_toDB().
    """
    if site == 'messb' or site == 'mesta':
        query = "SELECT * FROM ( SELECT * FROM senslopedb.column_level_alert WHERE site = '%s' AND updateTS >= '%s' ORDER BY timestamp DESC) AS sub GROUP BY site" %(site, window.end)
    else:
        site = site[0:3] + '%'
        query = "SELECT * FROM ( SELECT * FROM senslopedb.column_level_alert WHERE site LIKE '%s' AND updateTS >= '%s' ORDER BY timestamp DESC) AS sub GROUP BY site" %(site, window.end)
    df = q.GetDBDataFrame(query)
    # site alert = worst column alert, in L3 > L2 > L0 priority
    column_alerts = list(df.alert.values)
    site_alert = 'ND'
    for level in ('L3', 'L2', 'L0'):
        if level in column_alerts:
            site_alert = level
            break
    # legacy site-name remapping
    site = {'messb': 'msl', 'mesta': 'msu'}.get(site, site)
    output = pd.DataFrame({'timestamp': [window.end],
                           'site': [site[0:3]],
                           'source': ['sensor'],
                           'alert': [site_alert],
                           'updateTS': [window.end]})
    alert_toDB(output, 'site_level_alert', window)
    return output
def main_regen(month='', site_ratio_start='', site_ratio_end=''):
    """Regenerate alerts for a slice of sites over one month.

    month : int month of 2017 (falls back to sys.argv[1])
    site_ratio_start / site_ratio_end : slice bounds into the sorted list
    of site prefixes (fall back to sys.argv[2] / sys.argv[3]).
    """
    if month == '':
        month = int(sys.argv[1])
    if site_ratio_start == '':
        site_ratio_start = int(sys.argv[2])
    if site_ratio_end == '':
        try:
            site_ratio_end = int(sys.argv[3])
        except:
            site_ratio_end = ''
    df = q.GetDBDataFrame(
        "SELECT name, date_activation FROM site_column" + " ORDER BY name")
    df = df[['name']]
    # site code = first 3 characters of the column name
    df['site'] = df['name'].apply(lambda x: x[0:3])
    all_sites = sorted(set(df.site))
    if site_ratio_end == '':
        site_ratio_end = len(all_sites)
    if site_ratio_start == '':
        site_ratio_start = 0
    selected = all_sites[site_ratio_start:site_ratio_end]
    df = df[df.site.isin(selected)]
    df.groupby('site', as_index=False).apply(recent_end_ts, month=month)
def tsm_plot(tsm_name, end, shift_datetime):
    """Render the realtime plot for one TSM column, provided it has data
    at or after shift_datetime; silently skip otherwise."""
    query = "SELECT max(timestamp) AS ts FROM %s" % tsm_name
    try:
        latest = pd.to_datetime(qdb.GetDBDataFrame(query)['ts'].values[0])
        if latest < shift_datetime:
            return
    except:
        # missing table / no rows: nothing to plot
        return
    # never plot past the requested end timestamp
    plot_ts = min(latest, end)
    window, config = rtw.getwindow(plot_ts)
    sensors = qdb.GetSensorList(tsm_name)
    monitoring = gen.genproc(sensors[0], window, config,
                             fixpoint=config.io.column_fix)
    plotter.main(monitoring, window, config, realtime=False,
                 non_event_path=False)
def to_db(df):
    """Insert a new uptime record, or extend the matching existing one.

    df : one-row DataFrame with ts, ts_updated and site_count columns.
    A new row is pushed when the site count changed (or nothing overlaps);
    otherwise the overlapping record's ts_updated is slid forward.
    """
    print df
    # create the destination table on first use
    if not qdb.DoesTableExist('uptime'):
        create_uptime()
    ts = pd.to_datetime(df['ts'].values[0])
    # most recent record that spans ts, or one that ended within the
    # previous half hour (so contiguous records get merged)
    query = "SELECT * FROM uptime "
    query += "WHERE (ts <= '%s' " % ts
    query += " AND ts_updated >= '%s') " % ts
    query += "OR (ts_updated >= '%s' " % (ts - timedelta(hours=0.5))
    query += " AND ts_updated <= '%s') " % ts
    query += "ORDER BY ts DESC LIMIT 1"
    prev_uptime = qdb.GetDBDataFrame(query)
    if len(prev_uptime) == 0 or prev_uptime['site_count'].values[0] != df[
            'site_count'].values[0]:
        # site count changed (or no record to extend): append a new row
        qdb.PushDBDataFrame(df, 'uptime', index=False)
    elif pd.to_datetime(
            prev_uptime['ts_updated'].values[0]) < df['ts_updated'].values[0]:
        # same site count: only extend the previous record's ts_updated
        query = "UPDATE uptime "
        query += "SET ts_updated = '%s' " % pd.to_datetime(
            df['ts_updated'].values[0])
        query += "WHERE uptime_id = %s" % prev_uptime['uptime_id'].values[0]
        db, cur = qdb.SenslopeDBConnect(qdb.Namedb)
        cur.execute(query)
        db.commit()
        db.close()
def GetASTIdata(site, rain_noah, offsetstart):
    """Fetch rainfall recorded by a NOAH rain gauge from the local DB.

    site : str (not used in the query; kept for interface compatibility)
    rain_noah : float; NOAH rain gauge id near the site (may be NaN)
    offsetstart : datetime; start of interval, with offset to account for
        moving-window operations
    Returns a DataFrame indexed by timestamp with a 'rain' column, or an
    empty DataFrame when the per-gauge table does not exist.
    """
    # data from noah rain gauge saved at local database
    try:
        # ids arrive as floats; cast to int so the table name has no '.0'
        if not math.isnan(rain_noah):
            rain_noah = int(rain_noah)
        # NOTE(review): this connection appears unused below -- confirm
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = "select timestamp,rval from senslopedb.rain_noah_%s" % str(
            rain_noah)
        query = query + " where timestamp >= timestamp('%s')" % offsetstart
        query = query + " order by timestamp desc"
        df = q.GetDBDataFrame(query)
        df.columns = ['timestamp', 'rain']
        df.timestamp = pd.to_datetime(df.timestamp)
        df.set_index('timestamp', inplace=True)
        return df
    except:
        # missing table (e.g. NaN id) -> empty result, by design
        print 'Table senslopedb.rain_noah_' + str(rain_noah) + " doesn't exist"
        df = pd.DataFrame(data=None)
        return df
def subsurface(site, end, shift_datetime):
    """Plot every TSM sensor column installed at the given site."""
    # all sensor columns whose name starts with the 3-letter site code
    pattern = site[0:3] + '%'
    query = "SELECT * FROM site_column_props where name LIKE '%s'" % pattern
    props = qdb.GetDBDataFrame(query)
    for tsm_name in set(props['name'].values):
        tsm_plot(tsm_name, end, shift_datetime)
def alert_toDB(df, table_name, window):
    """Upsert a one-row sensor alert into senslopedb.<table_name>.

    If the latest alert for the site within the past hour differs from the
    new one (or none exists / the lookup fails), the new row is appended;
    if it carries the same alert level, only its updateTS is refreshed.

    df : one-row DataFrame with site, alert, timestamp, updateTS, source
    table_name : str; destination table
    window : object whose .end marks the current processing timestamp
    """
    # latest still-fresh alert for this site (updated within the last hour)
    query = "SELECT * FROM senslopedb.%s WHERE site = '%s' and timestamp <= '%s' AND updateTS >= '%s' ORDER BY timestamp DESC LIMIT 1" % (
        table_name, df.site.values[0], window.end,
        window.end - timedelta(hours=1))
    try:
        df2 = q.GetDBDataFrame(query)
    except:
        # lookup failure is treated the same as "no previous alert"
        df2 = pd.DataFrame()
    if len(df2) == 0 or df2.alert.values[0] != df.alert.values[0]:
        # new or changed alert: append a fresh row
        engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' +
                               q.Hostdb + ':3306/' + q.Namedb)
        df.to_sql(name=table_name, con=engine, if_exists='append',
                  schema=q.Namedb, index=False)
    elif df2.alert.values[0] == df.alert.values[0]:
        # unchanged alert: just slide the existing row's updateTS forward
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = "UPDATE senslopedb.%s SET updateTS='%s' WHERE site = '%s' and source = 'sensor' and alert = '%s' and timestamp = '%s'" % (
            table_name, window.end, df2.site.values[0], df2.alert.values[0],
            pd.to_datetime(str(df2.timestamp.values[0])))
        cur.execute(query)
        db.commit()
        db.close()
def main(name='',custom_end = ''):
    """Generate the column-level alert for one sensor column and roll it
    up to the site level.

    name : sensor column name (falls back to sys.argv[1])
    custom_end : optional end timestamp for the processing window
    Returns (column_level_alert DataFrame, monitoring object).
    """
    if name == '':
        name = sys.argv[1].lower()
    start = datetime.now()
    print "=========================== {} {} =========================".format(str(name), custom_end)
    window,config = rtw.getwindow(end = custom_end )
    col = q.GetSensorList(name)
    monitoring = g.genproc(col[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)
    # per-node displacement/velocity alerts over the window
    monitoring_vel = monitoring.vel[window.start:window.end]
    monitoring_vel = monitoring_vel.reset_index().sort_values('ts',ascending=True)
    nodal_dv = monitoring_vel.groupby('id')
    alert = nodal_dv.apply(node_alert2, colname=monitoring.colprops.name, num_nodes=monitoring.colprops.nos, T_disp=config.io.t_disp, T_velL2=config.io.t_vell2, T_velL3=config.io.t_vell3, k_ac_ax=config.io.k_ac_ax, lastgooddata=lgd,window=window,config=config)
    alert = column_alert(alert, config.io.num_nodes_to_check, config.io.k_ac_ax)
    # drop alerts raised by nodes flagged as not working
    not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site == name].node.values
    for i in not_working:
        alert = alert.loc[alert.id != i]
    # column alert = worst node alert (L3 > L2 > mode of the rest)
    if 'L3' in list(alert.col_alert.values):
        site_alert = 'L3'
    elif 'L2' in list(alert.col_alert.values):
        site_alert = 'L2'
    else:
        site_alert = min(getmode(list(alert.col_alert.values)))
    column_level_alert = pd.DataFrame({'timestamp': [window.end], 'site': [monitoring.colprops.name], 'source': ['sensor'], 'alert': [site_alert], 'updateTS': [window.end]})
    print column_level_alert
    if site_alert in ('L2', 'L3'):
        # L2/L3 are routed through the trending/adjustment pipeline instead
        A.main(monitoring.colprops.name,custom_end)
    else:
        alert_toDB(column_level_alert, 'column_level_alert', window)
    write_site_alert(monitoring.colprops.name, window)
    #######################
    query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' and source = 'public' ORDER BY updateTS DESC LIMIT 1" %monitoring.colprops.name[0:3]
    public_alert = q.GetDBDataFrame(query)
    # plotting hooks (currently stubbed out as prints)
    if public_alert.alert.values[0] != 'A0' or RoundTime(pd.to_datetime(public_alert.timestamp.values[0])) == RoundTime(window.end):
        plot_time = ['07:30:00', '19:30:00']
        if str(window.end.time()) in plot_time:
            print "Plotter.main(monitoring, window, config)"
        # NOTE(review): nesting reconstructed from collapsed source -- the
        # elif is paired with the inner if here; confirm against history
        elif RoundTime(pd.to_datetime(public_alert.timestamp.values[0])) == RoundTime(window.end):
            print "Plotter.main(monitoring, window, config)"
    #######################
    print 'run time =', datetime.now()-start
    return column_level_alert,monitoring
def NOAHRGCoord():
    """Return NOAH rain-gauge coordinates.

    Output DataFrame columns include dev_id ('rain_noah_<id>'), lat, lon
    and type (constant 'NOAHRG').
    """
    # NOTE: the previous version opened a SenslopeDBConnect connection here
    # that was never used and never closed (leak); GetDBDataFrame manages
    # its own connection, so the extra one is removed.
    query = "SELECT * FROM senslopedb.rain_gauge"
    RGCoord = q.GetDBDataFrame(query)
    RGCoord['dev_id'] = RGCoord.dev_id.apply(lambda x: 'rain_noah_' + str(x))
    RGCoord = RGCoord.rename(columns={'latitude': 'lat', 'longitude': 'lon'})
    RGCoord['type'] = 'NOAHRG'
    return RGCoord
def alert_regen(df, ts):
    """Re-run alert generation for one sensor column if it has data at ts.

    df : groupby frame whose 'name' column holds the sensor column name
    ts : timestamp to regenerate the alert for
    """
    col = df['name'].values[0]
    query = "SELECT max(timestamp) FROM %s " % col
    data_ts = q.GetDBDataFrame(query).values[0][0]
    # no data at all: nothing to regenerate (the previous 'pass' fell
    # through and relied on a NaT comparison evaluating False)
    if data_ts is None:
        return
    if pd.to_datetime(data_ts) >= ts:
        main(col, ts)
def alert_toDB(df, table_name, window):
    """Duplicate-safe upsert of a 'noadjfilt' alert row into <table_name>.

    Appends the new row only when the alert changed AND no row with the
    same (timestamp, site[, source]) already exists; otherwise extends the
    existing row's updateTS.

    df : one-row DataFrame with site, alert, timestamp, updateTS
    window : object whose .end marks the current processing timestamp
    """
    # latest still-fresh 'noadjfilt' alert (updated within the last 30 min)
    query = "SELECT * FROM %s WHERE site = '%s' and source = 'noadjfilt' and timestamp <= '%s' AND updateTS >= '%s' ORDER BY timestamp DESC LIMIT 1" % (
        table_name, df.site.values[0], window.end,
        window.end - timedelta(hours=0.5))
    try:
        df2 = q.GetDBDataFrame(query)
    except:
        df2 = pd.DataFrame()
    try:
        same_alert = df2['alert'].values[0] == df['alert'].values[0]
    except:
        # empty df2 (no previous alert / failed lookup)
        same_alert = False
    # does an identical (timestamp, site[, source]) row already exist?
    query = "SELECT EXISTS(SELECT * FROM %s" % table_name
    query += " WHERE timestamp = '%s' AND site = '%s'" % (pd.to_datetime(
        df['updateTS'].values[0]), df['site'].values[0])
    if table_name == 'site_level_alert':
        query += " AND source = 'noadjfilt'"
    query += ")"
    if q.GetDBDataFrame(query).values[0][0] == 1:
        inDB = True
    else:
        inDB = False
    if (len(df2) == 0 or not same_alert) and not inDB:
        # new or changed alert, not yet recorded: append it
        engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' +
                               q.Hostdb + ':3306/' + q.Namedb)
        df.to_sql(name=table_name, con=engine, if_exists='append',
                  schema=q.Namedb, index=False)
    elif same_alert and df2['updateTS'].values[0] < df['updateTS'].values[0]:
        # unchanged alert: slide the existing row's updateTS forward
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = "UPDATE senslopedb.%s SET updateTS='%s' WHERE site = '%s' and source = 'noadjfilt' and alert = '%s' and timestamp = '%s'" % (
            table_name, window.end, df2.site.values[0], df2.alert.values[0],
            pd.to_datetime(str(df2.timestamp.values[0])))
        cur.execute(query)
        db.commit()
        db.close()
def main_inv(ts=None):
    """Process invalidated alerts: purge their public/internal DB rows and
    write the sites with currently-invalid alerts to InvalidAlert.txt.

    ts : datetime, optional
        Reference time; defaults to the current time at call time (the
        previous def-time datetime.now() default went stale after import).
    Returns the DataFrame that was written to InvalidAlert.txt.
    """
    if ts is None:
        ts = datetime.now()
    # sites with invalid alert within the last 30 days
    query = "SELECT * FROM smsalerts where ts_set >= '%s' and alertstat = 'invalid'" % (
        pd.to_datetime(ts) - timedelta(30))
    df = q.GetDBDataFrame(query)
    # wrong format in db: patch two known malformed rows in place
    if 409 in df['alert_id'].values:
        alertmsg409 = 'As of 2017-02-03 09:36\nosl:A2:ground'
        df.loc[df['alert_id'] == 409, ['alertmsg']] = alertmsg409
    if 408 in df['alert_id'].values:
        alertmsg408 = 'As of 2017-02-02 20:11\npla:A2:"sensor(plat:20)"'
        df.loc[df['alert_id'] == 408, ['alertmsg']] = alertmsg408
    dfid = df.groupby('alert_id')
    alertdf = dfid.apply(alertmsg)
    alertdf = alertdf.reset_index(drop=True)
    alertdf = alertdf.loc[(alertdf.alert != 'l0t')]
    # remove invalid public and internal alerts raised in the last 3 hours
    invalertdf = alertdf.loc[alertdf.timestamp >= ts - timedelta(hours=3)]
    invalertdf = invalertdf[~(invalertdf.source.str.contains('sensor'))]
    invalertdf = invalertdf.loc[(invalertdf.alert != 'A1')]
    sitealertdf = invalertdf.groupby('site')
    sitealertdf.apply(removeinvpub)
    # sites with a current (non-A0, still-fresh) public alert
    query = "(SELECT * FROM (SELECT * FROM site_level_alert WHERE source = 'public' AND alert != 'A0' AND updateTS >= '%s' ORDER BY timestamp DESC) AS SUB GROUP BY site)" % (
        ts - timedelta(hours=0.5))
    withalert = q.GetDBDataFrame(query)
    sitewithalert = withalert.groupby('site')
    alertdf = alertdf[alertdf.site.isin(withalert['site'].values)]
    finaldf = sitewithalert.apply(currentinv, df=alertdf)
    # sort_values replaces the removed DataFrame.sort; drop is the boolean
    # True (it was previously the string 'True' -- truthy but wrong type)
    finaldf = finaldf.sort_values('timestamp', ascending=False).drop_duplicates(
        ['site', 'source'], keep='first').reset_index(drop=True)
    finaldf.to_csv('InvalidAlert.txt', sep=':', header=True, index=False,
                   mode='w')
    return finaldf
def getGroundDF(site, start, end):
    """Fetch surficial ground (crack) measurements for a site within
    (start, end], with a matplotlib datenum column 'gts' appended."""
    query = ('SELECT timestamp,site_id,crack_id, meas FROM gndmeas '
             + "where site_id = '%s' " % site
             + "and timestamp <= '%s' " % end
             + "and timestamp > '%s' " % start)
    measurements = q.GetDBDataFrame(query)
    measurements['gts'] = measurements.timestamp.apply(datenum)
    return measurements
def WriteToDB(site):
    """Raise an on-demand ('d1') alert for a site.

    Writes 'on demand' and 'internal' rows to site_level_alert; when the
    current public alert is still 'A0', an 'A1' public row is written too.
    """
    ts = datetime.now()
    ts = adjust_time(ts)
    # current public and internal alert levels for the site
    query = "SELECT * FROM site_level_alert WHERE source = 'public' and site = '%s' ORDER BY timestamp DESC LIMIT 1" %site
    prevpubdf = q.GetDBDataFrame(query)
    query = "SELECT * FROM site_level_alert WHERE source = 'internal' and site = '%s' ORDER BY timestamp DESC LIMIT 1" %site
    previnternaldf = q.GetDBDataFrame(query)
    internal = previnternaldf.alert.values[0]
    engine = create_engine('mysql://'+q.Userdb+':'+q.Passdb+'@'+q.Hostdb+':3306/'+q.Namedb)
    if prevpubdf.alert.values[0] == 'A0':
        # escalate: public A0 -> A1, plus on-demand and internal markers
        d1df = pd.DataFrame({'timestamp': [ts], 'site': [site[0:3]], 'source':['on demand'], 'alert':['d1'], 'updateTS': [ts]})
        pubdf = pd.DataFrame({'timestamp': [ts], 'site': [site[0:3]], 'source':['public'], 'alert':['A1'], 'updateTS': [ts]})
        # NOTE(review): this branch appends '-D' while the else branch
        # appends 'D' (no dash) -- confirm the inconsistency is intended
        internaldf = pd.DataFrame({'timestamp': [ts], 'site': [site[0:3]], 'source':['internal'], 'alert':[internal + '-D'], 'updateTS': [ts]})
        pubdf.to_sql(name = 'site_level_alert', con = engine, if_exists = 'append', schema = q.Namedb, index = False)
        d1df.to_sql(name = 'site_level_alert', con = engine, if_exists = 'append', schema = q.Namedb, index = False)
        internaldf.to_sql(name = 'site_level_alert', con = engine, if_exists = 'append', schema = q.Namedb, index = False)
    else:
        # public alert already above A0: only on-demand and internal rows
        d1df = pd.DataFrame({'timestamp': [ts], 'site': [site[0:3]], 'source':['on demand'], 'alert':['d1'], 'updateTS': [ts]})
        internaldf = pd.DataFrame({'timestamp': [ts], 'site': [site[0:3]], 'source':['internal'], 'alert':[internal + 'D'], 'updateTS': [ts]})
        d1df.to_sql(name = 'site_level_alert', con = engine, if_exists = 'append', schema = q.Namedb, index = False)
        internaldf.to_sql(name = 'site_level_alert', con = engine, if_exists = 'append', schema = q.Namedb, index = False)
def volt_filter(dfc):
    """Drop rows whose battery voltage is outside the node's valid range.

    dfc : accel data assumed to hold a single node only (original comment:
    "assume for a single node lang ito"); site name and node id are read
    from the first row.
    Returns the filtered copy; the input frame is not modified.
    """
    df = dfc.copy()
    # positional access: column 1 = site name, column 2 = node id
    # (assumes the caller's column order -- TODO confirm against caller)
    name = str(df.head(1).iloc[0][1])
    n_id = int(df.head(1).iloc[0][2])
    query = """ select vmax,vmin from senslopedb.node_accel_table where site_name = '%s' and node_id = %d limit 1""" % (
        name, n_id)
    dfv = qdb.GetDBDataFrame(query)
    # the query selects (vmax, vmin): vmax is column 0, vmin is column 1
    vmin = dfv.head(1).iloc[0][1]
    vmax = dfv.head(1).iloc[0][0]
    df = df[(df.batt >= vmin) & (df.batt <= vmax)]
    return df
def main(start, end):
    """Compute public-alert system uptime on a 30-minute grid.

    Returns (percent_up, per-timestamp status DataFrame, raw alert rows).
    """
    # internal alerts (excluding ND*) overlapping the [start, end] span
    query = ("SELECT * FROM %s.site_level_alert where source = 'internal' "
             "and alert not like '%s' and "
             "((timestamp <= '%s' and updateTS >= '%s') "
             "or (timestamp >= '%s' and timestamp <= '%s') "
             "or (updateTS >= '%s' and updateTS <= '%s'))"
             % (q.Namedb, 'ND%', start, end, start, end, start, end))
    df = q.GetDBDataFrame(query)
    # NOTE(review): the status grid is hard-coded to 2017-01-01..2017-04-01
    # regardless of start/end -- confirm this is intentional
    rangeTS = pd.date_range(start='2017-01-01', end='2017-04-01',
                            freq='30min')[0:-1]
    pub_uptime = pd.DataFrame({'ts': rangeTS, 'status': ['-'] * len(rangeTS)})
    pub_uptime = pub_uptime.groupby('ts').apply(uptime, df=df)
    down_count = len(pub_uptime[pub_uptime.status == 'down'])
    percent_up = 100 - (100. * down_count / len(pub_uptime))
    return percent_up, pub_uptime, df
def get_surficial_df(site, start, end):
    """Fetch ground (crack) measurements for a site within (start, end],
    uppercase the marker ids, and zero each marker's series via zeroed()."""
    query = ("SELECT timestamp, site_id, crack_id, meas FROM gndmeas"
             + " WHERE site_id = '%s'" % site
             + " AND timestamp <= '%s'" % end
             + " AND timestamp > '%s'" % start
             + " ORDER BY timestamp")
    df = qdb.GetDBDataFrame(query)
    df['timestamp'] = pd.to_datetime(df['timestamp'])
    # marker ids are stored in mixed case; normalize to uppercase
    df['crack_id'] = [marker.upper() for marker in df['crack_id']]
    df = df.groupby('crack_id', as_index=False).apply(zeroed, column='meas')
    return df
def recent_end_ts(df, month):
    """Regenerate one site's alerts over a month at 30-minute steps.

    df : groupby frame for one site ('site' and 'name' columns)
    month : int month of 2017 to regenerate
    Resumes from the last 'noadjfilt' updateTS before the month's end, or
    from the start of the month when no prior alert exists.
    """
    site = df['site'].values[0]
    query = "SELECT max(updateTS) FROM site_level_alert "
    query += "where site like '%s' " % (site + '%')
    # trailing space is required: without it the concatenation produced
    # ...'noadjfilt'and timestamp..., an SQL syntax error
    query += "and source = 'noadjfilt' "
    query += "and timestamp < '2017-%s-01'" % (month + 1)
    end_ts = q.GetDBDataFrame(query).values[0][0]
    if end_ts is None:
        end_ts = '2017-%s-01' % month
    end_ts = pd.to_datetime(end_ts)
    ts = end_ts
    # NOTE(review): month + 1 yields '2017-13-01' for December -- only
    # safe if this regen is never run with month = 12
    while ts < pd.to_datetime('2017-%s-01' % (month + 1)):
        col_df = df.groupby('name', as_index=False)
        col_df.apply(alert_regen, ts=ts)
        ts += timedelta(hours=0.5)
def plotRain(rain, rainsite, ax):
    """Plot 1-day/3-day cumulative rainfall and the 2-yr-max threshold
    lines for a rain gauge onto the given axes. Returns 0."""
    ax.plot(rain.gts, rain.one_d, color='green', label='1-day cml', alpha=1)
    ax.plot(rain.gts, rain.thr_d, color='blue', label='3-day cml', alpha=1)
    # mes* sites share the 'msu' rain_props entry
    rain_props_name = 'msu' if rainsite[0:3] == 'mes' else rainsite[0:3]
    query = "SELECT * FROM rain_props where name = '%s'" % rain_props_name
    props = q.GetDBDataFrame(query)
    twoyrmax = props['max_rain_2year'].values[0]
    halfmax = twoyrmax / 2
    # threshold lines drawn as flat series across the plotted span
    n_pts = len(rain.gts)
    ax.plot(rain.gts, [halfmax] * n_pts, color='green',
            label='1/2 of 2-yr max', alpha=1, linestyle='--')
    ax.plot(rain.gts, [twoyrmax] * n_pts, color='blue',
            label='2-yr max', alpha=1, linestyle='--')
    ax.set_ylim([0, rain.thr_d.max() * 1.1])
    ax.set_title("%s Rainfall Data" % rainsite)
    ax.set_ylabel('1D, 3D Rain (mm)')
    ax.set_yticks(np.linspace(ax.get_ybound()[0], twoyrmax + 5, 7))
    ax.legend(loc='upper left', fontsize='small', fancybox=True,
              framealpha=0.5)
    return 0
def main(end=''): start = datetime.now() if end == '': try: end = pd.to_datetime(sys.argv[1]) if end > start + timedelta(hours=0.5): print 'invalid timestamp' return except: end = datetime.now() else: end = pd.to_datetime(end) end = round_data_time(end) shift_datetime = round_shift_time(end) if end.time() not in [ time(3, 30), time(7, 30), time(11, 30), time(15, 30), time(19, 30), time(23, 30) ]: return query = "SELECT * FROM site_level_alert " query += "WHERE source = 'public' " query += "AND ((updateTS >= '%s' " % (end - timedelta(hours=0.5)) query += " AND timestamp <= '%s' " % end query += " AND alert REGEXP '1|2|3') " query += "OR (timestamp = '%s' " % end query += " AND alert = 'A0')) " query += "ORDER BY timestamp DESC" public_alert = qdb.GetDBDataFrame(query) if len(public_alert) != 0: rain.main(site='', end=end, Print=True, db_write=False) site_public_alert = public_alert.groupby('site', as_index=False) site_public_alert.apply(site_plot, end=end, shift_datetime=shift_datetime)
def plot_rain(ax, df, rain_gauge, plot_inst=True):
    """Draw 1-day/3-day cumulative rainfall (plus optional 30-min bars)
    and the 2-yr-max threshold lines on the given axes.

    ax : matplotlib axes
    df : DataFrame with ts, one, three and rain columns
    rain_gauge : label used in the plot title
    plot_inst : also draw instantaneous rainfall bars on a twin axis
    """
    ax.plot(df.ts, df.one, color='green', label='1-day cml', alpha=1)
    ax.plot(df.ts, df.three, color='blue', label='3-day cml', alpha=1)
    # clamp the y-axis during extreme rainfall
    if max(list(df.one) + list(df.three)) >= 300:
        ax.set_ylim([0, 300])
    if plot_inst:
        ax2 = ax.twinx()
        # bar width scaled to the plotted time span
        width = float(0.004 * (max(df['ts']) - min(df['ts'])).days)
        ax2.bar(df['ts'].apply(lambda x: pltdates.date2num(x)),
                df.rain,
                width=width,
                alpha=0.05,
                color='k',
                label='30min rainfall')
        ax2.xaxis_date()
    # NOTE(review): 'site' is not a parameter -- this relies on a
    # module-level global set elsewhere in the file; confirm intended
    query = "SELECT * FROM rain_props where name = '%s'" % site
    twoyrmax = qdb.GetDBDataFrame(query)['max_rain_2year'].values[0]
    halfmax = twoyrmax / 2
    ax.plot(df.ts, [halfmax] * len(df.ts), color='green',
            label='half of 2-yr max', alpha=1, linestyle='--')
    ax.plot(df.ts, [twoyrmax] * len(df.ts), color='blue',
            label='2-yr max', alpha=1, linestyle='--')
    ax.set_title("%s Rainfall Data" % rain_gauge.upper(), fontsize='medium')
    ax.set_ylabel('1D, 3D Rain\n(mm)', fontsize='small')
    ax.legend(loc='upper left', fontsize='x-small', fancybox=True,
              framealpha=0.5)
def to_write_notif(site, end):
    """Decide whether a rainfall notification should be written for a site.

    Returns True when no notification was recorded within the past day, or
    when `end` falls on one of the 4-hourly release times.
    """
    try:
        query = "SELECT * FROM rainfall_notif"
        # site is quoted: it is a string key, and the previous unquoted
        # '%s' made this query error out (falling into the except branch)
        query += " WHERE ts >= '%s' AND site = '%s'" % (end - timedelta(1),
                                                        site)
        query += " ORDER BY ts DESC LIMIT 1"
        notif = q.GetDBDataFrame(query)
    except Exception:
        notif = pd.DataFrame()
    release_times = [time(3, 30), time(7, 30), time(11, 30),
                     time(15, 30), time(19, 30), time(23, 30)]
    if len(notif) == 0 or end.time() in release_times:
        write_notif = True
    else:
        write_notif = False
    return write_notif
def removeinvpub(df):
    """Delete the public (and matching internal) site_level_alert rows of
    an alert event that was later tagged invalid.

    df : invalid-alert rows for one site (groupby-apply input).
    Best-effort by design: any failure (e.g. no matching DB rows) is
    silently ignored.
    """
    try:
        ts = pd.to_datetime(df['timestamp'].values[0])
        db, cur = q.SenslopeDBConnect(q.Namedb)
        # locate the public alert row raised around the invalid event
        # (from the start of that day up to 4 hours after the timestamp)
        query = "SELECT * FROM (SELECT * FROM site_level_alert WHERE site = '%s' and source = 'public' and alert like '%s' and timestamp >= '%s' and updateTS <= '%s' order by timestamp desc) AS sub GROUP BY source" % (
            df['site'].values[0], df['alert'].values[0] + '%', ts.date(),
            ts + timedelta(hours=4))
        df = q.GetDBDataFrame(query)
        ts = pd.to_datetime(df['timestamp'].values[0])
        # delete the public alert row ...
        query = "DELETE FROM site_level_alert where site = '%s' and source = 'public' and alert = '%s'" % (
            df['site'].values[0], df['alert'].values[0])
        query += " and timestamp = '%s'" % ts
        cur.execute(query)
        db.commit()
        # ... and the internal alert raised at the same timestamp
        query = "DELETE FROM site_level_alert where site = '%s' and source = 'internal' and alert like '%s'" % (
            df['site'].values[0], df['alert'].values[0] + '%')
        query += " and timestamp = '%s'" % ts
        cur.execute(query)
        db.commit()
        db.close()
    except:
        # nothing to clean up when the lookup finds no matching event
        pass
def surficial(site, end, shift_datetime):
    """Plot surficial (ground measurement) data for a site when fresh
    measurements exist within [shift_datetime, end]."""
    # some sites log ground measurements under a legacy site_id
    legacy_ids = {'bto': 'bat', 'mng': 'man', 'png': 'pan',
                  'jor': 'pob', 'tga': 'tag'}
    surficial_site = legacy_ids.get(site, site)
    query = "SELECT max(timestamp) AS ts FROM gndmeas "
    query += "WHERE site_id = '%s' " % surficial_site
    query += "AND timestamp >= '%s' " % shift_datetime
    query += "AND timestamp <= '%s' " % end
    latest_ts = qdb.GetDBDataFrame(query)['ts'].values[0]
    if latest_ts is not None:
        surf_plot.PlotForEvent(surficial_site, latest_ts)
def currentinv(withalert, df):
    """Return the invalid-alert rows of `df` belonging to one site's
    current monitoring event.

    withalert : groupby-apply frame for one site currently on alert
    df : all parsed invalid alerts
    The event start is inferred from up to three previous non-A0 public
    alerts: consecutive alerts whose gap is <= 30 min are treated as one
    event, and monitoring starts at the earliest alert of that chain.
    """
    site = withalert['site'].values[0]
    query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' AND source = 'public' AND alert != 'A0' ORDER BY timestamp DESC LIMIT 3" % site
    prev_PAlert = q.GetDBDataFrame(query)
    # NOTE(review): len(prev_PAlert) == 0 falls into the final branch and
    # would raise IndexError -- the caller apparently guarantees >= 1 row
    # one prev alert
    if len(prev_PAlert) == 1:
        start_monitor = pd.to_datetime(prev_PAlert.timestamp.values[0])
    # two prev alert
    elif len(prev_PAlert) == 2:
        # one event with two prev alert
        if pd.to_datetime(prev_PAlert['timestamp'].values[0]) - pd.to_datetime(
                prev_PAlert['updateTS'].values[1]) <= timedelta(hours=0.5):
            start_monitor = pd.to_datetime(prev_PAlert['timestamp'].values[1])
        else:
            start_monitor = pd.to_datetime(prev_PAlert['timestamp'].values[0])
    # three prev alert
    else:
        if pd.to_datetime(prev_PAlert['timestamp'].values[0]) - pd.to_datetime(
                prev_PAlert['updateTS'].values[1]) <= timedelta(hours=0.5):
            # one event with three prev alert
            if pd.to_datetime(
                    prev_PAlert['timestamp'].values[1]) - pd.to_datetime(
                        prev_PAlert['updateTS'].values[2]) <= timedelta(
                            hours=0.5):
                start_monitor = pd.to_datetime(prev_PAlert.timestamp.values[2])
            # one event with two prev alert
            else:
                start_monitor = pd.to_datetime(
                    prev_PAlert['timestamp'].values[1])
        else:
            start_monitor = pd.to_datetime(prev_PAlert['timestamp'].values[0])
    # invalid alerts for this site raised within the current event
    invdf = df[(df.site == site) & (df.timestamp >= start_monitor)]
    return invdf
def alert_toDB(df):
    """Upsert a one-row rain alert into senslopedb.site_level_alert.

    NOTE(review): relies on a module-level global `end` as the current
    processing timestamp -- confirm it is always set before this is called.
    """
    # latest rain alert for this site as of `end`
    query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' AND source = 'rain' AND updateTS <= '%s' ORDER BY updateTS DESC LIMIT 1" % (
        df.site.values[0], end)
    df2 = q.GetDBDataFrame(query)
    if len(df2) == 0 or df2.alert.values[0] != df.alert.values[0]:
        # new or changed alert: append a fresh row stamped with `end`
        df['updateTS'] = end
        engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' +
                               q.Hostdb + ':3306/' + q.Namedb)
        df.to_sql(name='site_level_alert', con=engine, if_exists='append',
                  schema=q.Namedb, index=False)
    elif df2.alert.values[0] == df.alert.values[0]:
        # unchanged alert: extend the existing row's updateTS
        db, cur = q.SenslopeDBConnect(q.Namedb)
        query = "UPDATE senslopedb.site_level_alert SET updateTS='%s' WHERE site = '%s' and source = 'rain' and alert = '%s' and timestamp = '%s'" % (
            end, df2.site.values[0], df2.alert.values[0],
            pd.to_datetime(str(df2.timestamp.values[0])))
        cur.execute(query)
        db.commit()
        db.close()
def trending_alertgen(trending_alert, monitoring, lgd, window, config): endTS = pd.to_datetime(trending_alert['timestamp'].values[0]) monitoring_vel = monitoring.vel[endTS - timedelta(3):endTS] monitoring_vel = monitoring_vel.reset_index().sort_values('ts', ascending=True) nodal_dv = monitoring_vel.groupby('id') alert = nodal_dv.apply(node_alert2, colname=monitoring.colprops.name, num_nodes=monitoring.colprops.nos, T_disp=config.io.t_disp, T_velL2=config.io.t_vell2, T_velL3=config.io.t_vell3, k_ac_ax=config.io.k_ac_ax, lastgooddata=lgd, window=window, config=config) alert = column_alert(alert, config.io.num_nodes_to_check, config.io.k_ac_ax) alert['timestamp'] = endTS palert = alert.loc[(alert.col_alert == 'L2') | (alert.col_alert == 'L3')] if len(palert) != 0: palert['site'] = monitoring.colprops.name palert = palert[[ 'timestamp', 'site', 'disp_alert', 'vel_alert', 'col_alert' ]].reset_index() palert = palert[[ 'timestamp', 'site', 'id', 'disp_alert', 'vel_alert', 'col_alert' ]] engine = create_engine('mysql://' + q.Userdb + ':' + q.Passdb + '@' + q.Hostdb + ':3306/' + q.Namedb) for i in palert.index: try: palert.loc[palert.index == i].to_sql( name='node_level_alert_noFilter', con=engine, if_exists='append', schema=q.Namedb, index=False) except: print 'data already written in senslopedb.node_level_alert_noFilter' alert['TNL'] = alert['col_alert'].values if len(palert) != 0: for i in palert['id'].values: query = "SELECT * FROM senslopedb.node_level_alert_noFilter WHERE site = '%s' and timestamp >= '%s' and id = %s" % ( monitoring.colprops.name, endTS - timedelta(hours=3), i) nodal_palertDF = q.GetDBDataFrame(query) if len(nodal_palertDF) >= 3: palert_index = alert.loc[alert.id == i].index[0] alert.loc[palert_index]['TNL'] = max( getmode(list(nodal_palertDF['col_alert'].values))) else: alert.loc[palert_index]['TNL'] = 'L0' not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus( 1).site == monitoring.colprops.name]['node'].values for i in not_working: alert = 
alert.loc[alert.id != i] if 'L3' in alert['TNL'].values: site_alert = 'L3' elif 'L2' in alert['TNL'].values: site_alert = 'L2' else: site_alert = min(getmode(list(alert['TNL'].values))) alert_index = trending_alert.loc[trending_alert.timestamp == endTS].index[0] trending_alert.loc[alert_index] = [ endTS, monitoring.colprops.name, 'sensor', site_alert ] return trending_alert
def gndmeas_id(df, gndmeas_table): gndmeas_table[df['timestamp'].values[0]][ df['crack_id'].values[0]] = df['meas'].values[0] return gndmeas_table def gndmeas(df, gndmeas_table): dfid = df.groupby('crack_id') gndmeas_table = dfid.apply(gndmeas_id, gndmeas_table=gndmeas_table) return gndmeas_table site = sys.argv[1] query = "SELECT * FROM senslopedb.gndmeas WHERE site_id = '%s' ORDER BY timestamp DESC LIMIT 200" % site df = q.GetDBDataFrame(query) df['timestamp'] = pd.to_datetime(df['timestamp']) last10ts = sorted(set(df.timestamp.values), reverse=True) if len(last10ts) > 10: last10ts = last10ts[0:10] df = df[df.timestamp.isin(last10ts)] dfts = df.groupby('timestamp') gndmeas_table = pd.DataFrame(columns=sorted(last10ts), index=sorted(set(df.crack_id.values))) gndmeas_table = dfts.apply(gndmeas, gndmeas_table=gndmeas_table) gndmeas_table = gndmeas_table.reset_index(level=1, drop=True).reset_index() gndmeas_table['crack_id'] = gndmeas_table['level_1'] gndmeas_table = gndmeas_table.set_index('crack_id')[sorted(last10ts)] gndmeas_table = gndmeas_table[len(gndmeas_table.index) -