def main():
    """Interactively prompt for a rain-alert evaluation run and plot it.

    Asks whether to test a specific end time (otherwise uses the current
    time), asks for a sensor name, then delegates to ``rain.main`` with
    plotting enabled and alert evaluation disabled.  Console-only; returns
    nothing.
    """
    # Keep asking until the user answers y or n.
    while True:
        test_specific_time = raw_input('test specific time? (Y/N): ').lower()
        if test_specific_time == 'y' or test_specific_time == 'n':
            break
    if test_specific_time == 'y':
        # Re-prompt until the timestamp parses.
        while True:
            try:
                end = pd.to_datetime(
                    raw_input(
                        'plot end timestamp (format: 2016-12-31 23:30): '))
                break
            except:
                print 'invalid datetime format'
                continue
    else:
        end = datetime.now()
    # Re-prompt until a known sensor name is given; the site code is the
    # first three characters of the sensor's column name.
    while True:
        try:
            site = qdb.GetSensorList(raw_input('sensor name: '))[0].name[0:3]
            break
        except:
            print 'sensor name is not in the list'
            continue
    rain.main(site=site, end=end, alert_eval=False, plot=True,
              monitoring_end=False, positive_trigger=False)
def main(site, end):
    """Generate the trending site-level alert for the 3 hours ending at
    the monitoring window's end.

    Builds a half-hourly timestamp skeleton, runs ``trending_alertgen``
    per timestamp, and returns the row(s) for the final timestamp with an
    ``updateTS`` column added.
    """
    window, config = rtw.getwindow(end)

    # Half-hourly timestamps spanning the final 3 hours of the window.
    ts_range = pd.date_range(start=window.end - timedelta(hours=3),
                             end=window.end, freq='30Min')
    blanks = [np.nan] * len(ts_range)
    skeleton = pd.DataFrame({'timestamp': ts_range, 'site': blanks,
                             'source': blanks, 'alert': blanks})
    skeleton = skeleton[['timestamp', 'site', 'source', 'alert']]

    # Processed column data and last good data for this sensor column.
    sensors = q.GetSensorList(site)
    monitoring = g.genproc(sensors[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)

    # Evaluate the alert generator once per timestamp group.
    per_timestamp = skeleton.groupby('timestamp')
    output = per_timestamp.apply(trending_alertgen, window=window,
                                 config=config, monitoring=monitoring,
                                 lgd=lgd)

    # Keep only the row(s) at the window end and stamp the update time.
    site_level_alert = output.loc[output.timestamp == window.end]
    site_level_alert['updateTS'] = [window.end]
    return site_level_alert
def tsm_plot(tsm_name, end, shift_datetime):
    """Plot a TSM sensor column if it has data at or after *shift_datetime*.

    Looks up the latest timestamp in the sensor's table; silently returns
    when the query fails or the data predates the shift.  The plot window
    ends at that timestamp, capped at *end*.
    """
    query = "SELECT max(timestamp) AS ts FROM %s" % tsm_name
    try:
        latest = pd.to_datetime(qdb.GetDBDataFrame(query)['ts'].values[0])
    except:
        # No table / no rows / unparsable timestamp: nothing to plot.
        return
    if latest < shift_datetime:
        return
    # Never plot past the requested end.
    ts = min(latest, end)
    window, config = rtw.getwindow(ts)
    sensors = qdb.GetSensorList(tsm_name)
    monitoring = gen.genproc(sensors[0], window, config,
                             fixpoint=config.io.column_fix)
    plotter.main(monitoring, window, config, realtime=False,
                 non_event_path=False)
def main(name='',custom_end = ''):
    """Compute the column-level sensor alert for one sensor column.

    Parameters
    ----------
    name : str
        Sensor column name; read from ``sys.argv[1]`` when empty.
    custom_end : str or datetime
        End of the monitoring window, passed to ``rtw.getwindow``.

    Returns
    -------
    (column_level_alert, monitoring) : (DataFrame, object)
        One-row alert frame and the processed monitoring object.
    """
    if name == '':
        name = sys.argv[1].lower()
    start = datetime.now()  # wall-clock timer for the run-time print below
    print "=========================== {} {} =========================".format(str(name), custom_end)
    window,config = rtw.getwindow(end = custom_end )
    col = q.GetSensorList(name)
    monitoring = g.genproc(col[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)
    # Per-node velocity within the monitoring window, sorted by timestamp.
    monitoring_vel = monitoring.vel[window.start:window.end]
    monitoring_vel = monitoring_vel.reset_index().sort_values('ts',ascending=True)
    nodal_dv = monitoring_vel.groupby('id')
    # Node-level alerts from displacement/velocity thresholds in config.
    alert = nodal_dv.apply(node_alert2, colname=monitoring.colprops.name,
                           num_nodes=monitoring.colprops.nos,
                           T_disp=config.io.t_disp, T_velL2=config.io.t_vell2,
                           T_velL3=config.io.t_vell3, k_ac_ax=config.io.k_ac_ax,
                           lastgooddata=lgd,window=window,config=config)
    alert = column_alert(alert, config.io.num_nodes_to_check, config.io.k_ac_ax)
    # Drop nodes flagged as not working.
    not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site == name].node.values
    for i in not_working:
        alert = alert.loc[alert.id != i]
    # Site alert is the worst column alert; fall back to the modal value.
    if 'L3' in list(alert.col_alert.values):
        site_alert = 'L3'
    elif 'L2' in list(alert.col_alert.values):
        site_alert = 'L2'
    else:
        site_alert = min(getmode(list(alert.col_alert.values)))
    column_level_alert = pd.DataFrame({'timestamp': [window.end],
                                       'site': [monitoring.colprops.name],
                                       'source': ['sensor'],
                                       'alert': [site_alert],
                                       'updateTS': [window.end]})
    print column_level_alert
    if site_alert in ('L2', 'L3'):
        # Escalated alerts are re-evaluated by module A instead of being
        # written directly to the database.
        A.main(monitoring.colprops.name,custom_end)
    else:
        alert_toDB(column_level_alert, 'column_level_alert', window)
        write_site_alert(monitoring.colprops.name, window)
    #######################
    # Check the latest public alert for this site (site code = first three
    # characters of the column name) to decide whether to plot.
    query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' and source = 'public' ORDER BY updateTS DESC LIMIT 1" %monitoring.colprops.name[0:3]
    public_alert = q.GetDBDataFrame(query)
    if public_alert.alert.values[0] != 'A0' or RoundTime(pd.to_datetime(public_alert.timestamp.values[0])) == RoundTime(window.end):
        plot_time = ['07:30:00', '19:30:00']
        # NOTE(review): plotting is stubbed out — only the call is printed.
        if str(window.end.time()) in plot_time:
            print "Plotter.main(monitoring, window, config)"
        elif RoundTime(pd.to_datetime(public_alert.timestamp.values[0])) == RoundTime(window.end):
            print "Plotter.main(monitoring, window, config)"
    #######################
    print 'run time =', datetime.now()-start
    return column_level_alert,monitoring
def proc(func, colname, endTS, startTS, hour_interval, fixpoint): col = q.GetSensorList(colname) #end if endTS == '': window, config = rtw.getwindow() else: end = pd.to_datetime(endTS) end_year=end.year end_month=end.month end_day=end.day end_hour=end.hour end_minute=end.minute if end_minute<30:end_minute=0 else:end_minute=30 end=datetime.combine(date(end_year,end_month,end_day),time(end_hour,end_minute,0)) window, config = rtw.getwindow(end) if startTS != '': #start start = pd.to_datetime(startTS) start_year=start.year start_month=start.month start_day=start.day start_hour=start.hour start_minute=start.minute if start_minute<30:start_minute=0 else:start_minute=30 window.start=datetime.combine(date(start_year,start_month,start_day),time(start_hour,start_minute,0)) #offsetstart window.offsetstart = window.start - timedelta(days=(config.io.num_roll_window_ops*window.numpts-1)/48.) if func == 'colpos' or func == 'vcdgen': #colpos interval if hour_interval == '': if int((window.end-window.start).total_seconds() / (3600 * 24)) <= 5: hour_interval = 4 else: hour_interval = 24 config.io.col_pos_interval = str(hour_interval) + 'H' config.io.num_col_pos = int((window.end-window.start).total_seconds() / (3600 * hour_interval)) + 1 if func == 'displacement' or func == 'colpos': comp_vel = False else: comp_vel = True monitoring = g.genproc(col[0], window, config, fixpoint, comp_vel=comp_vel) num_nodes = monitoring.colprops.nos seg_len = monitoring.colprops.seglen if comp_vel == True: monitoring_vel = monitoring.vel.reset_index()[['ts', 'id', 'xz', 'xy', 'vel_xz', 'vel_xy']] else: monitoring_vel = monitoring.vel.reset_index()[['ts', 'id', 'xz', 'xy']] monitoring_vel = monitoring_vel.loc[(monitoring_vel.ts >= window.start)&(monitoring_vel.ts <= window.end)] return monitoring_vel, window, config, num_nodes, seg_len
def disp(date_end, sensor, date_start):
    """Return processed displacement/velocity data for one sensor column.

    Parameters
    ----------
    date_end : str
        End of the monitoring window (any ``pd.to_datetime`` format).
    sensor : str
        Sensor column name.
    date_start : str or int
        Either a number of days before ``window.end`` or a start
        timestamp.

    Returns
    -------
    DataFrame with columns ts, id, depth, xz, xy, vel_xz, vel_xy,
    sorted by (ts, id).

    Raises
    ------
    ValueError
        If *date_start* is neither an integer day count nor a parsable
        datetime.  (The original code spun forever in a retry loop
        printing an error, since the input never changes between
        iterations.)
    """
    end = pd.to_datetime(date_end)
    col = q.GetSensorList(sensor)
    window, config = rtw.getwindow(end)
    # date_start: try "number of days" first, then a timestamp.
    try:
        window.start = window.end - timedelta(int(date_start))
    except (ValueError, TypeError):
        try:
            window.start = pd.to_datetime(date_start)
        except Exception:
            raise ValueError('datetime format or integer only')
    # Lead time so rolling-window operations have enough data.
    window.offsetstart = window.start - timedelta(
        days=(config.io.num_roll_window_ops * window.numpts - 1) / 48.)
    # Monitoring convention: fix the column at the bottom.
    config.io.column_fix = 'bottom'
    monitoring = g.genproc(col[0], window, config, config.io.column_fix,
                           comp_vel=True)
    monitoring_vel = monitoring.disp_vel.reset_index()[[
        'ts', 'id', 'depth', 'xz', 'xy', 'vel_xz', 'vel_xy'
    ]]
    monitoring_vel.sort_values(['ts', 'id'], inplace=True)
    return monitoring_vel
def sensor_data(date_end, sensor, date_start):
    """Return processed displacement/velocity data for one sensor column.

    Same contract as ``disp``: *date_start* may be a day count relative
    to the window end or a timestamp.

    Raises
    ------
    ValueError
        If *date_start* is neither an integer nor a parsable datetime.
        (Previously this looped forever re-testing the same bad value.)
    """
    end = pd.to_datetime(date_end)
    col = q.GetSensorList(sensor)
    window, config = rtw.getwindow(end)
    # date_start: try "number of days" first, then a timestamp.
    try:
        window.start = window.end - timedelta(int(date_start))
    except (ValueError, TypeError):
        try:
            window.start = pd.to_datetime(date_start)
        except Exception:
            raise ValueError('datetime format or integer only')
    # Lead time so rolling-window operations have enough data.
    window.offsetstart = window.start - timedelta(
        days=(config.io.num_roll_window_ops * window.numpts - 1) / 48.)
    # Monitoring convention: fix the column at the bottom.
    config.io.column_fix = 'bottom'
    monitoring = g.genproc(col[0], window, config, config.io.column_fix,
                           comp_vel=True)
    monitoring_vel = monitoring.disp_vel.reset_index()[[
        'ts', 'id', 'depth', 'xz', 'xy', 'vel_xz', 'vel_xy'
    ]]
    monitoring_vel.sort_values(['ts', 'id'], inplace=True)
    return monitoring_vel

#if __name__ == '__main__':
#
#    start = '2017-01-01'
#    end = '2017-12-30'
#    site = 'laysam'
#
#    df = soms_data(start,end,site)
def get_tsm_data(tsm_name, start, end, plot_type, node_lst):
    """Fetch processed TSM displacement data for plotting.

    Parameters
    ----------
    tsm_name : str
        Sensor column name.
    start, end : str or datetime
        Monitoring window bounds.
    plot_type : str
        'cml' for cumulative surface displacement; anything else yields
        per-node zeroed xz/xy displacement (in cm).
    node_lst : list or 'all'
        Node ids to include in a 'cml' plot; 'all' keeps every node.

    Returns
    -------
    DataFrame — cumulative-surface frame for 'cml', otherwise the raw
    frame with ``zeroed_xz``/``zeroed_xy`` columns scaled by 100.
    """
    col = qdb.GetSensorList(tsm_name)[0]
    window, config = rtw.getwindow(pd.to_datetime(end))
    window.start = pd.to_datetime(start)
    # Lead time so rolling-window operations have enough data.
    window.offsetstart = window.start - timedelta(
        days=(config.io.num_roll_window_ops * window.numpts - 1) / 48.)
    # Both plot types previously set identical flags in an if/else; the
    # redundant branch is collapsed to a single assignment.
    config.io.to_smooth = 1
    config.io.to_fill = 1
    monitoring = proc.genproc(col, window, config, 'bottom', comp_vel=False)
    df = monitoring.disp_vel.reset_index()[['ts', 'id', 'xz', 'xy']]
    df = df.loc[(df.ts >= window.start) & (df.ts <= window.end)]
    df = df.sort_values('ts')
    if plot_type == 'cml':
        xzd_plotoffset = 0
        if node_lst != 'all':
            df = df[df.id.isin(node_lst)]
        df = plotter.cum_surf(df, xzd_plotoffset, col.nos)
    else:
        # Zero each node's series at its first value, then convert m -> cm.
        node_df = df.groupby('id', as_index=False)
        df = node_df.apply(zeroed, column='xz')
        df['zeroed_xz'] = df['zeroed_xz'] * 100
        node_df = df.groupby('id', as_index=False)
        df = node_df.apply(zeroed, column='xy')
        df['zeroed_xy'] = df['zeroed_xy'] * 100
    return df
PrintProc = io.io.printproc T_disp = io.io.t_disp T_velL2 = io.io.t_vell2 T_velL3 = io.io.t_vell3 k_ac_ax = io.io.k_ac_ax num_nodes_to_check = io.io.num_nodes_to_check colarrange = io.io.alerteval_colarrange.split(',') summary = pd.DataFrame() node_status = qdb.GetNodeStatus(1) last_target = 5 for i in range(0,last_target): try: sites,custom_end = ffd.aim(i) sensorlist = qdb.GetSensorList(sites) for s in sensorlist: last_col=sensorlist[-1:] last_col=last_col[0] last_col=last_col.name # getting current column properties colname,num_nodes,seg_len= s.name,s.nos,s.seglen # list of working nodes node_list = range(1, num_nodes + 1) not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)] not_working_nodes = not_working['node'].values for i in not_working_nodes: node_list.remove(i)
def main(name='', end='', end_mon=False):
    """Compute the 'noadjfilt' column-level alert for one sensor column.

    Parameters
    ----------
    name : str
        Sensor column name; read from ``sys.argv[1]`` when empty.
    end : str or datetime
        Window end; read from ``sys.argv[2]`` (falling back to now) when
        empty.  Timestamps more than 30 minutes in the future are
        rejected.
    end_mon : bool
        Unused in the visible body — presumably consumed by callers or a
        later revision; verify before removing.

    Returns
    -------
    DataFrame — one-row column-level alert.
    """
    start = datetime.now()  # wall-clock timer for the run-time print below
    if name == '':
        name = sys.argv[1].lower()
    if end == '':
        try:
            end = pd.to_datetime(sys.argv[2])
            # Reject end timestamps more than half an hour in the future.
            if end > start + timedelta(hours=0.5):
                print 'invalid timestamp'
                return
        except:
            end = datetime.now()
    else:
        end = pd.to_datetime(end)
    window, config = rtw.getwindow(end)
    col = q.GetSensorList(name)
    monitoring = g.genproc(col[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)
    # Per-node displacement/velocity within the window, sorted by time.
    monitoring_vel = monitoring.disp_vel[window.start:window.end]
    monitoring_vel = monitoring_vel.reset_index().sort_values('ts', ascending=True)
    nodal_dv = monitoring_vel.groupby('id')
    # Node-level numeric alerts from threshold checks.
    alert = nodal_dv.apply(node_alert2, colname=monitoring.colprops.name,
                           num_nodes=monitoring.colprops.nos,
                           T_disp=config.io.t_disp,
                           T_velL2=config.io.t_vell2,
                           T_velL3=config.io.t_vell3,
                           k_ac_ax=config.io.k_ac_ax,
                           lastgooddata=lgd, window=window, config=config)
    alert['col_alert'] = -1
    col_alert = pd.DataFrame({
        'id': range(1, monitoring.colprops.nos + 1),
        'col_alert': [-1] * monitoring.colprops.nos
    })
    node_col_alert = col_alert.groupby('id', as_index=False)
    # NOTE(review): the apply() result is discarded — this presumably
    # mutates `alert` in place via the alert= keyword; confirm in
    # column_alert before refactoring.
    node_col_alert.apply(column_alert, alert=alert,
                         num_nodes_to_check=config.io.num_nodes_to_check,
                         k_ac_ax=config.io.k_ac_ax,
                         T_velL2=config.io.t_vell2,
                         T_velL3=config.io.t_vell3)
    # Map numeric alert codes to their string labels.
    alert['node_alert'] = alert['node_alert'].map({
        -1: 'ND', 0: 'L0', 1: 'L2', 2: 'L3'
    })
    alert['col_alert'] = alert['col_alert'].map({
        -1: 'ND', 0: 'L0', 1: 'L2', 2: 'L3'
    })
    # Drop nodes flagged as not working.
    not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site == name].node.values
    for i in not_working:
        alert = alert.loc[alert.id != i]
    # Site alert is the worst column alert; fall back to the modal value.
    if 'L3' in list(alert.col_alert.values):
        site_alert = 'L3'
    elif 'L2' in list(alert.col_alert.values):
        site_alert = 'L2'
    else:
        site_alert = min(getmode(list(alert.col_alert.values)))
    column_level_alert = pd.DataFrame({
        'timestamp': [window.end],
        'site': [monitoring.colprops.name],
        'source': ['noadjfilt'],
        'alert': [site_alert],
        'updateTS': [window.end]
    })
    # Escalated alerts are re-evaluated by module A before writing.
    if site_alert in ('L2', 'L3'):
        column_level_alert = A.main(monitoring.colprops.name, window.end)
    alert_toDB(column_level_alert, 'column_level_alert', window)
    write_site_alert(monitoring.colprops.name, window)
    print column_level_alert
    print 'run time =', datetime.now() - start
    return column_level_alert
(158, 218, 229)] for i in range(len(tableau20)): r, green, b = tableau20[i] tableau20[i] = (r / 255., green / 255., b / 255.) ########################################################## ###INPUTS colname = 'pngta' node = 8 axis = 'xz' k = 3 #degree of spline c = 1 #factor of error #Step 1: Get dataframe for xz and xy using RealTimePlotter Code col = q.GetSensorList(colname) start = '2017-01-09 7:00:00' end = '2017-01-10 09:00:00' window, config = rtw.getwindow(pd.to_datetime(end)) config.io.to_smooth = 0 window.start = pd.to_datetime(start).to_datetime() window.numpts = int(7) window.offsetstart = window.start - timedelta( days=(config.io.num_roll_window_ops * window.numpts - 1) / 48.) out_path = 'C:\Users\Win8\Documents\Dynaslope\\Data Analysis\\Filters\\Acceleration Velocity\\' out_path = out_path + 'Underground\k {} Gaussian num_pts {}\\{}\\{}\\'.format( k, window.numpts, colname, str(node))
def main(name='', end=datetime.now(), end_mon=False): if name == '': name = sys.argv[1].lower() window, config = rtw.getwindow(end) col = q.GetSensorList(name) monitoring = g.genproc(col[0], window, config, config.io.column_fix) lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name) monitoring_vel = monitoring.vel[window.start:window.end] monitoring_vel = monitoring_vel.reset_index().sort_values('ts', ascending=True) nodal_dv = monitoring_vel.groupby('id') alert = nodal_dv.apply(node_alert2, colname=monitoring.colprops.name, num_nodes=monitoring.colprops.nos, T_disp=config.io.t_disp, T_velL2=config.io.t_vell2, T_velL3=config.io.t_vell3, k_ac_ax=config.io.k_ac_ax, lastgooddata=lgd, window=window, config=config) alert = column_alert(alert, config.io.num_nodes_to_check, config.io.k_ac_ax) not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site == name].node.values for i in not_working: alert = alert.loc[alert.id != i] if 'L3' in list(alert.col_alert.values): site_alert = 'L3' elif 'L2' in list(alert.col_alert.values): site_alert = 'L2' else: site_alert = min(getmode(list(alert.col_alert.values))) column_level_alert = pd.DataFrame({ 'timestamp': [window.end], 'site': [monitoring.colprops.name], 'source': ['sensor'], 'alert': [site_alert], 'updateTS': [window.end] }) if site_alert in ('L2', 'L3'): column_level_alert = A.main(monitoring.colprops.name, window.end) alert_toDB(column_level_alert, 'column_level_alert', window) print column_level_alert write_site_alert(monitoring.colprops.name, window) ####################### if monitoring.colprops.name == 'mesta': colname = 'msu' elif monitoring.colprops.name == 'messb': colname = 'msl' else: colname = monitoring.colprops.name[0:3] query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' and source = 'public' and timestamp <= '%s' and updateTS >= '%s' ORDER BY updateTS DESC LIMIT 1" % ( colname, window.end, window.end - timedelta(hours=0.5)) public_alert = q.GetDBDataFrame(query) if 
public_alert.alert.values[0] != 'A0': plot_time = ['07:30:00', '19:30:00'] if str(window.end.time()) in plot_time or end_mon: plotter.main(monitoring, window, config, plotvel_start=window.end - timedelta(hours=3), plotvel_end=window.end, realtime=False) elif RoundTime(pd.to_datetime( public_alert.timestamp.values[0])) == RoundTime(window.end): plotter.main(monitoring, window, config, plotvel_start=window.end - timedelta(hours=3), plotvel_end=window.end, realtime=False) ####################### return column_level_alert
def worker(first_target, last_target):
    """Recreate alarms for targets [first_target, last_target): regenerate
    per-column alerts, split them into filtered (s_f) and alarm (s_a)
    sets, then plot both sets via ``ffd.plotter``.

    Parameters
    ----------
    first_target, last_target : int
        Range of target indices resolved by ``ffd.aim``.
    """
    #load all global variables?
    summary = pd.DataFrame()   # first-pass alert rows
    s_f = pd.DataFrame()       # "filtered" rows (low velocity / l0)
    s_a = pd.DataFrame()       # "alarm" rows (positive velocity / l2)
    io = cfg.config()
    num_roll_window_ops = io.io.num_roll_window_ops
    roll_window_length = io.io.roll_window_length
    data_dt = io.io.data_dt
    rt_window_length = io.io.rt_window_length
    roll_window_numpts = int(1 + roll_window_length / data_dt)
    col_pos_interval = io.io.col_pos_interval
    col_pos_num = io.io.num_col_pos
    to_fill = io.io.to_fill
    to_smooth = io.io.to_smooth
    # output_path = (__file__)
    # output_file_path = (__file__)
    # proc_file_path = (__file__)
    CSVFormat = '.csv'
    # PrintProc = io.io.printproc
    T_disp = io.io.t_disp
    T_velL2 = io.io.t_vell2
    T_velL3 = io.io.t_vell3
    k_ac_ax = io.io.k_ac_ax
    num_nodes_to_check = io.io.num_nodes_to_check
    colarrange = io.io.alerteval_colarrange.split(',')
    node_status = qdb.GetNodeStatus(1)
    # Pass 1: regenerate alerts for each target and collect candidate rows.
    for i in range(first_target, last_target):
        # try:
        sites, custom_end = ffd.aim(i)
        sensorlist = qdb.GetSensorList(sites)
        for s in sensorlist:
            last_col = sensorlist[-1:]
            last_col = last_col[0]
            last_col = last_col.name
            # getting current column properties
            colname, num_nodes, seg_len = s.name, s.nos, s.seglen
            # list of working nodes
            # NOTE(review): inner loop variable `i` shadows the outer
            # target index; harmless here (the outer for reassigns it)
            # but worth renaming.
            node_list = range(1, num_nodes + 1)
            not_working = node_status.loc[(node_status.site == colname) &
                                          (node_status.node <= num_nodes)]
            not_working_nodes = not_working['node'].values
            for i in not_working_nodes:
                node_list.remove(i)
            proc_monitoring, monwin = generate_proc(colname, num_nodes,
                                                    seg_len, custom_end,
                                                    roll_window_length,
                                                    data_dt,
                                                    rt_window_length,
                                                    num_roll_window_ops)
            xz_series_list, xy_series_list = create_series_list(
                proc_monitoring, monwin, colname, num_nodes)
            # print "create_series_list tapos na"
            # create, fill and smooth dataframes from series lists
            xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill,
                                       to_smooth)
            xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill,
                                       to_smooth)
            # computing instantaneous velocity
            vel_xz, vel_xy = compute_node_inst_vel(xz, xy,
                                                   roll_window_numpts)
            # computing cumulative displacements
            cs_x, cs_xz, cs_xy = compute_col_pos(xz, xy, monwin.index[-1],
                                                 col_pos_interval,
                                                 col_pos_num, seg_len)
            # processing dataframes for output
            xz, xy, xz_0off, xy_0off, vel_xz, vel_xy, vel_xz_0off, vel_xy_0off, cs_x, cs_xz, cs_xy, cs_xz_0, cs_xy_0 = df_to_out(
                colname, xz, xy, vel_xz, vel_xy, cs_x, cs_xz, cs_xy)
                # proc_file_path, CSVFormat)
            # Alert generation
            # alert_out=alert_generation(colname,xz,xy,vel_xz,vel_xy,num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,
            #                            num_nodes_to_check,custom_end,CSVFormat,colarrange)
            alert_out = alert_generation(colname, xz, xy, vel_xz, vel_xy,
                                         num_nodes, T_disp, T_velL2,
                                         T_velL3, k_ac_ax,
                                         num_nodes_to_check, custom_end,
                                         CSVFormat, colarrange)
            alert_out = alert_out.reset_index(level=['id'])
            alert_out = alert_out[[
                'id', 'disp_alert', 'vel_alert', 'node_alert', 'col_alert'
            ]]
            # Keep only node 1 rows with a positive velocity alert or an
            # 'l2' node alert.
            alert_out = alert_out[(alert_out['vel_alert'] > 0) |
                                  (alert_out.node_alert == 'l2')]
            alert_out = alert_out[alert_out.id == 1]
            alert_out['site'] = sites
            summary = pd.concat((summary, alert_out), axis=0)
        # except:
        #     print "Error recreating alarm."
        #     continue
    print "--------------------Filtering chenes----------------------"
    print "--------------------Store yung mga nafilter----------------------"
    # Pass 2: re-run with filtering enabled and split rows into s_f / s_a.
    for j in range(0, len(summary)):
        # try:
        sites, custom_end = time_site(j, summary)
        # print "custom_end -------------> %s" %str(custom_end)
        sensorlist = qdb.GetSensorList(sites)
        for s in sensorlist:
            last_col = sensorlist[-1:]
            last_col = last_col[0]
            last_col = last_col.name
            # getting current column properties
            colname, num_nodes, seg_len = s.name, s.nos, s.seglen
            # list of working nodes
            node_list = range(1, num_nodes + 1)
            not_working = node_status.loc[(node_status.site == colname) &
                                          (node_status.node <= num_nodes)]
            not_working_nodes = not_working['node'].values
            for i in not_working_nodes:
                node_list.remove(i)
            # proc_monitoring,monwin=generate_proc(colname, num_nodes, seg_len, custom_end,f=True)
            proc_monitoring, monwin = generate_proc(colname, num_nodes,
                                                    seg_len, custom_end,
                                                    roll_window_length,
                                                    data_dt,
                                                    rt_window_length,
                                                    num_roll_window_ops,
                                                    filt=True)
            xz_series_list, xy_series_list = create_series_list(
                proc_monitoring, monwin, colname, num_nodes)
            xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill,
                                       to_smooth)
            xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin,
                                       roll_window_numpts, to_fill,
                                       to_smooth)
            # computing instantaneous velocity
            vel_xz, vel_xy = compute_node_inst_vel(xz, xy,
                                                   roll_window_numpts)
            # computing cumulative displacements
            cs_x, cs_xz, cs_xy = compute_col_pos(xz, xy, monwin.index[-1],
                                                 col_pos_interval,
                                                 col_pos_num, seg_len)
            # processing dataframes for output
            xz, xy, xz_0off, xy_0off, vel_xz, vel_xy, vel_xz_0off, vel_xy_0off, cs_x, cs_xz, cs_xy, cs_xz_0, cs_xy_0 = df_to_out(
                colname, xz, xy, vel_xz, vel_xy, cs_x, cs_xz, cs_xy)
                # proc_file_path, CSVFormat)
            # Alert generation
            alert_out = alert_generation(colname, xz, xy, vel_xz, vel_xy,
                                         num_nodes, T_disp, T_velL2,
                                         T_velL3, k_ac_ax,
                                         num_nodes_to_check, custom_end,
                                         CSVFormat, colarrange)
            # print alert_out
            alert_out = alert_out.reset_index(level=['id'])
            # s_f: node-1 rows filtered out (low velocity or l0).
            a_out = alert_out.copy()
            a_out = a_out[[
                'id', 'disp_alert', 'vel_alert', 'node_alert', 'col_alert'
            ]]
            a_out = a_out[(a_out['vel_alert'] < 1.0) |
                          (a_out.node_alert == 'l0')]
            a_out = a_out[a_out.id == 1]
            a_out['site'] = sites
            s_f = pd.concat((s_f, a_out), axis=0)
            # s_a: node-1 rows that remain alarming (positive velocity or l2).
            b_out = alert_out.copy()
            b_out = b_out[[
                'id', 'disp_alert', 'vel_alert', 'node_alert', 'col_alert'
            ]]
            b_out = b_out[(b_out['vel_alert'] > 0.0) |
                          (b_out.node_alert == 'l2')]
            b_out = b_out[b_out.id == 1]
            b_out['site'] = sites
            s_a = pd.concat((s_a, b_out), axis=0)
        # except:
        #     print "Error."
        #     continue
    print "################# Drawing! Dahil drawing ka! ##################"
    print "################# Idrawing lahat ng nafilter! ##################"
    # Pass 3a: plot every filtered row.
    for k in range(0, len(s_f)):
        try:
            sites, custom_end = time_site(k, s_f)
            ce = custom_end.strftime("%y_%m_%d__%H_%M")
            fname = "FILTERED_" + str(sites) + "_" + ce + "_049_049"
            sensorlist = qdb.GetSensorList(sites)
            for s in sensorlist:
                last_col = sensorlist[-1:]
                last_col = last_col[0]
                last_col = last_col.name
                # getting current column properties
                colname, num_nodes, seg_len = s.name, s.nos, s.seglen
                # list of working nodes
                # node_list = range(1, num_nodes + 1)
                # not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
                # not_working_nodes = not_working['node'].values
                # for i in not_working_nodes:
                #     node_list.remove(i)
                # importing proc_monitoring file of current column to dataframe
                # try:
                # print "proc_monitoring here: "
                proc_monitoring = generate_proc(colname, num_nodes,
                                                seg_len, custom_end,
                                                roll_window_length,
                                                data_dt, rt_window_length,
                                                num_roll_window_ops,
                                                filt=True, for_plots=True)
                # print proc_monitoring
                proc_monitoring = proc_monitoring[proc_monitoring.id == 1]
                ffd.plotter(proc_monitoring, fname=fname)
        except:
            print "Error plotting Filtered."
    # Pass 3b: plot every alarm row.
    for k in range(0, len(s_a)):
        try:
            sites, custom_end = time_site(k, s_a)
            ce = custom_end.strftime("%y_%m_%d__%H_%M")
            sensorlist = qdb.GetSensorList(sites)
            for s in sensorlist:
                last_col = sensorlist[-1:]
                last_col = last_col[0]
                last_col = last_col.name
                # getting current column properties
                colname, num_nodes, seg_len = s.name, s.nos, s.seglen
                # list of working nodes
                # node_list = range(1, num_nodes + 1)
                # not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
                # not_working_nodes = not_working['node'].values
                # for i in not_working_nodes:
                #     node_list.remove(i)
                # importing proc_monitoring file of current column to dataframe
                # try:
                # print "proc_monitoring here: "
                # NOTE(review): this call passes f=True whereas the other
                # generate_proc calls pass filt=True — likely a typo that
                # would raise TypeError; confirm the generate_proc
                # signature.
                proc_monitoring = generate_proc(colname, num_nodes,
                                                seg_len, custom_end,
                                                roll_window_length,
                                                data_dt, rt_window_length,
                                                num_roll_window_ops,
                                                f=True, for_plots=True)
                # print proc_monitoring
                proc_monitoring = proc_monitoring[proc_monitoring.id == 1]
                # NOTE(review): `fname` is never set in this loop — it
                # reuses the stale "FILTERED_..." name from the previous
                # loop (or is undefined when s_f is empty); the try/except
                # hides the failure. Likely should build an "ALARM_..."
                # name here.
                ffd.plotter(proc_monitoring, fname=fname)
        except:
            print "Error plotting Alarms."
def GenLsbAlerts():
    """Scan every sensor node's last 7 days of filtered data for large
    30-minute-resampled movements and write two CSV summaries.

    Writes ``lsbalerts.csv`` (flags: site,node,x,y,z as 0/1) and
    ``lsbalerts2.csv`` (same rows with the peak magnitudes instead of
    flags).  Thresholds: 0.25 for x and y, 1.0 for z, applied to a
    48-sample rolling mean of 6-hour differences over the last day.
    """
    sites = qs.GetSensorList()
    alertTxt = ""   # accumulates the 0/1 flag CSV
    alertTxt2 = ""  # accumulates the peak-magnitude CSV
    print "Getting lsb alerts"
    for site in sites:
        for nid in range(1, site.nos + 1):
            df = ofd.getFilteredData(
                isCmd=False,
                inSite=site.name,
                inNode=nid,
                inStart=(dt.now() - td(7)).strftime("%y/%m/%d %H:%M:%S"))
            isDFempty = df.empty
            if isDFempty == True:
                qs.PrintOut('No Data Available... for %s %s' % (site.name,
                                                                nid))
                continue
            df = df.set_index(['ts'])
            df2 = df.copy()
            dfa = []
            try:
                # Forward-fill onto a regular 30-minute grid.
                df3 = df2.resample('30Min').fillna(method='pad')
            except pd.core.groupby.DataError:
                #print "No data to resample %s %s" % (site.name, nid)
                continue
            # Difference against 12 samples (6 hours) earlier.
            dfv = df3 - df3.shift(12)
            # NOTE(review): dfa is reset to [] every iteration, so the
            # else branch below is dead code — dfa is always dfv.copy().
            if len(dfa) == 0:
                dfa = dfv.copy()
            else:
                dfa = dfa.append(dfv)
            window = 48
            # NOTE(review): pd.rolling_mean was removed in modern pandas;
            # this code requires a legacy pandas version.
            dfarm = pd.rolling_mean(dfa, window)
            # Only consider the last 24 hours.
            dfarm = dfarm[dfarm.index > dt.now() - td(1)]
            if (((abs(dfarm.x) > 0.25) | (abs(dfarm.y) > 0.25) |
                 (abs(dfarm.z) > 1.0)).any()):
                ins = "%s,%s" % (site.name, nid)
                alertTxt += ins
                alertTxt2 += ins
                print ins + '\t',
                # Per-axis flag (file 1) and peak magnitude (file 2).
                if ((abs(dfarm.x) > 0.25).any()):
                    print 'x',
                    alertTxt += ',1'
                    alertTxt2 += ',' + repr(max(abs(dfarm.x)))
                else:
                    alertTxt += ',0'
                    alertTxt2 += ',0'
                if ((abs(dfarm.y) > 0.25).any()):
                    print 'y',
                    alertTxt += ',1'
                    alertTxt2 += ',' + repr(max(abs(dfarm.y)))
                else:
                    alertTxt += ',0'
                    alertTxt2 += ',0'
                if ((abs(dfarm.z) > 1.0).any()):
                    print 'z',
                    alertTxt += ',1'
                    alertTxt2 += ',' + repr(max(abs(dfarm.z)))
                else:
                    alertTxt += ',0'
                    alertTxt2 += ',0'
                print ''
                alertTxt += '\n'
                alertTxt2 += '\n'
    f = open('lsbalerts.csv', 'w')
    f.write(alertTxt)
    f.close()
    f = open('lsbalerts2.csv', 'w')
    f.write(alertTxt2)
    f.close()
def mon_main():
    """Interactive plotting menu for a sensor column.

    Three modes, chosen by console prompts:
    1. 3-day monitoring window (optionally at a specific end time);
    2. custom window (user-supplied end plus a day-count or start time);
    3. full data range (window bounds queried from the database).
    Modes 2 and 3 also prompt for column-position interval, column fix,
    legend density, and whether to compute/plot velocity.  Console-only;
    returns nothing.
    """
    while True:
        plot_all_data = raw_input(
            'plot from start to end of data? (Y/N): ').lower()
        if plot_all_data == 'y' or plot_all_data == 'n':
            break
    # plots segment of data
    if plot_all_data == 'n':
        while True:
            monitoring_window = raw_input(
                'plot with 3 day monitoring window? (Y/N): ').lower()
            if monitoring_window == 'y' or monitoring_window == 'n':
                break
        # plots with 3 day monitoring window
        if monitoring_window == 'y':
            while True:
                try:
                    col = q.GetSensorList(raw_input('sensor name: '))
                    break
                except:
                    print 'sensor name is not in the list'
                    continue
            while True:
                test_specific_time = raw_input(
                    'test specific time? (Y/N): ').lower()
                if test_specific_time == 'y' or test_specific_time == 'n':
                    break
            while True:
                try:
                    if test_specific_time == 'y':
                        end = pd.to_datetime(
                            raw_input(
                                'plot end timestamp (format: 2016-12-31 23:30): '
                            ))
                        window, config = rtw.getwindow(end)
                    elif test_specific_time == 'n':
                        # Default window ending now.
                        window, config = rtw.getwindow()
                    break
                except:
                    print 'invalid datetime format'
                    continue
            column_fix = raw_input(
                'column fix for colpos (top/bottom); default for monitoring is fix bottom: '
            ).lower()
            if column_fix != 'top':
                column_fix = 'bottom'
            config.io.column_fix = column_fix
            monitoring = g.genproc(col[0], window, config,
                                   config.io.column_fix, realtime=True)
            # Velocity plotted over the final 3 hours of the window.
            plotter.main(monitoring, window, config,
                         plotvel_start=window.end - timedelta(hours=3),
                         plotvel_end=window.end)  #, plot_inc=False)
        # plots with customizable monitoring window
        elif monitoring_window == 'n':
            while True:
                try:
                    col = q.GetSensorList(raw_input('sensor name: '))
                    break
                except:
                    print 'sensor name is not in the list'
                    continue
            while True:
                try:
                    end = pd.to_datetime(
                        raw_input(
                            'plot end timestamp (format: 2016-12-31 23:30): '))
                    window, config = rtw.getwindow(end)
                    break
                except:
                    print 'invalid datetime format'
                    continue
            # Window length: either a day count back from end, or a start
            # timestamp.
            while True:
                start = raw_input(
                    'monitoring window (in days) or datetime (format: 2016-12-31 23:30): '
                )
                try:
                    window.start = window.end - timedelta(int(start))
                    break
                except:
                    try:
                        window.start = pd.to_datetime(start)
                        break
                    except:
                        print 'datetime format or integer only'
                        continue
            # Lead time so rolling-window operations have enough data.
            window.offsetstart = window.start - timedelta(
                days=(config.io.num_roll_window_ops * window.numpts - 1) / 48.)
            while True:
                try:
                    col_pos_interval = int(
                        raw_input(
                            'interval between column position dates, in days: '
                        ))
                    break
                except:
                    print 'enter an integer'
                    continue
            config.io.col_pos_interval = str(col_pos_interval) + 'D'
            config.io.num_col_pos = int((window.end - window.start).days /
                                        col_pos_interval + 1)
            column_fix = raw_input(
                'column fix for colpos (top/bottom); default for monitoring is fix bottom: '
            ).lower()
            if column_fix != 'top':
                column_fix = 'bottom'
            config.io.column_fix = column_fix
            while True:
                show_all_legend = raw_input(
                    'show all legend in column position plot? (Y/N): ').lower(
                    )
                if show_all_legend == 'y' or show_all_legend == 'n':
                    break
            if show_all_legend == 'y':
                show_part_legend = False
            elif show_all_legend == 'n':
                while True:
                    try:
                        show_part_legend = int(
                            raw_input('every nth legend to show: '))
                        if show_part_legend <= config.io.num_col_pos:
                            break
                        else:
                            print 'integer should be less than number of column position dates to plot:', config.io.num_col_pos
                            continue
                    except:
                        print 'enter an integer'
                        continue
            while True:
                plotvel = raw_input('plot velocity? (Y/N): ').lower()
                if plotvel == 'y' or plotvel == 'n':
                    break
            if plotvel == 'y':
                plotvel = True
            else:
                plotvel = False
            monitoring = g.genproc(col[0], window, config,
                                   config.io.column_fix, comp_vel=plotvel)
            plotter.main(monitoring, window, config, plotvel=plotvel,
                         show_part_legend=show_part_legend,
                         plotvel_end=window.end,
                         plotvel_start=window.start, plot_inc=False,
                         comp_vel=plotvel)
    # plots from start to end of data
    elif plot_all_data == 'y':
        while True:
            try:
                col = q.GetSensorList(raw_input('sensor name: '))
                break
            except:
                print 'sensor name is not in the list'
                continue
        while True:
            try:
                col_pos_interval = int(
                    raw_input(
                        'interval between column position dates, in days: '))
                break
            except:
                print 'enter an integer'
                continue
        # Earliest and latest timestamps of this column's data table.
        query = "(SELECT * FROM senslopedb.%s where timestamp > '2010-01-01 00:00' ORDER BY timestamp LIMIT 1)" % col[
            0].name
        query += " UNION ALL"
        query += " (SELECT * FROM senslopedb.%s ORDER BY timestamp DESC LIMIT 1)" % col[
            0].name
        start_end = q.GetDBDataFrame(query)
        end = pd.to_datetime(start_end['timestamp'].values[1])
        window, config = rtw.getwindow(end)
        start_dataTS = pd.to_datetime(start_end['timestamp'].values[0])
        start_dataTS_Year = start_dataTS.year
        start_dataTS_month = start_dataTS.month
        start_dataTS_day = start_dataTS.day
        start_dataTS_hour = start_dataTS.hour
        start_dataTS_minute = start_dataTS.minute
        # Round the data start down to the previous half hour.
        if start_dataTS_minute < 30:
            start_dataTS_minute = 0
        else:
            start_dataTS_minute = 30
        window.offsetstart = datetime.combine(
            date(start_dataTS_Year, start_dataTS_month, start_dataTS_day),
            time(start_dataTS_hour, start_dataTS_minute, 0))
        window.numpts = int(1 + config.io.roll_window_length /
                            config.io.data_dt)
        # Window start leaves room for rolling-window operations.
        window.start = window.offsetstart + timedelta(
            days=(config.io.num_roll_window_ops * window.numpts - 1) / 48.)
        config.io.col_pos_interval = str(col_pos_interval) + 'D'
        config.io.num_col_pos = int((window.end - window.start).days /
                                    col_pos_interval + 1)
        column_fix = raw_input(
            'column fix for colpos (top/bottom); default for monitoring is fix bottom: '
        ).lower()
        if column_fix != 'top':
            column_fix = 'bottom'
        config.io.column_fix = column_fix
        while True:
            show_all_legend = raw_input(
                'show all legend in column position plot? (Y/N): ').lower()
            if show_all_legend == 'y' or show_all_legend == 'n':
                break
        if show_all_legend == 'y':
            show_part_legend = False
        elif show_all_legend == 'n':
            while True:
                try:
                    show_part_legend = int(
                        raw_input('every nth legend to show: '))
                    if show_part_legend <= config.io.num_col_pos:
                        break
                    else:
                        print 'integer should be less than number of column position dates to plot:', config.io.num_col_pos
                        continue
                except:
                    print 'enter an integer'
                    continue
        while True:
            plotvel = raw_input('plot velocity? (Y/N): ').lower()
            if plotvel == 'y' or plotvel == 'n':
                break
        if plotvel == 'y':
            plotvel = True
        else:
            plotvel = False
        monitoring = g.genproc(col[0], window, config, config.io.column_fix,
                               comp_vel=plotvel)
        plotter.main(monitoring, window, config, plotvel=plotvel,
                     show_part_legend=show_part_legend, plot_inc=False,
                     comp_vel=plotvel)