#        print "Generating plots and alerts for:"
    
#        print colname

    
        # list of working nodes     
        node_list = range(1, num_nodes + 1)
        not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
        not_working_nodes = not_working['node'].values        
        for i in not_working_nodes:
            node_list.remove(i)
    
        # importing proc_monitoring csv file of current column to dataframe
        try:
            proc_monitoring = genproc.generate_proc(colname, num_nodes, seg_len)
#            print proc_monitoring
#            print "\n", colname
        except:
            print "     ", colname, "ERROR...missing/empty proc monitoring"
            continue

        # creating series lists per node
        xz_series_list, xy_series_list = create_series_list(proc_monitoring, monwin, colname, num_nodes)

        # create, fill and smooth dataframes from series lists
        xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin, roll_window_numpts, to_fill, to_smooth)
        xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin, roll_window_numpts, to_fill, to_smooth)

        # computing instantaneous velocity
        vel_xz, vel_xy = compute_node_inst_vel(xz, xy, roll_window_numpts)
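compute_node_inst_vel() is defined elsewhere in the module these snippets come from and is not reproduced here. As a rough illustration of the idea (instantaneous velocity per node taken as the slope of a short least-squares fit over the rolling window), a sketch could look like the following; the column-per-node layout of xz/xy and the units-per-day convention are assumptions, not the project's confirmed implementation.

import numpy as np
import pandas as pd

def compute_node_inst_vel_sketch(xz, xy, roll_window_numpts):
    # xz/xy are assumed to be timestamp-indexed dataframes with one column per node,
    # as produced by create_fill_smooth_df in the snippet above
    def rolling_slope(col):
        days = (col.index - col.index[0]).total_seconds() / 86400.0
        vel = pd.Series(np.nan, index=col.index)
        for i in range(roll_window_numpts - 1, len(col)):
            window = slice(i - roll_window_numpts + 1, i + 1)
            # slope of displacement vs. time (in days) over the last roll_window_numpts points
            vel.iloc[i] = np.polyfit(days[window], col.values[window], 1)[0]
        return vel

    return xz.apply(rolling_slope), xy.apply(rolling_slope)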
Example #2
    # getting current column properties
    colname, num_nodes, seg_len = s.name, s.nos, s.seglen
    #    print colname, num_nodes, seg_len
    print 'RESULTS FOR SITE ' + colname
    # list of working nodes
    node_list = range(1, num_nodes + 1)
    not_working = node_status.loc[(node_status.site == colname)
                                  & (node_status.node <= num_nodes)]
    not_working_nodes = not_working['node'].values
    for i in not_working_nodes:
        node_list.remove(i)

    # importing proc_monitoring file of current column to dataframe
    try:
        proc_monitoring = genproc.generate_proc(colname, end)
#        print proc_monitoring
#        print "\n", colname
    except:
        print "     ", colname, "ERROR...missing/empty proc monitoring"
        continue

    # creating series lists per node
    xz_series_list, xy_series_list = create_series_list(
        proc_monitoring, monwin, colname, num_nodes)

    # create, fill and smooth dataframes from series lists
    xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin,
                               roll_window_numpts, to_fill, to_smooth)
    xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin,
                               roll_window_numpts, to_fill, to_smooth)
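These examples are Python 2 code (print statements, a list-returning range()). Under Python 3 the node_list bookkeeping above would fail, because range() no longer returns a list that supports .remove(); a version-agnostic sketch of the same filtering is:

# Python 3-safe equivalent of building node_list and dropping the non-working nodes
working = set(range(1, num_nodes + 1)) - set(not_working_nodes)
node_list = sorted(working)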
    last_col = last_col.name

    # getting current column properties
    colname, num_nodes, seg_len = s.name, s.nos, s.seglen
#    print colname, num_nodes, seg_len
    print 'RESULTS FOR SITE ' + colname
    # list of working nodes
    node_list = range(1, num_nodes + 1)
    not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
    not_working_nodes = not_working['node'].values
    for i in not_working_nodes:
        node_list.remove(i)

    # importing proc_monitoring file of current column to dataframe
    try:
        proc_monitoring = genproc.generate_proc(colname, end)
#        print proc_monitoring
#        print "\n", colname
    except:
        print "     ", colname, "ERROR...missing/empty proc monitoring"
        continue

    # creating series lists per node
    xz_series_list, xy_series_list = create_series_list(proc_monitoring, monwin, colname, num_nodes)

    # create, fill and smooth dataframes from series lists
    xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin, roll_window_numpts, to_fill, to_smooth)
    xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin, roll_window_numpts, to_fill, to_smooth)

    # computing instantaneous velocity
    vel_xz, vel_xy = compute_node_inst_vel(xz, xy, roll_window_numpts)
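create_series_list() is another helper that is not included in these snippets. Judging only from the call site, it appears to split the processed monitoring dataframe into per-node xz and xy series over the monitoring window; a hedged sketch, assuming proc_monitoring is indexed by timestamp with id/xz/xy columns and monwin is a DatetimeIndex, might be:

def create_series_list_sketch(proc_monitoring, monwin, colname, num_nodes):
    # one xz series and one xy series per node, reindexed to the monitoring window;
    # colname is kept in the signature only to mirror the call above
    xz_series_list, xy_series_list = [], []
    for n in range(1, num_nodes + 1):
        node_df = proc_monitoring[proc_monitoring.id == n]
        xz_series_list.append(node_df.xz.reindex(monwin))
        xy_series_list.append(node_df.xy.reindex(monwin))
    return xz_series_list, xy_series_list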
    last_col = last_col.name

    # getting current column properties
    colname, num_nodes, seg_len = s.name, s.nos, s.seglen

    # list of working nodes
    node_list = range(1, num_nodes + 1)
    not_working = node_status.loc[(node_status.site == colname)
                                  & (node_status.node <= num_nodes)]
    not_working_nodes = not_working['node'].values
    for i in not_working_nodes:
        node_list.remove(i)

    # importing proc_monitoring file of current column to dataframe
    try:
        proc_monitoring = genproc.generate_proc(colname, num_nodes, seg_len)
        print proc_monitoring
    except:
        print "     ", colname, "ERROR...missing/empty proc monitoring"
        continue

    # creating series lists per node
    xz_series_list, xy_series_list = create_series_list(
        proc_monitoring, monwin, colname, num_nodes)

    # create, fill and smooth dataframes from series lists
    xz = create_fill_smooth_df(xz_series_list, num_nodes, monwin,
                               roll_window_numpts, to_fill, to_smooth)
    xy = create_fill_smooth_df(xy_series_list, num_nodes, monwin,
                               roll_window_numpts, to_fill, to_smooth)
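Neither create_fill_smooth_df (used above) nor fill_smooth_df (used in the function below) is reproduced in these snippets. The common pattern is to put each node's displacement on a regular time grid covering the monitoring window, fill the gaps, and smooth with a rolling mean; a per-node sketch, with an assumed 30-minute sampling interval and written against the current pandas API, could be:

import pandas as pd

def fill_smooth_df_sketch(node_df, offsetstart, end, roll_window_numpts, to_smooth,
                          freq='30min'):
    # node_df: one node's rows, indexed by timestamp, with xz and xy columns;
    # the 30-minute sampling frequency is an assumption for this sketch
    grid = pd.date_range(start=offsetstart, end=end, freq=freq, name='ts')
    out = node_df[['xz', 'xy']].reindex(grid)
    out = out.ffill().bfill()  # fill gaps forward, then backfill the leading edge
    if to_smooth:
        out = out.rolling(roll_window_numpts, min_periods=1).mean()
    return out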
def IntegratedAlert(site_col_props):
    
    # getting current column properties
    colname = site_col_props.name
    num_nodes = int(site_col_props.nos.values[0])
    seg_len = float(site_col_props.seglen.values[0])
    
    print "colname, num_nodes, seg_len", colname, num_nodes, seg_len
    
    # list of working nodes     
    node_list = range(1, num_nodes + 1)
    not_working = node_status.loc[(node_status.site == colname) & (node_status.node <= num_nodes)]
    not_working_nodes = not_working['node'].values
    for i in not_working_nodes:
        node_list.remove(i)

    # importing proc_monitoring file of current column to dataframe
    try:
        proc_monitoring = genproc.generate_proc(colname, num_nodes, seg_len)
        proc_monitoring.sort_index(ascending=True, inplace=True)
        print proc_monitoring
    except:
        print "     ", colname, "ERROR...missing/empty proc monitoring"
        proc_monitoring = pd.DataFrame({'ts': [end]*num_nodes, 'id': range(1, num_nodes + 1),
                                        'xz': [np.nan]*num_nodes, 'xy': [np.nan]*num_nodes})
        
    print "orig proc_monitoring", proc_monitoring

    nodes_with_val = set(proc_monitoring.id.values)
    all_nodes = set(range(1, num_nodes+1))
    no_val_nodes = list(all_nodes - nodes_with_val)
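    # nodes_with_val vs. all_nodes identifies nodes that returned no data at all;
    # they are padded below with a single zero-displacement row at offsetstart so that
    # every node id appears in the groupby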
    
    node_fill = pd.DataFrame({'ts': [offsetstart]*len(no_val_nodes), 'id': no_val_nodes, 
                              'xz': [0]*len(no_val_nodes), 'xy': [0]*len(no_val_nodes)}).set_index('ts')

    proc_monitoring = proc_monitoring.append(node_fill)
    
    nodal_proc_monitoring = proc_monitoring.groupby('id')
    
    # fill and smoothen displacement
    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth_df, offsetstart=offsetstart, end=end, roll_window_numpts=roll_window_numpts, to_smooth=to_smooth)
    filled_smoothened = filled_smoothened[['xz', 'xy']].reset_index()
    filled_smoothened['td'] = filled_smoothened.ts.values - filled_smoothened.ts.values[0]
    filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1,'D'))
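    # 'td' is the elapsed time in days since the first timestamp; node_inst_vel
    # presumably regresses displacement against it to get velocities in units per day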

    nodal_filled_smoothened = filled_smoothened.groupby('id')    
    
    # xz and xy displacements, xz and xy velocities within monitoring window
    disp_vel = nodal_filled_smoothened.apply(node_inst_vel, roll_window_numpts=roll_window_numpts, start=start)
    disp_vel = disp_vel[['ts', 'xz', 'xy', 'vel_xz', 'vel_xy']].reset_index()
    disp_vel = disp_vel[['ts', 'id', 'xz', 'xy', 'vel_xz', 'vel_xy']]
    disp_vel = disp_vel.sort('ts', ascending=True)
    
    # absolute column position for col_pos_num with col_pos_interval
    colposdates = pd.date_range(end=end, freq=col_pos_interval, periods=col_pos_num, name='ts', closed=None)
    colpos_df = pd.DataFrame({'ts': colposdates, 'id': [num_nodes+1]*len(colposdates), 'xz': [0]*len(colposdates), 'xy': [0]*len(colposdates)})
    for colpos_ts in colposdates:
        colpos_df = colpos_df.append(disp_vel.loc[disp_vel.ts == colpos_ts, ['ts', 'id', 'xz', 'xy']])
    colpos_df['x'] = colpos_df['id'].apply(lambda x: (num_nodes + 1 - x) * seg_len)
    colpos_df = colpos_df.sort('id', ascending=False)

    colpos_dfts = colpos_df.groupby('ts')
    
    # relative column position
    cumsum_df = colpos_dfts.apply(col_pos, col_pos_end=end, col_pos_interval=col_pos_interval, col_pos_number=col_pos_num, num_nodes=num_nodes)
    
    # relative column position with zeroed initial
    nodal_cumsum_df = cumsum_df.groupby('id')
    cumsum_df_zeroed = nodal_cumsum_df.apply(zero_initial_cumsum)
    cumsum_df_zeroed = cumsum_df_zeroed[['ts','xz','xy','x','cs_xz','cs_xy','cs_xz_0','cs_xy_0']].reset_index()
    cumsum_df_zeroed = cumsum_df_zeroed[['ts','id','xz','xy','x','cs_xz','cs_xy','cs_xz_0','cs_xy_0']]
    
    # displacements and velocity with zeroed initial
    nodal_disp_vel = disp_vel.groupby('id')
    disp_vel_0off = nodal_disp_vel.apply(offsetzero_initial_dispvel, num_nodes=num_nodes, offset=0.15)
    disp_vel_0off = disp_vel_0off[['ts','xz','xy','vel_xz','vel_xy','xz_0','xy_0','xz_0off','xy_0off','vel_xz_0','vel_xy_0','vel_xz_0off','vel_xy_0off']].reset_index()
    disp_vel_0off = disp_vel_0off[['ts','id','xz','xy','vel_xz','vel_xy','xz_0','xy_0','xz_0off','xy_0off','vel_xz_0','vel_xy_0','vel_xz_0off','vel_xy_0off']]
    
    # Alert generation
    alert_out = alert_generation(disp_vel, colname, num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,
                                 num_nodes_to_check, end, proc_file_path, CSVFormat)

    print alert_out
    
    # column alerts of working nodes (without trending_node_alert) feed the trending column alert
    trending_col_alerts = []
    # keep only the working nodes that actually have processed data
    node_list = list(set(node_list) & set(filled_smoothened.id.values))

    for n in node_list:
        trending_col_alerts += [pd.Series.tolist(alert_out.col_alert)[n-1]]
    
    # TRENDING COLUMN ALERT ONLY
    # L3 if any working node trends L3, else L2 if any trends L2; otherwise the most
    # common remaining alert decides between L0 and ND
    if trending_col_alerts.count('L3') != 0:
        if PrintTAlert:
            with open(output_file_path + textalert, 'ab') as t:
                t.write(colname + ":" + 'L3' + '\n')
        L3_alert.append(colname)
        alert_df.append((end, colname, 'L3'))
    elif trending_col_alerts.count('L2') != 0:
        if PrintTAlert:
            with open(output_file_path + textalert, 'ab') as t:
                t.write(colname + ":" + 'L2' + '\n')
        L2_alert.append(colname)
        alert_df.append((end, colname, 'L2'))
    else:
        trending_col_alerts_count = Counter(trending_col_alerts)
        if PrintTAlert:
            with open(output_file_path + textalert, 'ab') as t:
                t.write(colname + ":" + trending_col_alerts_count.most_common(1)[0][0] + '\n')
        if trending_col_alerts_count.most_common(1)[0][0] == 'L0':
            L0_alert.append(colname)
            alert_df.append((end, colname, 'L0'))
        else:
            ND_alert.append(colname)
            alert_df.append((end, colname, 'ND'))
    
    # writes sensor alerts in one row in webtrends
    if PrintWAlert:
        with open(output_file_path + webtrends, 'ab') as w:
            if trending_col_alerts.count('L3') != 0:
                w.write('L3' + ',')
            elif trending_col_alerts.count('L2') != 0:
                w.write('L2' + ',')
            elif (colname == 'sinb') or (colname == 'blcb'):
                if trending_col_alerts.count('L0') > 0:
                    w.write('L0' + ',')
                else:
                    w.write('ND' + ',')
            else:
                trending_col_alerts = Counter(trending_col_alerts)
                w.write(trending_col_alerts.most_common(1)[0][0] + ',')

            # after the last column, replace the trailing comma with a newline
            if colname == last_col:
                w.seek(-1, os.SEEK_END)
                w.truncate()
                w.write('\n')
    
    print alert_out
  
    # prints to csv: node alert, column alert and trending alert of sites with ND alert
    if PrintND:
        for colname in ND_alert:
            if os.path.exists(ND_path + colname + CSVFormat):
                alert_out[['node_alert', 'col_alert', 'trending_alert']].to_csv(ND_path + colname + CSVFormat, sep=',', header=False, mode='a')
            else:
                alert_out[['node_alert', 'col_alert', 'trending_alert']].to_csv(ND_path + colname + CSVFormat, sep=',', header=True, mode='w')

    #11. Plotting column positions
    if PrintColPos:
        plot_column_positions(cumsum_df,colname,end)
        plt.savefig(output_file_path + colname + 'ColPos',
                    dpi=160, facecolor='w', edgecolor='w', orientation='landscape', mode='w')

    #12. Plotting displacement and velocity
    if PrintDispVel:
        plot_disp_vel(disp_vel_0off, colname, end)
        plt.savefig(output_file_path + colname + 'Disp_vel',
                    dpi=160, facecolor='w', edgecolor='w', orientation='landscape', mode='w')
                    
    output = pd.DataFrame({'alert_df': [alert_df], 'ND_alert': [ND_alert], 'L0_alert': [L0_alert], 'L2_alert': [L2_alert], 'L3_alert': [L3_alert]})

    return output
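IntegratedAlert() leans on module-level configuration set up earlier in the original script (node_status, end, offsetstart, start, roll_window_numpts, to_smooth, the alert thresholds, the output paths and the L0/L2/L3/ND alert lists), and it is written to be applied per sensor column via groupby, since it reads site_col_props.name and site_col_props.nos.values[0]. Given that setup, a hedged sketch of a driver with a hypothetical sensors table might be:

import pandas as pd

# hypothetical sensors table; only the attribute access pattern ('nos', 'seglen' and a
# per-site group key) follows the function above, the names and values are illustrative
sensors = pd.DataFrame({'site': ['sinb', 'blcb'],
                        'nos': [10, 15],        # number of nodes per sensor column
                        'seglen': [1.0, 1.5]})  # segment length, units as in the site config

# groupby().apply() gives each one-row group a .name attribute equal to the site code,
# matching the site_col_props.name / site_col_props.nos.values[0] access pattern above
outputs = sensors.groupby('site').apply(IntegratedAlert)
print outputs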