Example #1
0
def set_monitoring_window(roll_window_length, data_dt, rt_window_length,
                          num_roll_window_ops, end):

    ##DESCRIPTION:
    ##returns number of data points per rolling window, endpoint of interval, starting point of interval, time interval for real-time monitoring, monitoring window dataframe

    ##INPUT:
    ##roll_window_length; float; length of rolling/moving window operations, in hours
    ##data_dt; float; time interval between data points, in hours
    ##rt_window_length; float; length of real-time monitoring window, in days
    ##num_roll_window_ops; integer; number of rolling/moving window operations to perform within the real-time window

    ##OUTPUT:
    ##roll_window_numpts, end, start, offsetstart, monwin

    roll_window_numpts = int(1 + roll_window_length / data_dt)
    end, start, offsetstart = gf.get_rt_window(rt_window_length,
                                               roll_window_numpts,
                                               num_roll_window_ops, end)
    monwin_time = pd.date_range(start=offsetstart,
                                end=end,
                                freq='30Min',
                                name='ts',
                                closed=None)
    monwin = pd.DataFrame(data=np.nan * np.ones(len(monwin_time)),
                          index=monwin_time)
    return roll_window_numpts, end, start, offsetstart, monwin
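The example above relies on gf.get_rt_window, which is not shown here. The following is a minimal, self-contained sketch of what set_monitoring_window produces, using assumed parameter values and a hand-rolled stand-in for the window-boundary calculation; it is an illustration, not the project's actual get_rt_window.

# Self-contained sketch of the monitoring-window construction (assumed values).
import numpy as np
import pandas as pd

roll_window_length = 3.0   # hours, assumed example value
data_dt = 0.5              # hours between data points, assumed
rt_window_length = 3.0     # days, assumed

roll_window_numpts = int(1 + roll_window_length / data_dt)   # 7 points per rolling window

# Assumed stand-in for gf.get_rt_window: fix an end timestamp and offset the
# window start so rolling operations have enough lead-in data.
end = pd.Timestamp('2024-01-08 12:00')
start = end - pd.Timedelta(days=rt_window_length)
offsetstart = start - pd.Timedelta(hours=(roll_window_numpts - 1) * data_dt)

monwin_time = pd.date_range(start=offsetstart, end=end, freq='30Min', name='ts')
monwin = pd.DataFrame(data=np.nan * np.ones(len(monwin_time)), index=monwin_time)
print(monwin.shape)   # one NaN row per 30-minute timestamp in the window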
Example #2
0
def set_monitoring_window(roll_window_length,data_dt,rt_window_length,num_roll_window_ops):

    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart=gf.get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops)
    monwin_time=pd.date_range(start=offsetstart, end=end, freq='30Min',name='ts', closed=None)
    monwin=pd.DataFrame(data=np.nan*np.ones(len(monwin_time)), index=monwin_time)
    return roll_window_numpts, end, start, offsetstart, monwin
def generate_proc(site):

    print(rt_window_length)
    
    #1. setting date boundaries for real-time monitoring window
    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart=gf.get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops)
    sensorlist=GetSensorList()

    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:"
    for s in sensorlist:
        
        if site == s.name:
        
            #2. getting current column properties
            colname,num_nodes,seg_len= s.name,s.nos,s.seglen
            print(colname)
            print(num_nodes)
            print(seg_len)
                
            #3. getting accelerometer data for site 'colname'
            monitoring=GetRawAccelData(colname,offsetstart)
    
            #4. evaluating which data needs to be filtered
            try:
                monitoring=applyFilters(monitoring)
                LastGoodData=GetLastGoodData(monitoring,num_nodes)
                PushLastGoodData(LastGoodData,colname)
                LastGoodData = GetLastGoodDataFromDb(colname)
                print('Done')
            except:
                LastGoodData = GetLastGoodDataFromDb(colname)
                print('error')
            if len(LastGoodData)<num_nodes: print(colname, "Missing nodes in LastGoodData")
    
            #5. extracting last data outside monitoring window
            LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]
    
            #6. appending LastGoodData to monitoring
            monitoring=monitoring.append(LastGoodData)
            
            #7. replacing date of data outside monitoring window with first date of monitoring window
            monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart
    
            #8. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
            monitoring['xz'],monitoring['xy']=gf.accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)
            
            #9. removing unnecessary columns x,y,z
            monitoring=monitoring.drop(['x','y','z'],axis=1)
    
            #10. setting ts as index
    #        monitoring['id']=monitoring.index.values
            monitoring=monitoring.set_index('ts')
    
            #11. reordering columns
            monitoring=monitoring[['id','xz','xy']]
            
            #12. saving proc monitoring data        
            return monitoring
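Steps #5 to #7 above keep only the last good readings taken before the monitoring window, append them to the monitoring data, and clamp their timestamps to offsetstart so they line up with the window start. A toy sketch with made-up data (pd.concat is used here in place of the older DataFrame.append):

# Toy demonstration of steps #5-#7; all values are invented.
import pandas as pd

offsetstart = pd.Timestamp('2024-01-05 12:00')

monitoring = pd.DataFrame({
    'ts': pd.to_datetime(['2024-01-05 12:00', '2024-01-05 12:30']),
    'id': [1, 1],
    'x': [512, 515], 'y': [10, 12], 'z': [850, 848],
})
LastGoodData = pd.DataFrame({
    'ts': pd.to_datetime(['2024-01-04 23:30', '2024-01-05 13:00']),
    'id': [2, 2],
    'x': [500, 501], 'y': [8, 9], 'z': [860, 861],
})

# 5. keep only the last data from outside (before) the monitoring window
LastGoodData = LastGoodData[LastGoodData.ts < offsetstart]

# 6. append it to the monitoring data
monitoring = pd.concat([monitoring, LastGoodData], ignore_index=True)

# 7. pretend the pre-window rows were observed at the window start
monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart
print(monitoring)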
def set_monitoring_window(roll_window_length,data_dt,rt_window_length,num_roll_window_ops):
    
    ##DESCRIPTION:    
    ##returns number of data points per rolling window, endpoint of interval, starting point of interval, time interval for real-time monitoring, monitoring window dataframe
    
    ##INPUT:
    ##roll_window_length; float; length of rolling/moving window operations, in hours
    ##data_dt; float; time interval between data points, in hours    
    ##rt_window_length; float; length of real-time monitoring window, in days
    ##num_roll_window_ops; integer; number of rolling/moving window operations to perform within the real-time window
    
    ##OUTPUT:
    ##roll_window_numpts, end, start, offsetstart, monwin
    
    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart=gf.get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops)
    monwin_time=pd.date_range(start=offsetstart, end=end, freq='30Min',name='ts', closed=None)
    monwin=pd.DataFrame(data=np.nan*np.ones(len(monwin_time)), index=monwin_time)
    return roll_window_numpts, end, start, offsetstart, monwin
Example #5
0
#INPUT/OUTPUT FILES

#file headers
colarrange = cfg.get('I/O','alerteval_colarrange').split(',')
TestSpecificTime = cfg.getboolean('I/O', 'test_specific_time')



if TestSpecificTime:
    end = pd.to_datetime(cfg.get('I/O','use_specific_time'))
else:
    end = datetime.now()


roll_window_numpts=int(1+roll_window_length/data_dt)
end, start, offsetstart=gf.get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops,end)
valid_data = end - timedelta(hours=3)
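The cfg.get and cfg.getboolean calls above assume a ConfigParser-style object with an [I/O] section. Below is a minimal sketch of such a section and how it is read; the key names come from the calls above, while the values are placeholders:

# Sketch of the configuration this block assumes (placeholder values).
from configparser import ConfigParser

sample_cfg = """
[I/O]
alerteval_colarrange = ts,id,xz,xy
test_specific_time = True
use_specific_time = 2024-01-08 12:00
"""

cfg = ConfigParser()
cfg.read_string(sample_cfg)

colarrange = cfg.get('I/O', 'alerteval_colarrange').split(',')
TestSpecificTime = cfg.getboolean('I/O', 'test_specific_time')
print(colarrange, TestSpecificTime)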



def node_alert(colname, xz_tilt, xy_tilt, xz_vel, xy_vel, num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax):

    #DESCRIPTION
    #Evaluates node-level alerts from node tilt and velocity data

    #INPUT
    #xz_tilt,xy_tilt, xz_vel, xy_vel:   Pandas DataFrame objects, with length equal to real-time window size, and columns for timestamp and individual node values
    #num_nodes:                         integer; number of nodes in a column
    #T_disp, T_velL2, T_velL3:          floats; threshold values for displacement, and velocities corresponding to alert levels L2 and L3
    #k_ac_ax:                           float; minimum value of (minimum velocity / maximum velocity) required to consider movement as valid
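The body of node_alert is not included in this example. As a rough illustration of the kind of per-node threshold comparison the comments describe (displacement against T_disp, velocity against T_velL2 and T_velL3), here is a toy sketch; the combination logic and values are invented and this is not the project's actual alert evaluation.

# Toy sketch of per-node threshold checks; NOT the real node_alert logic.
import pandas as pd

T_disp, T_velL2, T_velL3 = 0.05, 0.032, 0.5   # assumed example thresholds

disp = pd.Series([0.01, 0.08, 0.20], index=[1, 2, 3], name='displacement')  # per-node displacement
vel = pd.Series([0.001, 0.04, 0.7], index=[1, 2, 3], name='velocity')       # per-node velocity

alert = pd.Series('L0', index=disp.index)
alert[(disp.abs() > T_disp) & (vel.abs() > T_velL2)] = 'L2'
alert[(disp.abs() > T_disp) & (vel.abs() > T_velL3)] = 'L3'
print(alert)   # node 1 -> L0, node 2 -> L2, node 3 -> L3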
def generate_proc(site,end):
    
    #1. setting date boundaries for real-time monitoring window
    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart=gf.get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops,end=end)
    sensorlist=GetSensorList()

    # generating proc monitoring data for each site
#    print "Generating PROC monitoring data for:"
    for s in sensorlist:
        
        if site == s.name:
        
            #2. getting current column properties
            colname,num_nodes,seg_len= s.name,s.nos,s.seglen
#            print colname
#            print num_nodes
#            print seg_len
                
            #3. getting accelerometer data for site 'colname'
            monitoring=GetRawAccelData(colname,offsetstart)
            monitoring = monitoring.loc[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]
             
            #3.1 identify the node ids with no data at start of monitoring window
            NodesNoInitVal=GetNodesWithNoInitialData(monitoring,num_nodes,offsetstart)
            
            #4: get last good data prior to the monitoring window (LGDPM)
            lgdpm = pd.DataFrame()
            for node in NodesNoInitVal:
                temp = GetSingleLGDPM(site, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
                lgdpm = lgdpm.append(temp,ignore_index=True)
 
            #5 TODO: Resample the dataframe together with the LGDPM
            monitoring=monitoring.append(lgdpm)
    
            #6. evaluating which data needs to be filtered
            try:
                monitoring=applyFilters(monitoring)
                LastGoodData=GetLastGoodData(monitoring,num_nodes)
                PushLastGoodData(LastGoodData,colname)
                LastGoodData = GetLastGoodDataFromDb(colname)
#                print 'Done'
            except:
                LastGoodData = GetLastGoodDataFromDb(colname)
                print('error')

            if len(LastGoodData)<num_nodes: print(colname, "Missing nodes in LastGoodData")

            #7. extracting last data outside monitoring window
            LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]

            #8. appending LastGoodData to monitoring
            monitoring=monitoring.append(LastGoodData)

            
            #9. replacing date of data outside monitoring window with first date of monitoring window
            monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart
    
            #10. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
            monitoring['xz'],monitoring['xy']=gf.accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)
            
            #11. removing unnecessary columns x,y,z
            monitoring=monitoring.drop(['x','y','z'],axis=1)
            monitoring = monitoring.drop_duplicates(['ts', 'id'])
    
            #12. setting ts as index
    #        monitoring['id']=monitoring.index.values
            monitoring=monitoring.set_index('ts')
    
            #13. reordering columns
            monitoring=monitoring[['id','xz','xy']]
            
            #14. saving proc monitoring data
            if PrintProc:
                monitoring.to_csv(proc_monitoring_path+colname+proc_monitoring_file,sep=',', header=False,mode='w')
                
            return monitoring
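Because this version appends last-good-data-prior-to-monitoring (lgdpm) rows to the raw data, the same (ts, id) pair can end up in the frame twice; the drop_duplicates(['ts', 'id']) call above keeps one row per node per timestamp. A toy illustration with made-up rows:

# Why drop_duplicates(['ts', 'id']) is needed after appending pre-window rows.
import pandas as pd

monitoring = pd.DataFrame({
    'ts': pd.to_datetime(['2024-01-05 12:00', '2024-01-05 12:00', '2024-01-05 12:30']),
    'id': [1, 1, 2],
    'xz': [0.10, 0.10, 0.25],
    'xy': [0.02, 0.02, 0.05],
})

deduped = monitoring.drop_duplicates(['ts', 'id'])
print(len(monitoring), '->', len(deduped))   # 3 -> 2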
Example #7
0
def generate_proc():
    #MAIN

    #1. setting date boundaries for real-time monitoring window
    roll_window_numpts = int(1 + roll_window_length / data_dt)
    end, start, offsetstart = gf.get_rt_window(rt_window_length,
                                               roll_window_numpts,
                                               num_roll_window_ops)

    #2. getting all column properties
    sensors = pd.read_csv(columnproperties_path + columnproperties_file,
                          names=columnproperties_headers,
                          index_col=None)

    ##    print "Generating PROC monitoring data for:"
    for s in range(len(sensors)):

        #3. getting current column properties
        colname, num_nodes, seg_len = sensors['colname'][s], sensors[
            'num_nodes'][s], sensors['seg_len'][s]

        ##    print "\nDATA for ",colname," as of ", end.strftime("%Y-%m-%d %H:%M")

        try:
            #4. importing monitoring csv file of current column to dataframe
            monitoring = pd.read_csv(monitoring_path + colname +
                                     monitoring_file,
                                     names=monitoring_file_headers,
                                     parse_dates=[0],
                                     index_col=[1])

            #5. extracting data within monitoring window
            monitoring = monitoring[(monitoring.ts >= offsetstart)
                                    & (monitoring.ts <= end)]

            #6. importing LastGoodData csv file of current column to dataframe
            LastGoodData = pd.read_csv(LastGoodData_path + colname +
                                       LastGoodData_file,
                                       names=LastGoodData_file_headers,
                                       parse_dates=[0],
                                       index_col=[1])
            if len(LastGoodData) < num_nodes:
                print(colname, "Missing nodes in LastGoodData")

            #7. extracting last data outside monitoring window
            LastGoodData = LastGoodData[(LastGoodData.ts < offsetstart)]
            ##            print "\n",colname
            ##            print LastGoodData

            #8. appending LastGoodData to monitoring
            monitoring = monitoring.append(LastGoodData)
            ##          print monitoring.tail(num_nodes+1)

            #9. replacing date of data outside monitoring window with first date of monitoring window
            monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

            #10. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
            monitoring['xz'], monitoring['xy'] = gf.accel_to_lin_xz_xy(
                seg_len, monitoring.x.values, monitoring.y.values,
                monitoring.z.values)

            #11. removing unnecessary columns x,y,z
            monitoring = monitoring.drop(['x', 'y', 'z'], axis=1)

            #12. setting ts as index
            monitoring['id'] = monitoring.index.values
            monitoring = monitoring.set_index('ts')

            #13. reordering columns
            monitoring = monitoring[['id', 'xz', 'xy', 'm']]

            ##    print "\n",colname
            ##    print monitoring.tail(20)

            monitoring.to_csv(proc_monitoring_path + colname +
                              proc_monitoring_file,
                              sep=',',
                              header=False,
                              mode='w')

##            print "     ",colname

        except:
            ##            print "     ",colname, "...FAILED"
            continue
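gf.accel_to_lin_xz_xy is not shown in any of these examples. The sketch below is one assumed way to turn raw tilt-accelerometer axis readings into horizontal displacements of a segment of length seg_len, by projecting the segment along the measured gravity direction; treat it purely as an illustration, not the library's actual formula.

# Assumed sketch of an accelerometer-to-horizontal-displacement conversion.
# x is taken to lie along the sensor segment, y and z perpendicular to it.
# This is an illustration, NOT the actual gf.accel_to_lin_xz_xy.
import numpy as np

def accel_to_lin_xz_xy_sketch(seg_len, xa, ya, za):
    norm = np.sqrt(xa**2 + ya**2 + za**2)   # gravity magnitude per sample
    xz = seg_len * za / norm                # horizontal offset in the x-z plane
    xy = seg_len * ya / norm                # horizontal offset in the x-y plane
    return np.round(xz, 4), np.round(xy, 4)

xa = np.array([1020.0, 1000.0])
ya = np.array([15.0, 30.0])
za = np.array([-40.0, -10.0])
print(accel_to_lin_xz_xy_sketch(1.5, xa, ya, za))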
def generate_proc(colname, num_nodes, seg_len):
    
    #1. setting date boundaries for real-time monitoring window
    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart=gf.get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops)

    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:"

    print colname
    print num_nodes
    print seg_len
        
    #3. getting accelerometer data for site 'colname'
    monitoring=GetRawAccelData(colname,offsetstart)
    monitoring = monitoring.loc[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]
     
    #3.1 identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,num_nodes,offsetstart)
    
    #4: get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = GetSingleLGDPM(colname, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
        lgdpm = lgdpm.append(temp,ignore_index=True)
 
    #5 TODO: Resample the dataframe together with the LGDPM
    monitoring=monitoring.append(lgdpm)

    #6. evaluating which data needs to be filtered
    try:
        monitoring=applyFilters(monitoring)
        LastGoodData=GetLastGoodData(monitoring,num_nodes)
        PushLastGoodData(LastGoodData,colname)
        LastGoodData = GetLastGoodDataFromDb(colname)
        print('Done')
    except:
        LastGoodData = GetLastGoodDataFromDb(colname)
        print('error')

    if len(LastGoodData)<num_nodes: print(colname, "Missing nodes in LastGoodData")

    #7. extracting last data outside monitoring window
    LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]

    #8. appending LastGoodData to monitoring
    monitoring=monitoring.append(LastGoodData)

    
    #9. replacing date of data outside monitoring window with first date of monitoring window
    monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

    #10. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
    monitoring['xz'],monitoring['xy']=gf.accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)
    
    #11. removing unnecessary columns x,y,z
    monitoring=monitoring.drop(['x','y','z'],axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])

    #12. setting ts as index
#        monitoring['id']=monitoring.index.values
    monitoring=monitoring.set_index('ts')

    #13. reordering columns
    monitoring=monitoring[['id','xz','xy']]
    
    #14. saving proc monitoring data
    if PrintProc:
        monitoring.to_csv(proc_monitoring_path+colname+proc_monitoring_file,sep=',', header=False,mode='w')
        
    return monitoring
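If the module-level settings (roll_window_length, data_dt, the Get* helpers, PrintProc and the proc_monitoring_* paths) are in place, the saved proc file is just the id, xz, xy columns written against the ts index with no header row. A toy illustration of that output format with made-up rows:

# What the proc monitoring CSV written above looks like (made-up rows).
import pandas as pd
from io import StringIO

monitoring = pd.DataFrame({
    'ts': pd.to_datetime(['2024-01-05 12:00', '2024-01-05 12:00']),
    'id': [1, 2],
    'xz': [0.1023, 0.2487],
    'xy': [0.0211, 0.0542],
}).set_index('ts')[['id', 'xz', 'xy']]

buf = StringIO()
monitoring.to_csv(buf, sep=',', header=False)
print(buf.getvalue())
# 2024-01-05 12:00:00,1,0.1023,0.0211
# 2024-01-05 12:00:00,2,0.2487,0.0542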