示例#1
0
def f_undervoltage(df,column,node,mode):
    '''for v3 only

    Keep only the 30-minute samples of ``df`` taken while both accelerometer
    battery voltages were inside the healthy 3.2V-3.4V band, or while no
    voltage reading exists at all (no evidence of undervoltage).

    df     -- timestamp-indexed measurement series (becomes column 'mval1')
    column -- site id; sites in the v2 list report voltage under msgids
              32/33, all others under 11/12
    node   -- target node id within the site
    mode   -- unused here; kept for signature compatibility with callers
    Returns the masked series resampled to 30-minute bins.
    '''
    v2 = ['NAGSA', 'BAYSB', 'AGBSB', 'MCASB', 'CARSB', 'PEPSB', 'BLCSA']
    if column in v2:
        v_a1 = qs.GetRawAccelData(siteid=column, targetnode=node, msgid=32, batt=1)
        v_a2 = qs.GetRawAccelData(siteid=column, targetnode=node, msgid=33, batt=1)
    else:
        v_a1 = qs.GetRawAccelData(siteid=column, targetnode=node, msgid=11, batt=1)
        v_a2 = qs.GetRawAccelData(siteid=column, targetnode=node, msgid=12, batt=1)

    # index both voltage frames on timestamp the same way (the original mixed
    # set_index with direct index assignment) and rename the raw 'v' column
    # so the two accelerometer voltages can sit side by side
    v_a1 = v_a1.set_index("ts")
    v_a1.rename(columns={'v': 'v1'}, inplace=True)
    v_a1 = v_a1.resample('30Min', base=0).first()

    v_a2 = v_a2.set_index("ts")
    v_a2.rename(columns={'v': 'v2'}, inplace=True)
    v_a2 = v_a2.resample('30Min', base=0).first()

    # align the measurements with both voltage series on one 30-minute grid
    x = pd.concat([df, v_a1.v1, v_a2.v2], axis=1, ignore_index=True)
    x.columns = ['mval1', 'v1', 'v2']
    x = x.resample('30Min', base=0).first()
    # keep samples only when BOTH voltages are in-band, or when neither
    # voltage reading exists
    df = x.mval1[((x.v1 > 3.2) & (x.v1 < 3.4) & (x.v2 > 3.2) & (x.v2 < 3.4)) |
                 (x.v1.isnull() & x.v2.isnull())]
    df = df.resample('30Min', base=0).first()
    return df
示例#2
0
def getFilteredAll():
    """Pull raw accelerometer data for every sensor table and report how
    many rows survive the standard filters (ortho, range, outlier).

    Optional command-line arguments: start time, end time, msgid
    (defaults: '', '', 32).
    """
    argv = sys.argv
    start = argv[1] if len(argv) > 1 else ''
    end = argv[2] if len(argv) > 2 else ''
    msgid = argv[3] if len(argv) > 3 else 32

    sensors = qs.GetSensorDF()

    for idx in range(len(sensors)):
        table = sensors.name[idx]
        raw = qs.GetRawAccelData(siteid = table, fromTime = start, toTime = end, msgid = msgid)
        raw_count = len(raw.index)
        qs.PrintOut("Number of %s Raw elements: %s" % (table, raw_count))

        if raw_count > 0:
            filtered = fs.applyFilters(raw, orthof=True, rangef=True, outlierf=True)
            filtered_count = len(filtered.index)

            if filtered_count > 0:
                qs.PrintOut("Number of %s filtered elements: %s" % (table, filtered_count))
            else:
                qs.PrintOut("No valid filtered data for %s" % (table))
示例#3
0
def querydf(col,nid,time):
    '''Query one week of raw accelerometer/battery data ending at ``time``.

    col  -- site id
    nid  -- node id (re-read from the user when the query fails)
    time -- query window end timestamp
    Returns the queried dataframe; on failure, re-prompts via ``inputinfo``
    and retries with the new values.
    '''
    fromTS = time - td(weeks=1)
    try:
        # query data then filter
        df_node = qdb.GetRawAccelData(siteid=col, fromTime=fromTS, toTime=time, batt=1)
        return df_node
    except:
        # NOTE(review): bare except kept from the original; narrow it if the
        # query layer's exception types are known
        print ("Wrong Input!!!")
        col, nid, time = inputinfo()
        # BUG FIX: the retried result was computed but never returned, so a
        # failed first attempt always made this function return None
        return querydf(col, nid, time)
示例#4
0
def seek_undervoltage(df,column,node,mode):
    '''Build a boolean mask of 30-minute bins where either accelerometer's
    battery voltage falls outside the 3.26V-3.40V healthy band.

    df     -- timestamp-indexed measurement data the voltages align with
    column -- site id; sites in the (module-level) ``v2`` list report voltage
              under msgids 32/33, all others under 11/12
    node   -- target node id within the site
    mode   -- unused in this function
    Returns a boolean Series, True where an under/over-voltage occurred.

    NOTE(review): ``resample('30Min', base=0)`` with no aggregation call
    relies on old pandas (pre-0.18) returning an aggregated frame directly;
    modern pandas returns a Resampler here -- confirm target pandas version.
    '''
    
    if column in v2:
        v_a1= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=32, batt=1)
        v_a2= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=33, batt=1)
    else:
        v_a1= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=11, batt=1)
        v_a2= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=12, batt=1)
        
    # index on timestamp and rename the raw 'v' column so both voltages
    # can be concatenated side by side
    v_a1.index = v_a1.ts
    v_a1.rename(columns={'v':'v1'}, inplace=True)
    v_a1=v_a1.resample('30Min',base=0)
    
    v_a2.index = v_a2.ts
    v_a2.rename(columns={'v':'v2'}, inplace=True)
    v_a2=v_a2.resample('30Min',base=0)
    
    # align the measurements with both voltage series on one 30-minute grid
    x=pd.concat([df,v_a1.v1,v_a2.v2],axis=1)   
    x=x.resample('30Min',base=0)
    undervoltage =  (x.v1<3.26) | (x.v1>3.40) | (x.v2<3.26) | (x.v2>3.40)
    
    return undervoltage
示例#5
0
def f_undervoltage(df,column,node,mode):
    '''for v3 only

    Keep only the 30-minute samples of ``df`` taken while both accelerometer
    battery voltages were inside the 3.2V-3.4V band, or while no voltage
    reading exists at all (no evidence of undervoltage).

    df     -- timestamp-indexed measurement series (becomes column 'mval1')
    column -- site id; sites in the (module-level) ``v2`` list report voltage
              under msgids 32/33, all others under 11/12
    node   -- target node id within the site
    mode   -- unused in this function

    NOTE(review): ``resample('30Min', base=0)`` with no aggregation call
    relies on old pandas (pre-0.18) returning an aggregated frame directly;
    modern pandas returns a Resampler here -- confirm target pandas version.
    '''
#    seek_undervoltage(df,column,node,mode)
    if column in v2:
        v_a1= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=32, batt=1)
        v_a2= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=33, batt=1)
    else:
        v_a1= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=11, batt=1)
        v_a2= qDb.GetRawAccelData(siteid=column,targetnode=node, msgid=12, batt=1)        
        
    # index on timestamp and rename the raw 'v' column so both voltages
    # can be concatenated side by side
    v_a1.index = v_a1.ts
    v_a1.rename(columns={'v':'v1'}, inplace=True)
    v_a1=v_a1.resample('30Min',base=0)

    v_a2.index = v_a2.ts
    v_a2.rename(columns={'v':'v2'}, inplace=True)
    v_a2=v_a2.resample('30Min',base=0)
    
    # align the measurements with both voltage series on one 30-minute grid
    x=pd.concat([df,v_a1.v1,v_a2.v2],axis=1,ignore_index=True)
    x.columns=['mval1','v1','v2']
    x=x.resample('30Min',base=0)
    # keep samples only when BOTH voltages are in-band, or when neither
    # voltage reading exists
    df=x.mval1[((x.v1>3.2) & (x.v1<3.4) & (x.v2>3.2) & (x.v2<3.4)) | (x.v1.isnull() & x.v2.isnull())]
    df = df.resample('30Min',base=0)
    return df
示例#6
0
def getSensor(start, end, colname, smooth=True):
    """Fetch, filter and convert raw accelerometer data for one column.

    start, end -- query time window
    colname    -- sensor column/site name
    smooth     -- accepted for interface compatibility; not used in the body
    Returns a ts-indexed frame with columns id, xz, xy, gts.
    """
    seg_len = getSegLen(colname)
    accel = q.GetRawAccelData(siteid=colname, fromTime=start, toTime=end)
    accel = f.applyFilters(accel)

    # convert raw x/y/z accelerometer readings to horizontal displacements
    accel['xz'], accel['xy'] = accel_to_lin_xz_xy(
        seg_len, accel.x.values, accel.y.values, accel.z.values)
    accel = accel.drop(['x', 'y', 'z'], axis=1).drop_duplicates(['ts', 'id'])

    # matlab-style datenum timestamp column for downstream use
    accel['gts'] = accel.ts.apply(datenum)

    return accel.set_index('ts')[['id', 'xz', 'xy', 'gts']]
示例#7
0
def generate_proc(colname, num_nodes, seg_len, custom_end,f=False,for_plots=False):
    """Generate PROC monitoring data for one site column.

    colname    -- site column name
    num_nodes  -- number of nodes expected on the column
    seg_len    -- segment length for converting accel readings to xz/xy
    custom_end -- end timestamp for the real-time monitoring window
    f          -- when True, run the ffd filter chain instead of a plain
                  time-window cut
    for_plots  -- with f=True, keep original columns and return the filtered
                  frame early
    Returns (monitoring, monwin).

    NOTE(review): depends on module-level globals (roll_window_length,
    data_dt, rt_window_length, num_roll_window_ops) and on
    DataFrame.append(), removed in pandas 2.0 -- confirm pandas version.
    """
    
    #1. setting date boundaries for real-time monitoring window
#    roll_window_numpts=int(1+roll_window_length/data_dt)
    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart,monwin=get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops,custom_end)

    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:-->> %s - %s <<--" %(str(colname),str(num_nodes))


    #3. getting accelerometer data for site 'colname'
    monitoring=qdb.GetRawAccelData(colname,offsetstart)
    if f:
        if for_plots:
            # keep the unfiltered columns alongside the filtered ones
            monitoring = ffd.filt(monitoring,keep_orig=True)
            return monitoring
        else:
            monitoring = ffd.filt(monitoring)

    else:
        monitoring = monitoring.loc[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]
     
    #3.1 identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,num_nodes,offsetstart)
#    print NodesNoInitVal
    #4: get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = qdb.GetSingleLGDPM(colname, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
        temp = fsd.applyFilters(temp)
        # keep only the single most recent valid row for this node
        temp = temp.sort_index(ascending = False)[0:1]
        lgdpm = lgdpm.append(temp,ignore_index=True)
 
    #5 TODO: Resample the dataframe together with the LGDOM
    monitoring=monitoring.append(lgdpm)

    #6. evaluating which data needs to be filtered
#    try:
    # filter, then refresh the "last good data" cache in the database
    monitoring=fsd.applyFilters(monitoring)
    LastGoodData=qdb.GetLastGoodData(monitoring,num_nodes)
    qdb.PushLastGoodData(LastGoodData,colname)
    LastGoodData = qdb.GetLastGoodDataFromDb(colname)
    print 'Done'


    if len(LastGoodData)<num_nodes: print colname, " Missing nodes in LastGoodData"

    #5. extracting last data outside monitoring window
    LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]

    #6. appending LastGoodData to monitoring
    monitoring=monitoring.append(LastGoodData)    

    
    #7. replacing date of data outside monitoring window with first date of monitoring window
    monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

    #8. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
    monitoring['xz'],monitoring['xy']=accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)
    
    #9. removing unnecessary columns x,y,z
    monitoring=monitoring.drop(['x','y','z'],axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])

    #10. setting ts as index
    monitoring=monitoring.set_index('ts')

    #11. reordering columns
    monitoring=monitoring[['id','xz','xy']]
    
    return monitoring,monwin
示例#8
0
def getFilteredData(isCmd = True, inSite = "", inNode = 1, inStart = "", inEnd = "", inMsgid = 32):
    if isCmd == True:
        try: #site selection
            site = sys.argv[1]
        except IndexError:
            print "No site has been selected. Script unable to run!"
            return
            
        try: #node selection
            node = sys.argv[2]
    
            if node == 'nil':
                node = -1
        except IndexError:
            node = -1
    
        try: #start date
            start = sys.argv[3]
    
            if start == 'nil':
                start = ''
        except IndexError:
            start = ''
            
        try: #end date
            end = sys.argv[4] 
    
            if end == 'nil':
                end = ''
        except IndexError:
            end = ''       
            
        try: #switch between accel 1 and 2
            msgid = sys.argv[5] 
    
            if msgid == 'nil':
                msgid = 32
        except IndexError:
            msgid = 32
    else:
        site = inSite
        node = inNode
        start = inStart
        end = inEnd
        msgid = inMsgid

    #print "variables: %s %s %s %s %s" % (site,node,start,end,msgid)

    df = qs.GetRawAccelData(siteid = site, fromTime = start, toTime = end, msgid = msgid, targetnode = node)
    numElements = len(df.index)
    qs.PrintOut("Number of %s Raw elements: %s" % (site, numElements))
    
    if numElements > 0:
        df_filtered = fs.applyFilters(df, orthof=True, rangef=True, outlierf=False)
        numFiltered = len(df_filtered.index)
        
        if numFiltered > 0:
            qs.PrintOut("Number of %s filtered elements: %s" % (site, numFiltered))
            return df_filtered
        else:
            qs.PrintOut("No valid filtered data for %s" % (site))
            return pd.DataFrame()
    
    #return empty dataframe
    return pd.DataFrame()
示例#9
0
def genproc(col, window, config, fixpoint, realtime=False):
    """Generate processed monitoring data (displacements and velocities)
    for one sensor column over the given monitoring window.

    col      -- column object (uses .name, .nos node count, .seglen)
    window   -- monitoring window (uses .offsetstart, .start, .end, .numpts)
    config   -- configuration (uses .io smoothing/filling switches)
    fixpoint -- passed through to the error/noise profiling step
    realtime -- selects the real-time smoothing/filling parameters
    Returns procdata(col, monitoring, disp_vel, max_min_df, max_min_cml).

    NOTE(review): relies on DataFrame.append(), removed in pandas 2.0 --
    confirm target pandas version.
    """

    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, col.nos,
                                               window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = q.GetSingleLGDPM(col.name, node,
                                window.offsetstart.strftime("%Y-%m-%d %H:%M"))
        lgdpm = lgdpm.append(temp, ignore_index=True)
    monitoring = monitoring.append(lgdpm)

    # filter, then refresh the "last good data" cache in the database; on
    # any failure fall back to the copy already stored in the DB
    try:
        monitoring = flt.applyFilters(monitoring)
        LastGoodData = q.GetLastGoodData(monitoring, col.nos)
        q.PushLastGoodData(LastGoodData, col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)

    except:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
        print 'error'

    if len(LastGoodData) < col.nos:
        print col.name, " Missing nodes in LastGoodData"

    monitoring = monitoring.loc[monitoring.id <= col.nos]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # convert raw x/y/z readings to horizontal linear displacements
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(
        col.seglen, monitoring.x.values, monitoring.y.values,
        monitoring.z.values)

    monitoring = monitoring.drop(['x', 'y', 'z'], axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
    monitoring = monitoring[['name', 'id', 'xz', 'xy']]

    # placeholder NaN rows so nodes with no data at all still appear in
    # the output
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({
        'name': [0] * len(nodes_noval),
        'id': nodes_noval,
        'xy': [np.nan] * len(nodes_noval),
        'xz': [np.nan] * len(nodes_noval),
        'ts': [window.offsetstart] * len(nodes_noval)
    })
    nodes_nodata = nodes_nodata.set_index('ts')
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config,
                                                      fixpoint, col.nos)

    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(
        resamplenode, window=window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    # real-time runs use their own smoothing/filling parameters
    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(
        fill_smooth,
        offsetstart=window.offsetstart,
        end=window.end,
        roll_window_numpts=window.numpts,
        to_smooth=to_smooth,
        to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'name']].reset_index()

    monitoring = filled_smoothened.set_index('ts')

    # elapsed time in days since the first sample, used for velocity fits
    filled_smoothened[
        'td'] = filled_smoothened.ts.values - filled_smoothened.ts.values[0]
    filled_smoothened['td'] = filled_smoothened['td'].apply(
        lambda x: x / np.timedelta64(1, 'D'))

    nodal_filled_smoothened = filled_smoothened.groupby('id')

    # instantaneous velocities per node over the rolling window
    disp_vel = nodal_filled_smoothened.apply(node_inst_vel,
                                             roll_window_numpts=window.numpts,
                                             start=window.start)
    disp_vel = disp_vel[['ts', 'xz', 'xy', 'vel_xz', 'vel_xy',
                         'name']].reset_index()
    disp_vel = disp_vel[['ts', 'id', 'xz', 'xy', 'vel_xz', 'vel_xy', 'name']]
    disp_vel = disp_vel.set_index('ts')
    disp_vel = disp_vel.sort_values('id', ascending=True)

    #    return procdata(col,monitoring.sort(),disp_vel.sort_index(),max_min_df,max_min_cml)
    return procdata(col, monitoring.sort_index(), disp_vel.sort_index(),
                    max_min_df, max_min_cml)
示例#10
0
Created on Fri Nov 13 16:24:57 2015

@author: Mizpah
"""

import querySenslopeDb as qs
import filterSensorData as fs

# Pull raw accelerometer data (from 2013-01-01 onward) for every sensor
# table and run it through the filters: one resampled copy through the
# ortho+range filters, one copy through only the outlier filter.
sensors = qs.GetSensorDF()
#print sensors

for s in range(len(sensors)):
    targetTable = sensors.name[s]

    #df = qs.GetRawAccelData(siteid = targetTable, fromTime = "2013-01-01", msgid = 33)
    df = qs.GetRawAccelData(siteid=targetTable, fromTime="2013-01-01")
    numElements = len(df.index)

    if numElements > 0:
        df_resampled = fs.applyFilters(df,
                                       orthof=True,
                                       rangef=True,
                                       outlierf=False)
        df_filtered = fs.applyFilters(df,
                                      orthof=False,
                                      rangef=False,
                                      outlierf=True)
        numFiltered = len(df_filtered.index)

        # per-axis counts of valid (non-null) readings after filtering;
        # numFiltered/drawcountx/drawcounty are not used in the code
        # visible here -- the script may continue beyond this excerpt
        drawcountx = df_resampled.x.count()
        drawcounty = df_resampled.y.count()
示例#11
0
def genproc(col, window, config, fixpoint, realtime=False, comp_vel=True):
    """Generate processed monitoring data (depth/displacement, optionally
    velocities) for one sensor column over the monitoring window.

    col      -- column object (uses .name, .nos node count, .seglen)
    window   -- monitoring window (uses .offsetstart, .start, .end, .numpts)
    config   -- configuration (uses .io smoothing/filling switches)
    fixpoint -- passed through to the error/noise profiling step
    realtime -- selects the real-time smoothing/filling parameters
    comp_vel -- when False, skip velocity computation and use the
                filled/smoothed displacement frame instead
    Returns procdata(col, disp_vel, max_min_df, max_min_cml).

    NOTE(review): relies on DataFrame.append() (removed in pandas 2.0) and
    DataFrame.sort() (removed in pandas 0.20) -- confirm pandas version.
    """
    
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    monitoring = flt.applyFilters(monitoring)
    
    # refresh the "last good data" cache in the database; fall back to the
    # stored DB copy if pushing/pulling fails
    try:
        LastGoodData = q.GetLastGoodData(monitoring,col.nos)
        q.PushLastGoodData(LastGoodData,col.name)		
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
    except:	
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
   
    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,col.nos,window.offsetstart)
    
    #get last good data prior to the monitoring window (LGDPM)
    if len(NodesNoInitVal) != 0:
        lgdpm = q.GetSingleLGDPM(col.name, NodesNoInitVal, window.offsetstart)
        if len(lgdpm) != 0:
            lgdpm = flt.applyFilters(lgdpm)
            # keep only the most recent entry per node
            lgdpm = lgdpm.sort_index(ascending = False).drop_duplicates('id')
        
        if len(lgdpm) != 0:
            monitoring=monitoring.append(lgdpm)
        
    monitoring = monitoring.loc[monitoring.id <= col.nos]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart)|(pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # drop readings from nodes marked invalid in the node-status table
    invalid_nodes = q.GetNodeStatus(1)
    invalid_nodes = invalid_nodes[invalid_nodes.site == col.name]
    if len(invalid_nodes) != 0:
        stat = invalid_nodes.groupby('node', as_index=False)
        monitoring = stat.apply(remove_invalid, df=monitoring)
    # placeholder zero rows so nodes with no data at all still appear
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({'name': [0]*len(nodes_noval), 'id': nodes_noval,
                'x': [0]*len(nodes_noval), 'y': [0]*len(nodes_noval),
                'z': [0]*len(nodes_noval), 'ts': [window.offsetstart]*len(nodes_noval)})
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config, fixpoint, col.nos)

    # convert raw x/y/z readings to horizontal linear displacements
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(col.seglen,monitoring.x.values,monitoring.y.values,monitoring.z.values)

    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
        
    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(resamplenode, window = window).reset_index(level=1).set_index('ts')
    
    nodal_proc_monitoring = monitoring.groupby('id')
    
    # real-time runs use their own smoothing/filling parameters
    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill
    
    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth, offsetstart=window.offsetstart, end=window.end, roll_window_numpts=window.numpts, to_smooth=to_smooth, to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'x', 'y', 'z', 'name']].reset_index()
            
    # node depth signed by the x reading; falls back to the full segment
    # length where the sqrt is undefined (NaN)
    filled_smoothened['depth'] = filled_smoothened['x']/np.abs(filled_smoothened['x']) * np.sqrt(col.seglen**2 - filled_smoothened['xz']**2 - filled_smoothened['xy']**2)
    filled_smoothened['depth'] = filled_smoothened['depth'].fillna(value=col.seglen)
    filled_smoothened['net_dist'] = np.sqrt((filled_smoothened['xz'] ** 2) + (filled_smoothened['xy'] ** 2))

    monitoring = filled_smoothened.set_index('ts') 
    
    if comp_vel == True:
        # elapsed time in days since the first sample, for velocity fits
        filled_smoothened['td'] = filled_smoothened['ts'].values - filled_smoothened['ts'].values[0]
        filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1,'D'))
        
        nodal_filled_smoothened = filled_smoothened.groupby('id') 
        
        disp_vel = nodal_filled_smoothened.apply(node_inst_vel, roll_window_numpts=window.numpts, start=window.start)
        disp_vel = disp_vel.reset_index(drop=True)
        disp_vel = disp_vel.set_index('ts')
        disp_vel = disp_vel.sort_values('id', ascending=True)
    else:
        disp_vel = monitoring
    
    # NOTE(review): DataFrame.sort() was removed in pandas 0.20; the sibling
    # genproc uses .sort_index() here -- confirm target pandas version
    return procdata(col,disp_vel.sort(),max_min_df,max_min_cml)