Example #1
0
def getFilteredAll():
    """Fetch raw accel data for every sensor table and report how many
    rows survive filtering.

    Optional positional command-line arguments: start time, end time,
    msgid (defaults: '', '', 32).
    """
    def _argv_or(pos, default):
        # Fall back to the default when the positional argument is absent.
        try:
            return sys.argv[pos]
        except IndexError:
            return default

    start = _argv_or(1, '')
    end = _argv_or(2, '')
    msgid = _argv_or(3, 32)

    sensors = qs.GetSensorDF()

    for idx in range(len(sensors)):
        table = sensors.name[idx]
        raw = qs.GetRawAccelData(siteid = table, fromTime = start, toTime = end, msgid = msgid)
        raw_count = len(raw.index)
        qs.PrintOut("Number of %s Raw elements: %s" % (table, raw_count))

        # Nothing fetched for this table -- move on.
        if raw_count == 0:
            continue

        filtered = fs.applyFilters(raw, orthof=True, rangef=True, outlierf=True)
        filt_count = len(filtered.index)

        if filt_count > 0:
            qs.PrintOut("Number of %s filtered elements: %s" % (table, filt_count))
            #print df_filtered
        else:
            qs.PrintOut("No valid filtered data for %s" % (table))
def GetSingleLGDPM(site, node, startTS):
    """Return the last good data point for `node` of `site` prior to
    `startTS` (a single-row filtered DataFrame).

    Five-character site tables carry a `msgid` column, so both the query
    and the resulting column list differ for those tables.
    """
    has_msgid = len(site) == 5

    sql = "SELECT timestamp, id, xvalue, yvalue, zvalue"
    if has_msgid:
        sql += ", msgid"
    sql += " from %s WHERE id = %s and timestamp < '%s' " % (site, node, startTS)
    if has_msgid:
        sql += "and (msgid = 32 or msgid = 11) "
    sql += "ORDER BY timestamp DESC LIMIT 240"

    lgdpm = GetDBDataFrame(sql)

    if has_msgid:
        lgdpm.columns = ['ts', 'id', 'x', 'y', 'z', 'msgid']
    else:
        lgdpm.columns = ['ts', 'id', 'x', 'y', 'z']
    # Drop msgid (when present) before filtering.
    lgdpm = lgdpm[['ts', 'id', 'x', 'y', 'z']]

    lgdpm = filterSensorData.applyFilters(lgdpm)
    # Keep only the single most recent surviving row.
    lgdpm = lgdpm.sort_index(ascending = False)[0:1]

    return lgdpm
def GenerateLastGoodData():
    
    db = MySQLdb.connect(host = Hostdb, user = Userdb, passwd = Passdb)
    cur = db.cursor()
    #cur.execute("CREATE DATABASE IF NOT EXISTS %s" %nameDB)
    
    query = """    DROP TABLE IF EXISTS `senslopedb`.`lastgooddata`;
        CREATE TABLE  `senslopedb`.`lastgooddata` (
          `name` varchar(8) NOT NULL DEFAULT '',
          `id` int(11) NOT NULL DEFAULT '0',
          `timestamp` datetime NOT NULL DEFAULT '0000-00-00 00:00:00',
          `xvalue` int(11) DEFAULT NULL,
          `yvalue` int(11) DEFAULT NULL,
          `zvalue` int(11) DEFAULT NULL,
          PRIMARY KEY (`name`,`id`)
          ); """
    
    cur.execute(query)
    db.close()
    
    slist = GetSensorList()
    
    for s in slist:
        print s.name, s.nos
        
        df = GetRawAccelData(s.name,'',s.nos)
        df = filterSensorData.applyFilters(df,True,True,False)         
        
        dflgd = GetLastGoodData(df,s.nos,True)
        del df           
          
        PushLastGoodData(dflgd,s.name)
Example #4
0
def GetSingleLGDPM(site, node, startTS):
    """Return the last good data point for `node` of `site` prior to
    `startTS`, as a single-row filtered DataFrame tagged with a `name`
    column holding the site code."""
    query = "SELECT timestamp,id, xvalue, yvalue, zvalue"
    # Five-character site tables carry a msgid column (v2/v3 sensors).
    if len(site) == 5:
        query = query + ", msgid"
    query = query + " from %s WHERE id = %s and timestamp < '%s' " % (site, node, startTS)
    if len(site) == 5:
        query = query + "and (msgid = 32 or msgid = 11) "
#        query = query + "ORDER BY timestamp DESC LIMIT 2"
#    else:
    query = query + "ORDER BY timestamp DESC LIMIT 240"
    lgdpm = GetDBDataFrame(query)
    lgdpm['name'] = site 

#    if len(site) == 5:
#        if len(set(lgdpm.timestamp)) == 1:
#            lgdpm.loc[(lgdpm.msgid == 11) | (lgdpm.msgid == 32)]
#        else:
#            try:
#                lgdpm = lgdpm.loc[lgdpm.timestamp == lgdpm.timestamp[0]]
#            except:
#                print 'no data for node ' + str(node) + ' of ' + site
    
    if len(site) == 5:
        lgdpm.columns = ['ts','id','x','y','z', 'msgid','name']
    else:
        lgdpm.columns = ['ts','id','x','y','z','name']
    # Drop msgid (when present) before filtering.
    lgdpm = lgdpm[['ts', 'id', 'x', 'y', 'z','name']]

    lgdpm = filterSensorData.applyFilters(lgdpm)
    # Keep only the single most recent surviving row.
    lgdpm = lgdpm.sort_index(ascending = False)[0:1]
    
    return lgdpm
def getDF():
        """Fetch, filter and print (as JSON records) raw accel data for the
        site/date-range/msgid/node given on the command line.

        argv: 1=site, 2=from-date, 3=to-date, 4=msgid, 5=node id.
        Prints the filtered frame as ISO-dated JSON with the 'T'/'Z'
        markers and millisecond suffix stripped.
        """
        site = sys.argv[1]
        fdate = sys.argv[2]
        tdate = sys.argv[3]
        mid = sys.argv[4]
        nodeid = sys.argv[5]
        # NOTE(review): maxnode is hard-coded to 40 here -- confirm intended.
        df= GetRawAccelData(siteid = site, fromTime = fdate, toTime = tdate,  maxnode = 40, msgid = int(mid), targetnode =  int(nodeid) , batt=1, returndb=True)
        df_filt = filterSensorData.applyFilters(df, orthof=True, rangef=True, outlierf=True)
        df_filt = df_filt.set_index(['ts'])
        dfajson = df_filt.reset_index().to_json(orient='records',date_format='iso')
        # Normalize ISO timestamps: "2016-01-01T00:00:00.000Z" -> "2016-01-01 00:00:00"
        dfajson = dfajson.replace("T"," ").replace("Z","").replace(".000","")
     
        print dfajson
Example #6
0
def getSensor(start,end,colname,smooth=True):
    """Fetch and filter raw accel data for one sensor column, convert the
    axis readings to horizontal linear displacements (xz, xy), and return
    a ts-indexed frame with columns id, xz, xy, gts."""
    data = q.GetRawAccelData(siteid=colname, fromTime=start, toTime=end)
    seg_len = getSegLen(colname)
    data = f.applyFilters(data)

    # Convert raw x/y/z readings to horizontal linear displacements.
    data['xz'], data['xy'] = accel_to_lin_xz_xy(
        seg_len, data.x.values, data.y.values, data.z.values)
    data = data.drop(['x', 'y', 'z'], axis=1)
    data = data.drop_duplicates(['ts', 'id'])

    # Matlab-style serial timestamps alongside the datetime index.
    data['gts'] = data.ts.apply(datenum)

    data = data.set_index('ts')

    return data[['id', 'xz', 'xy', 'gts']]
Example #7
0
def getSensor(start, end, colname, smooth=True):
    """Fetch and filter raw accel data for one sensor column, convert the
    axis readings to horizontal linear displacements (xz, xy), and return
    a ts-indexed frame with columns id, xz, xy, gts.

    NOTE(review): the `smooth` flag is currently unused in this body.
    """
    df = q.GetRawAccelData(siteid=colname, fromTime=start, toTime=end)
    seg_len = getSegLen(colname)
    df = f.applyFilters(df)

    # Convert raw x/y/z readings to horizontal linear displacements.
    df['xz'], df['xy'] = accel_to_lin_xz_xy(seg_len, df.x.values, df.y.values,
                                            df.z.values)
    df = df.drop(['x', 'y', 'z'], axis=1)
    df = df.drop_duplicates(['ts', 'id'])

    # Matlab-style serial timestamps alongside the datetime index.
    df['gts'] = df.ts.apply(datenum)

    df = df.set_index('ts')

    df = df[['id', 'xz', 'xy', 'gts']]

    return df
Example #8
0
def GenerateLastGoodData():
    """Rebuild the senslopedb.lastgooddata table from scratch.

    Drops and recreates the table, then pushes the last good (filtered)
    data point of every sensor column into it.
    """
    db = mysqlDriver.connect(host = Hostdb, user = Userdb, passwd = Passdb)
    cur = db.cursor()
    #cur.execute("CREATE DATABASE IF NOT EXISTS %s" %nameDB)
    
    #Separated the consecutive drop table and create table in one query in
    #   order to fix "commands out of sync" error
    query = "DROP TABLE IF EXISTS `senslopedb`.`lastgooddata`;"
    cur.execute(query)
    
    query = """ CREATE TABLE  `senslopedb`.`lastgooddata` (
          `name` varchar(8) NOT NULL DEFAULT '',
          `id` int(11) NOT NULL DEFAULT '0',
          `timestamp` datetime NOT NULL DEFAULT '0000-00-00 00:00:00',
          `xvalue` int(11) DEFAULT NULL,
          `yvalue` int(11) DEFAULT NULL,
          `zvalue` int(11) DEFAULT NULL,
          PRIMARY KEY (`name`,`id`)
          ); """
    cur.execute(query)
    
    db.close()
    
    slist = GetSensorList()
    
    for s in slist:
        print s.name, s.nos
        
        df = GetRawAccelData(siteid=s.name,maxnode=s.nos)
        df = filterSensorData.applyFilters(df,True,True,False)         
        
        dflgd = GetLastGoodData(df,s.nos,True)
        # Release the raw frame before the next (potentially large) fetch.
        del df           
          
        try:
            PushLastGoodData(dflgd,s.name)
        except (AttributeError,TypeError):
            # Empty/invalid last-good frame -- report and continue.
            PrintOut("Error. Empty database")
def GenerateLastGoodData():
    """Rebuild the senslopedb.lastgooddata table from scratch.

    NOTE(review): exact duplicate of the preceding GenerateLastGoodData
    definition -- this later definition shadows the earlier one.
    """
    db = mysqlDriver.connect(host = Hostdb, user = Userdb, passwd = Passdb)
    cur = db.cursor()
    #cur.execute("CREATE DATABASE IF NOT EXISTS %s" %nameDB)
    
    #Separated the consecutive drop table and create table in one query in
    #   order to fix "commands out of sync" error
    query = "DROP TABLE IF EXISTS `senslopedb`.`lastgooddata`;"
    cur.execute(query)
    
    query = """ CREATE TABLE  `senslopedb`.`lastgooddata` (
          `name` varchar(8) NOT NULL DEFAULT '',
          `id` int(11) NOT NULL DEFAULT '0',
          `timestamp` datetime NOT NULL DEFAULT '0000-00-00 00:00:00',
          `xvalue` int(11) DEFAULT NULL,
          `yvalue` int(11) DEFAULT NULL,
          `zvalue` int(11) DEFAULT NULL,
          PRIMARY KEY (`name`,`id`)
          ); """
    cur.execute(query)
    
    db.close()
    
    slist = GetSensorList()
    
    for s in slist:
        print s.name, s.nos
        
        df = GetRawAccelData(siteid=s.name,maxnode=s.nos)
        df = filterSensorData.applyFilters(df,True,True,False)         
        
        dflgd = GetLastGoodData(df,s.nos,True)
        # Release the raw frame before the next (potentially large) fetch.
        del df           
          
        try:
            PushLastGoodData(dflgd,s.name)
        except (AttributeError,TypeError):
            # Empty/invalid last-good frame -- report and continue.
            PrintOut("Error. Empty database")
Example #10
0
def genproc(col, window, config, fixpoint, realtime=False):
    """Generate processed monitoring data for one sensor column.

    Fetches raw accel data over the monitoring window, back-fills nodes
    with no initial data from their last good data point, filters,
    converts to horizontal displacements, resamples per node, smooths,
    and computes nodal velocities.

    Parameters: col (sensor column; uses .name, .nos, .seglen), window
    (uses .offsetstart, .start, .end, .numpts), config (smoothing/filling
    switches under config.io), fixpoint (passed to noise profiling),
    realtime (use the rt_* smoothing/filling settings when True).

    Returns a procdata(col, monitoring, disp_vel, max_min_df, max_min_cml).
    """

    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, col.nos,
                                               window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = q.GetSingleLGDPM(col.name, node,
                                window.offsetstart.strftime("%Y-%m-%d %H:%M"))
        lgdpm = lgdpm.append(temp, ignore_index=True)
    monitoring = monitoring.append(lgdpm)

    # Filter, then refresh the last-good-data table; on any failure fall
    # back to the last-good data already stored in the DB.
    try:
        monitoring = flt.applyFilters(monitoring)
        LastGoodData = q.GetLastGoodData(monitoring, col.nos)
        q.PushLastGoodData(LastGoodData, col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)

    except:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
        print 'error'

    if len(LastGoodData) < col.nos:
        print col.name, " Missing nodes in LastGoodData"

    # Drop rows for node ids beyond this column's node count.
    monitoring = monitoring.loc[monitoring.id <= col.nos]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # Convert raw x/y/z readings to horizontal linear displacements.
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(
        col.seglen, monitoring.x.values, monitoring.y.values,
        monitoring.z.values)

    monitoring = monitoring.drop(['x', 'y', 'z'], axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
    monitoring = monitoring[['name', 'id', 'xz', 'xy']]

    # Insert NaN placeholder rows (at offsetstart) for nodes with no data
    # at all, so every node survives the per-node groupby below.
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({
        'name': [0] * len(nodes_noval),
        'id': nodes_noval,
        'xy': [np.nan] * len(nodes_noval),
        'xz': [np.nan] * len(nodes_noval),
        'ts': [window.offsetstart] * len(nodes_noval)
    })
    nodes_nodata = nodes_nodata.set_index('ts')
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config,
                                                      fixpoint, col.nos)

    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(
        resamplenode, window=window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(
        fill_smooth,
        offsetstart=window.offsetstart,
        end=window.end,
        roll_window_numpts=window.numpts,
        to_smooth=to_smooth,
        to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'name']].reset_index()

    monitoring = filled_smoothened.set_index('ts')

    # Elapsed time in days since the first sample, for velocity fitting.
    filled_smoothened[
        'td'] = filled_smoothened.ts.values - filled_smoothened.ts.values[0]
    filled_smoothened['td'] = filled_smoothened['td'].apply(
        lambda x: x / np.timedelta64(1, 'D'))

    nodal_filled_smoothened = filled_smoothened.groupby('id')

    disp_vel = nodal_filled_smoothened.apply(node_inst_vel,
                                             roll_window_numpts=window.numpts,
                                             start=window.start)
    disp_vel = disp_vel[['ts', 'xz', 'xy', 'vel_xz', 'vel_xy',
                         'name']].reset_index()
    disp_vel = disp_vel[['ts', 'id', 'xz', 'xy', 'vel_xz', 'vel_xy', 'name']]
    disp_vel = disp_vel.set_index('ts')
    disp_vel = disp_vel.sort_values('id', ascending=True)

    #    return procdata(col,monitoring.sort(),disp_vel.sort_index(),max_min_df,max_min_cml)
    return procdata(col, monitoring.sort_index(), disp_vel.sort_index(),
                    max_min_df, max_min_cml)
Example #11
0
def getFilteredData(isCmd = True, inSite = "", inNode = 1, inStart = "", inEnd = "", inMsgid = 32):
    """Return the filtered raw-accel DataFrame for one site/node/range.

    When isCmd is True, site/node/start/end/msgid are taken from
    sys.argv[1..5] ('nil' selects the default); otherwise the in* keyword
    arguments are used. Returns an empty DataFrame when no site is given,
    no raw rows exist, or nothing survives filtering.
    """
    if isCmd == True:
        try: #site selection
            site = sys.argv[1]
        except IndexError:
            print "No site has been selected. Script unable to run!"
            return
            
        try: #node selection
            node = sys.argv[2]
    
            # 'nil' / missing argument means "all nodes".
            if node == 'nil':
                node = -1
        except IndexError:
            node = -1
    
        try: #start date
            start = sys.argv[3]
    
            if start == 'nil':
                start = ''
        except IndexError:
            start = ''
            
        try: #end date
            end = sys.argv[4] 
    
            if end == 'nil':
                end = ''
        except IndexError:
            end = ''       
            
        try: #switch between accel 1 and 2
            msgid = sys.argv[5] 
    
            if msgid == 'nil':
                msgid = 32
        except IndexError:
            msgid = 32
    else:
        site = inSite
        node = inNode
        start = inStart
        end = inEnd
        msgid = inMsgid

    #print "variables: %s %s %s %s %s" % (site,node,start,end,msgid)

    df = qs.GetRawAccelData(siteid = site, fromTime = start, toTime = end, msgid = msgid, targetnode = node)
    numElements = len(df.index)
    qs.PrintOut("Number of %s Raw elements: %s" % (site, numElements))
    
    if numElements > 0:
        df_filtered = fs.applyFilters(df, orthof=True, rangef=True, outlierf=False)
        numFiltered = len(df_filtered.index)
        
        if numFiltered > 0:
            qs.PrintOut("Number of %s filtered elements: %s" % (site, numFiltered))
            return df_filtered
        else:
            qs.PrintOut("No valid filtered data for %s" % (site))
            return pd.DataFrame()
    
    #return empty dataframe
    return pd.DataFrame()
import sys
from datetime import datetime as dt
from datetime import timedelta as td

import numpy as np
import pandas as pd
import requests
import sqlalchemy
from sqlalchemy import create_engine

import filterSensorData

site = sys.argv[1]
fdate = sys.argv[2]
tdate = sys.argv[3]
nid = sys.argv[4].replace("-", ",")
#site = "blcb"
#fdate = "2014-05-25"
#tdate = "2016-06-25"
#nid = "1-2-3".replace("-",",")
engine = create_engine(
    'mysql+mysqldb://updews:[email protected]/senslopedb')
query = "SELECT * FROM senslopedb.%s where timestamp between '%s ' and '%s' and id in (%s)" % (
    site, fdate, tdate, nid)
df = pd.io.sql.read_sql(query, engine)
df.columns = ['ts', 'id', 'x', 'y', 'z', 's']
df['name'] = site
df_filt = filterSensorData.applyFilters(df,
                                        orthof=True,
                                        rangef=True,
                                        outlierf=True)
dfajson = df_filt.reset_index().to_json(orient='records', date_format='iso')
dfajson = dfajson.replace("T", " ").replace("Z", "").replace(".000", "")
print dfajson
Example #13
0
def generate_proc(colname, num_nodes, seg_len, custom_end,roll_window_length,data_dt,rt_window_length,num_roll_window_ops,filt=False,for_plots=False):
    """Generate PROC monitoring data (ts-indexed id/xz/xy frame) for one
    sensor column over the real-time monitoring window.

    With filt=True the raw data is passed through ffd.filt; with
    for_plots=True the filtered raw frame (with originals kept) is
    returned early for plotting. Returns (monitoring, monwin) otherwise.
    """
    #1. setting date boundaries for real-time monitoring window
#    roll_window_numpts=int(1+roll_window_length/data_dt)

    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart,monwin=get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops,custom_end)
#    print "end inside generate_proc ------------>>>> %s" %str(end)
    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:-->> %s - %s <<--" %(str(colname),str(num_nodes))


    #3. getting accelerometer data for site 'colname'

    if filt:
        if for_plots:
            # Fetch 4 extra days of history so the plot has lead-in data.
            custom_start = offsetstart  - timedelta(days=4)
            monitoring=qdb.GetRawAccelData(colname,custom_start)
            monitoring = ffd.filt(monitoring,keep_orig=True)
            earliest_ts = monitoring.ts.min()
            print "offsetstart ---------> %s " %str(offsetstart)
            print "earliest_ts ---------> %s " %str(earliest_ts)
            monitoring = monitoring[(monitoring.ts >= custom_start) & (monitoring.ts <= end)]
            # Early return: plotting callers want the filtered raw frame.
            return monitoring
        else:
            custom_start = offsetstart  - timedelta(days=4)
            monitoring=qdb.GetRawAccelData(colname,custom_start)
#            monitoring=qdb.GetRawAccelData(colname,offsetstart)
            monitoring = ffd.filt(monitoring)
            monitoring = monitoring[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]
    else:
        monitoring=qdb.GetRawAccelData(colname,offsetstart)
        monitoring = monitoring[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]

    #3.1 identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,num_nodes,offsetstart)
#    print NodesNoInitVal
    #4: get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = qdb.GetSingleLGDPM(colname, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
        temp = fsd.applyFilters(temp)
        # Keep only the single most recent surviving row per node.
        temp = temp.sort_index(ascending = False)[0:1]
        lgdpm = lgdpm.append(temp,ignore_index=True)
 
    #5 TODO: Resample the dataframe together with the LGDOM
    monitoring=monitoring.append(lgdpm)

    #6. evaluating which data needs to be filtered
#    try:
    monitoring=fsd.applyFilters(monitoring)
    LastGoodData=qdb.GetLastGoodData(monitoring,num_nodes)
    qdb.PushLastGoodData(LastGoodData,colname)
    LastGoodData = qdb.GetLastGoodDataFromDb(colname)
    print 'Done'


    if len(LastGoodData)<num_nodes: print colname, " Missing nodes in LastGoodData"

    #5. extracting last data outside monitoring window
    LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]

    #6. appending LastGoodData to monitoring
    monitoring=monitoring.append(LastGoodData)    

    
    #7. replacing date of data outside monitoring window with first date of monitoring window
    monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

    #8. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
    monitoring['xz'],monitoring['xy']=accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)
    
    #9. removing unnecessary columns x,y,z
    monitoring=monitoring.drop(['x','y','z'],axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])

    #10. setting ts as index
    monitoring=monitoring.set_index('ts')

    #11. reordering columns
    monitoring=monitoring[['id','xz','xy']]
    
    return monitoring,monwin
Example #14
0
def genproc(col, window, config, fixpoint='', realtime=False, comp_vel=True):
    """Generate processed monitoring data for one sensor column.

    Filters raw accel data, back-fills nodes missing initial data from
    their last good data point, removes rows flagged invalid in the node
    status table, converts to horizontal displacements (plus depth),
    resamples and smooths per node, and (when comp_vel) computes nodal
    velocities. Returns procdata(col, disp_vel, max_min_df, max_min_cml).
    """
    # Empty fixpoint means "use the configured column fix".
    if fixpoint == '':
        fixpoint = config.io.column_fix
    
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)
        
    monitoring = flt.applyFilters(monitoring)
    
    # Best-effort refresh of the last-good-data table.
    try:
        LastGoodData = q.GetLastGoodData(monitoring,col.nos)
        q.PushLastGoodData(LastGoodData,col.name)
    except:
        pass
   
    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,col.nos,window.offsetstart)
    
    #get last good data prior to the monitoring window (LGDPM)
    if len(NodesNoInitVal) != 0:
        lgdpm = q.GetSingleLGDPM(col.name, NodesNoInitVal, window.offsetstart)
        if len(lgdpm) != 0:
            lgdpm = flt.applyFilters(lgdpm)
            # Keep the most recent surviving row per node.
            lgdpm = lgdpm.sort_index(ascending = False).drop_duplicates('id')
        
        if len(lgdpm) != 0:
            monitoring=monitoring.append(lgdpm)
        
    monitoring = monitoring.loc[monitoring.id <= col.nos]
        
    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart)|(pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # Drop data from nodes flagged invalid for this site.
    invalid_nodes = q.GetNodeStatus(1)
    invalid_nodes = invalid_nodes[invalid_nodes.site == col.name]
    if len(invalid_nodes) != 0:
        stat = invalid_nodes.groupby('node', as_index=False)
        monitoring = stat.apply(remove_invalid, df=monitoring)

    # Placeholder rows (at offsetstart) for nodes with no data at all.
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({'name': [0]*len(nodes_noval), 'id': nodes_noval,
                'x': [0]*len(nodes_noval), 'y': [0]*len(nodes_noval),
                'z': [0]*len(nodes_noval), 'ts': [window.offsetstart]*len(nodes_noval)})
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config, fixpoint, col.nos)

    # Convert raw x/y/z readings to horizontal linear displacements.
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(col.seglen,monitoring.x.values,monitoring.y.values,monitoring.z.values)

    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
        
    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(resamplenode, window = window).reset_index(level=1).set_index('ts')
    
    nodal_proc_monitoring = monitoring.groupby('id')
    
    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill
    
    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth, offsetstart=window.offsetstart, end=window.end, roll_window_numpts=window.numpts, to_smooth=to_smooth, to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'x', 'y', 'z', 'name']].reset_index()
            
    # Depth below the sensor head; sign taken from x, NaNs -> full seglen.
    filled_smoothened['depth'] = filled_smoothened['x']/np.abs(filled_smoothened['x']) * np.sqrt(col.seglen**2 - filled_smoothened['xz']**2 - filled_smoothened['xy']**2)
    filled_smoothened['depth'] = filled_smoothened['depth'].fillna(value=col.seglen)

    monitoring = filled_smoothened.set_index('ts')   

    if comp_vel == True:
        # Elapsed time in days since the first sample, for velocity fitting.
        filled_smoothened['td'] = filled_smoothened['ts'].values - filled_smoothened['ts'].values[0]
        filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1,'D'))
        
        nodal_filled_smoothened = filled_smoothened.groupby('id') 
        
        disp_vel = nodal_filled_smoothened.apply(node_inst_vel, roll_window_numpts=window.numpts, start=window.start)
        disp_vel = disp_vel.reset_index(drop=True)
        disp_vel = disp_vel.set_index('ts')
        disp_vel = disp_vel.sort_values('id', ascending=True)
    else:
        disp_vel = monitoring
    
    return procdata(col,disp_vel.sort_index(),max_min_df,max_min_cml)
import filterSensorData as fs


# For every sensor table, compare per-axis data counts between the
# resampled raw frame and the outlier-filtered frame.
# NOTE(review): this snippet appears truncated mid-report (Z counts are
# computed but never printed).
sensors = qs.GetSensorDF()
#print sensors

for s in range(len(sensors)):
    targetTable = sensors.name[s]

    #df = qs.GetRawAccelData(siteid = targetTable, fromTime = "2013-01-01", msgid = 33)
    df = qs.GetRawAccelData(siteid = targetTable, fromTime = "2013-01-01")
    numElements = len(df.index)

        
    if numElements > 0:
        # Resample only (no outlier filter) vs outlier filter only.
        df_resampled = fs.applyFilters(df, orthof=True, rangef=True, outlierf=False)
        df_filtered = fs.applyFilters(df, orthof=False, rangef=False, outlierf=True)
        numFiltered= len(df_filtered.index)
        
        drawcountx = df_resampled.x.count()
        drawcounty = df_resampled.y.count()
        drawcountz = df_resampled.z.count()
        dfinalcountx = df_filtered.x.count()
        dfinalcounty = df_filtered.y.count()
        dfinalcountz = df_filtered.z.count()
            
        if numFiltered > 0:
            qs.PrintOut("Data Count Summary for %s" %(targetTable))
            qs.PrintOut("Raw Data (resampled without pad)")
            qs.PrintOut("Xraw: %s" % (drawcountx))
            qs.PrintOut("Yraw: %s" % (drawcounty))
def process(df_node):
    """Diagnose one sensor node: print raw/filtered data counts, standard
    deviations, axis/index correlations, tilt angles and battery status
    for the 24-hour window ending 4 hours before `time`, then plot.

    Relies on module-level globals: time, nid, col, qdb, fsd, np, td, xyz.
    Returns 1 when no recent data exists for the node, otherwise None.
    """
    dff=fsd.applyFilters(df_node)
    
    #time
    
    # Analysis window: 24 h ending 4 h before `time`.
    fromTime=time-td(days=1, hours=4)
    toTime=time-td(hours=4)
    dfr = df_node[(df_node.id==nid)&(df_node.ts>=fromTime) & (df_node.ts<=toTime)] 
    df = dff[(dff.id==nid)&(dff.ts>=fromTime) & (dff.ts<=toTime)]
    df = df.set_index('ts')  
    
    
    #Integer index
    N=len(df.index)
    df['i']=range(1,N+1,1)
    
    # Squared correlation of each axis against the sample index
    # (detects monotonic drift).
    x_corr = (df['x'].corr(df.i))**2 
    y_corr = (df['y'].corr(df.i))**2
    z_corr = (df['z'].corr(df.i))**2
    
    stx,sty,stz=df[['x','y','z']].std()
    
    dft=df_node[(df_node.ts>=toTime)&(df_node.ts<=time)&(df_node.id==nid)]
    # % of expected samples sent (48 per day) and % surviving the filter.
    c= 100*dfr.x.count()/48
    cf= 100*df.x.count()/dfr.x.count()
                      
    if not dft.empty:
        df1=dft[['x','y','z']].loc[max(dft.index)]
        x,y,z=df1
        xdeg,ydeg,zdeg=np.degrees(np.arcsin(df1/1024.0))
    else:
        print ("No data!\n")
        # Fix: removed two unreachable NaN-assignment statements that
        # followed this return in the original.
        return 1
        
    dfft=dff[(dff.ts>=toTime)&(dff.ts<=time)&(dff.id==nid)]
    dfft=dfft[['x','y','z']].loc[max(dfft.index)]
    
    # Latest filtered reading vs the window mean, in raw bits.
    delx=dfft.x-df.x.mean()
    dely=dfft.y-df.y.mean()
    delz=dfft.z-df.z.mean()
    
    # Same delta expressed as tilt angles (accel counts scaled by 1024).
    xdegdel=np.degrees(np.arcsin(dfft.x/1024))-np.degrees(np.arcsin(df.x.mean()/1024))
    ydegdel=np.degrees(np.arcsin(dfft.y/1024))-np.degrees(np.arcsin(df.y.mean()/1024))
    zdegdel=np.degrees(np.arcsin(dfft.z/1024))-np.degrees(np.arcsin(df.z.mean()/1024))
    
    
    #slope, intercept, r_value, p_value, std_err = stats.linregress(df.x,df.i)
    print("########################################################################\n")
    print(col+str(nid)+' ('+str(time)+')')
    #tag    
    query='''SELECT * FROM senslopedb.node_status
                where site='%s' and node=%d and inUse=1
                order by date_of_identification desc'''%(col,nid) 
    dfs=qdb.GetDBDataFrame(query)
    if not dfs.empty:
        print ('\nStatus:\t\t%s'%dfs.status[0])
        print ('Comment:\t%s'%dfs.comment[0])
    else:
        print ('\nStatus:\t\tOK')    
    
    print('\t\tx\ty\tz')
    print('raw data=\t\t(%d,\t%d,\t%d)' %(x,y,z))
    
    print("standard dev= \t%.2f,\t%.2f,\t%.2f" %(stx,sty,stz))
    print("correlation= \t%.2f, \t%.2f, \t%.2f" %(x_corr,y_corr,z_corr))
    print("bit delta= \t\t%.2f, \t%.2f, \t%.2f" %(delx,dely,delz))
    print('%%data sent count= \t%.0f%%' %c)
    print('%%filter/raw count= \t%.0f%%' %cf)
    
    print('data theta(deg)= \t(%.2f,\t%.2f,\t%.2f)' %(xdeg,ydeg,zdeg))
    print("delta theta(deg)= \t%.2f, \t%.2f, \t%.2f" %(xdegdel,ydegdel,zdegdel))
    
    # Five-character sites report battery voltage against per-node limits.
    if len(col)==5:
        batt=dft.batt.loc[max(dft.index)]
        query='''SELECT site_name,node_id,version,vmax,vmin FROM senslopedb.node_accel_table
                where site_name='%s' and node_id=%d'''%(col,nid) 
        dfb=qdb.GetDBDataFrame(query)
        print('\nBattery:')
        print('battery (min,max)= \t(%.2fV,\t%.2fV)'%(dfb.vmin[0],dfb.vmax[0]))
        print('battery voltage= \t%.2fV' %batt)
        # How far outside the [vmin, vmax] band the reading lies (0 if inside).
        if batt>dfb.vmax[0]:
            batt_del=batt-dfb.vmax[0]
        elif batt<dfb.vmin[0]:
            batt_del=batt-dfb.vmin[0]
        else:
            batt_del=0
        print('Delta Battery= \t\t%.2fV'%batt_del)
    
    print("\n########################################################################\n")
    
        
    xyz.xyzplot(dff,col,nid,time)
Example #17
0
import querySenslopeDb as qs
import filterSensorData as fs

sensors = qs.GetSensorDF()
#print sensors

for s in range(len(sensors)):
    targetTable = sensors.name[s]

    #df = qs.GetRawAccelData(siteid = targetTable, fromTime = "2013-01-01", msgid = 33)
    df = qs.GetRawAccelData(siteid=targetTable, fromTime="2013-01-01")
    numElements = len(df.index)

    if numElements > 0:
        df_resampled = fs.applyFilters(df,
                                       orthof=True,
                                       rangef=True,
                                       outlierf=False)
        df_filtered = fs.applyFilters(df,
                                      orthof=False,
                                      rangef=False,
                                      outlierf=True)
        numFiltered = len(df_filtered.index)

        drawcountx = df_resampled.x.count()
        drawcounty = df_resampled.y.count()
        drawcountz = df_resampled.z.count()
        dfinalcountx = df_filtered.x.count()
        dfinalcounty = df_filtered.y.count()
        dfinalcountz = df_filtered.z.count()

        if numFiltered > 0:
Example #18
0
def genproc(col, window, config, fixpoint, realtime=False, comp_vel=True):
    """Generate processed monitoring data for one sensor column.

    Filters raw accel data, back-fills nodes missing initial data from
    their last good data point, removes rows flagged invalid, converts to
    horizontal displacements (plus depth and net distance), resamples and
    smooths per node, and (when comp_vel) computes nodal velocities.
    Returns procdata(col, disp_vel, max_min_df, max_min_cml).
    """
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    monitoring = flt.applyFilters(monitoring)
    
    # Refresh the last-good-data table; on failure fall back to whatever
    # the DB already holds.
    try:
        LastGoodData = q.GetLastGoodData(monitoring,col.nos)
        q.PushLastGoodData(LastGoodData,col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
    except:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
   
    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,col.nos,window.offsetstart)
    
    #get last good data prior to the monitoring window (LGDPM)
    if len(NodesNoInitVal) != 0:
        lgdpm = q.GetSingleLGDPM(col.name, NodesNoInitVal, window.offsetstart)
        if len(lgdpm) != 0:
            lgdpm = flt.applyFilters(lgdpm)
            # Keep the most recent surviving row per node.
            lgdpm = lgdpm.sort_index(ascending = False).drop_duplicates('id')
        
        if len(lgdpm) != 0:
            monitoring=monitoring.append(lgdpm)
        
    monitoring = monitoring.loc[monitoring.id <= col.nos]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart)|(pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # Drop data from nodes flagged invalid for this site.
    invalid_nodes = q.GetNodeStatus(1)
    invalid_nodes = invalid_nodes[invalid_nodes.site == col.name]
    if len(invalid_nodes) != 0:
        stat = invalid_nodes.groupby('node', as_index=False)
        monitoring = stat.apply(remove_invalid, df=monitoring)
    # Placeholder rows (at offsetstart) for nodes with no data at all.
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({'name': [0]*len(nodes_noval), 'id': nodes_noval,
                'x': [0]*len(nodes_noval), 'y': [0]*len(nodes_noval),
                'z': [0]*len(nodes_noval), 'ts': [window.offsetstart]*len(nodes_noval)})
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config, fixpoint, col.nos)

    # Convert raw x/y/z readings to horizontal linear displacements.
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(col.seglen,monitoring.x.values,monitoring.y.values,monitoring.z.values)

    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
        
    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(resamplenode, window = window).reset_index(level=1).set_index('ts')
    
    nodal_proc_monitoring = monitoring.groupby('id')
    
    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill
    
    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth, offsetstart=window.offsetstart, end=window.end, roll_window_numpts=window.numpts, to_smooth=to_smooth, to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'x', 'y', 'z', 'name']].reset_index()
            
    # Depth below the sensor head; sign taken from x, NaNs -> full seglen.
    filled_smoothened['depth'] = filled_smoothened['x']/np.abs(filled_smoothened['x']) * np.sqrt(col.seglen**2 - filled_smoothened['xz']**2 - filled_smoothened['xy']**2)
    filled_smoothened['depth'] = filled_smoothened['depth'].fillna(value=col.seglen)
    filled_smoothened['net_dist'] = np.sqrt((filled_smoothened['xz'] ** 2) + (filled_smoothened['xy'] ** 2))

    monitoring = filled_smoothened.set_index('ts') 
    
    if comp_vel == True:
        # Elapsed time in days since the first sample, for velocity fitting.
        filled_smoothened['td'] = filled_smoothened['ts'].values - filled_smoothened['ts'].values[0]
        filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1,'D'))
        
        nodal_filled_smoothened = filled_smoothened.groupby('id') 
        
        disp_vel = nodal_filled_smoothened.apply(node_inst_vel, roll_window_numpts=window.numpts, start=window.start)
        disp_vel = disp_vel.reset_index(drop=True)
        disp_vel = disp_vel.set_index('ts')
        disp_vel = disp_vel.sort_values('id', ascending=True)
    else:
        disp_vel = monitoring
    
    # Fix: DataFrame.sort() was deprecated and removed from pandas; use
    # sort_index() as the sibling genproc implementation does.
    return procdata(col,disp_vel.sort_index(),max_min_df,max_min_cml)
Example #19
0
def generate_proc(colname, num_nodes, seg_len, custom_end,f=False,for_plots=False):
    
    #1. setting date boundaries for real-time monitoring window
#    roll_window_numpts=int(1+roll_window_length/data_dt)
    roll_window_numpts=int(1+roll_window_length/data_dt)
    end, start, offsetstart,monwin=get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops,custom_end)

    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:-->> %s - %s <<--" %(str(colname),str(num_nodes))


    #3. getting accelerometer data for site 'colname'
    monitoring=qdb.GetRawAccelData(colname,offsetstart)
    if f:
        if for_plots:
            monitoring = ffd.filt(monitoring,keep_orig=True)
            return monitoring
        else:
            monitoring = ffd.filt(monitoring)

    else:
        monitoring = monitoring.loc[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]
     
    #3.1 identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,num_nodes,offsetstart)
#    print NodesNoInitVal
    #4: get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = qdb.GetSingleLGDPM(colname, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
        temp = fsd.applyFilters(temp)
        temp = temp.sort_index(ascending = False)[0:1]        
        lgdpm = lgdpm.append(temp,ignore_index=True)
 
    #5 TODO: Resample the dataframe together with the LGDOM
    monitoring=monitoring.append(lgdpm)

    #6. evaluating which data needs to be filtered
#    try:
    monitoring=fsd.applyFilters(monitoring)		
    LastGoodData=qdb.GetLastGoodData(monitoring,num_nodes)		
    qdb.PushLastGoodData(LastGoodData,colname)		
    LastGoodData = qdb.GetLastGoodDataFromDb(colname)		
    print 'Done'		
	
		
    if len(LastGoodData)<num_nodes: print colname, " Missing nodes in LastGoodData"		
		
    #5. extracting last data outside monitoring window		
    LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]		
		
    #6. appending LastGoodData to monitoring		
    monitoring=monitoring.append(LastGoodData)    

    
    #7. replacing date of data outside monitoring window with first date of monitoring window
    monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

    #8. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
    monitoring['xz'],monitoring['xy']=accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)
    
    #9. removing unnecessary columns x,y,z
    monitoring=monitoring.drop(['x','y','z'],axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])

    #10. setting ts as index
    monitoring=monitoring.set_index('ts')

    #11. reordering columns
    monitoring=monitoring[['id','xz','xy']]
    
    return monitoring,monwin
Example #20
0
def process(df_node):
    """Print a diagnostic report for one sensor node and render its xyz plot.

    Relies on module-level globals: col (site name), nid (node id),
    time (report timestamp), td (timedelta), fsd, qdb, xyz, np.

    Parameters:
        df_node - raw dataframe for the node with at least columns
                  ts, id, x, y, z (and batt for 5-character sites)

    Returns 1 when the node has no data in the 4-hour window before
    `time`; otherwise returns None after printing the report.

    Fix: removed the unreachable statements that followed `return 1` in
    the no-data branch (the NaN fallbacks could never execute).
    """
    # Filtered copy of the node data.
    dff=fsd.applyFilters(df_node)

    # 24-hour analysis window ending 4 hours before `time`.
    fromTime=time-td(days=1, hours=4)
    toTime=time-td(hours=4)
    dfr = df_node[(df_node.id==nid)&(df_node.ts>=fromTime) & (df_node.ts<=toTime)]
    df = dff[(dff.id==nid)&(dff.ts>=fromTime) & (dff.ts<=toTime)]
    df = df.set_index('ts')

    # Integer index 1..N used as the ordinate for per-axis correlation.
    N=len(df.index)
    df['i']=range(1,N+1,1)

    # Squared correlation of each axis against sample order (drift indicator).
    x_corr = (df['x'].corr(df.i))**2
    y_corr = (df['y'].corr(df.i))**2
    z_corr = (df['z'].corr(df.i))**2

    # Per-axis standard deviations over the window.
    stx,sty,stz=df[['x','y','z']].std()

    # Most recent 4 hours of raw data for the node.
    dft=df_node[(df_node.ts>=toTime)&(df_node.ts<=time)&(df_node.id==nid)]
    # NOTE(review): Python 2 integer division; 48 presumably the expected
    # half-hourly sample count over 24 h — confirm against sampling rate.
    c= 100*dfr.x.count()/48
    # NOTE(review): raises ZeroDivisionError when dfr is empty — confirm
    # callers guarantee raw data exists in the window.
    cf= 100*df.x.count()/dfr.x.count()

    if not dft.empty:
        # Latest raw reading and its tilt angles (1024 counts ~ 1 g).
        df1=dft[['x','y','z']].loc[max(dft.index)]
        x,y,z=df1
        xdeg,ydeg,zdeg=np.degrees(np.arcsin(df1/1024.0))
    else:
        print ("No data!\n")
        return 1

    # Latest filtered reading and its deltas vs. the window means.
    dfft=dff[(dff.ts>=toTime)&(dff.ts<=time)&(dff.id==nid)]
    dfft=dfft[['x','y','z']].loc[max(dfft.index)]

    delx=dfft.x-df.x.mean()
    dely=dfft.y-df.y.mean()
    delz=dfft.z-df.z.mean()

    xdegdel=np.degrees(np.arcsin(dfft.x/1024))-np.degrees(np.arcsin(df.x.mean()/1024))
    ydegdel=np.degrees(np.arcsin(dfft.y/1024))-np.degrees(np.arcsin(df.y.mean()/1024))
    zdegdel=np.degrees(np.arcsin(dfft.z/1024))-np.degrees(np.arcsin(df.z.mean()/1024))

    print("########################################################################\n")
    print(col+str(nid)+' ('+str(time)+')')

    # Operator-tagged node status, most recent first.
    query='''SELECT * FROM senslopedb.node_status
                where site='%s' and node=%d and inUse=1
                order by date_of_identification desc'''%(col,nid)
    dfs=qdb.GetDBDataFrame(query)
    if not dfs.empty:
        print ('\nStatus:\t\t%s'%dfs.status[0])
        print ('Comment:\t%s'%dfs.comment[0])
    else:
        print ('\nStatus:\t\tOK')

    print('\t\tx\ty\tz')
    print('raw data=\t\t(%d,\t%d,\t%d)' %(x,y,z))

    print("standard dev= \t%.2f,\t%.2f,\t%.2f" %(stx,sty,stz))
    print("correlation= \t%.2f, \t%.2f, \t%.2f" %(x_corr,y_corr,z_corr))
    print("bit delta= \t\t%.2f, \t%.2f, \t%.2f" %(delx,dely,delz))
    print('%%data sent count= \t%.0f%%' %c)
    print('%%filter/raw count= \t%.0f%%' %cf)

    print('data theta(deg)= \t(%.2f,\t%.2f,\t%.2f)' %(xdeg,ydeg,zdeg))
    print("delta theta(deg)= \t%.2f, \t%.2f, \t%.2f" %(xdegdel,ydegdel,zdegdel))

    # Battery diagnostics — presumably only 5-character site names carry
    # a batt column; verify against the schema.
    if len(col)==5:
        batt=dft.batt.loc[max(dft.index)]
        query='''SELECT site_name,node_id,version,vmax,vmin FROM senslopedb.node_accel_table
                where site_name='%s' and node_id=%d'''%(col,nid)
        dfb=qdb.GetDBDataFrame(query)
        print('\nBattery:')
        print('battery (min,max)= \t(%.2fV,\t%.2fV)'%(dfb.vmin[0],dfb.vmax[0]))
        print('battery voltage= \t%.2fV' %batt)
        # How far the voltage sits outside the [vmin, vmax] band (0 if inside).
        if batt>dfb.vmax[0]:
            batt_del=batt-dfb.vmax[0]
        elif batt<dfb.vmin[0]:
            batt_del=batt-dfb.vmin[0]
        else:
            batt_del=0
        print('Delta Battery= \t\t%.2fV'%batt_del)

    print("\n########################################################################\n")

    xyz.xyzplot(dff,col,nid,time)