def generate_proc(colname, num_nodes, seg_len, custom_end, f=False, for_plots=False):
    #1. setting date boundaries for real-time monitoring window
    roll_window_numpts = int(1 + roll_window_length / data_dt)
    end, start, offsetstart, monwin = get_rt_window(rt_window_length,
                                                    roll_window_numpts,
                                                    num_roll_window_ops,
                                                    custom_end)

    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:-->> %s - %s <<--" % (str(colname), str(num_nodes))

    #2. getting accelerometer data for site 'colname'
    monitoring = qdb.GetRawAccelData(colname, offsetstart)

    if f:
        if for_plots:
            monitoring = ffd.filt(monitoring, keep_orig=True)
            return monitoring
        else:
            monitoring = ffd.filt(monitoring)
    else:
        monitoring = monitoring.loc[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]

    #3. identifying the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, num_nodes, offsetstart)

    #4. getting last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = qdb.GetSingleLGDPM(colname, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
        temp = fsd.applyFilters(temp)
        temp = temp.sort_index(ascending=False)[0:1]
        lgdpm = lgdpm.append(temp, ignore_index=True)

    #5. TODO: resample the dataframe together with the LGDPM
    monitoring = monitoring.append(lgdpm)

    #6. evaluating which data needs to be filtered
    monitoring = fsd.applyFilters(monitoring)
    LastGoodData = qdb.GetLastGoodData(monitoring, num_nodes)
    qdb.PushLastGoodData(LastGoodData, colname)
    LastGoodData = qdb.GetLastGoodDataFromDb(colname)
    print 'Done'

    if len(LastGoodData) < num_nodes:
        print colname, " Missing nodes in LastGoodData"

    #7. extracting last data outside monitoring window
    LastGoodData = LastGoodData[LastGoodData.ts < offsetstart]

    #8. appending LastGoodData to monitoring
    monitoring = monitoring.append(LastGoodData)

    #9. replacing date of data outside monitoring window with first date of monitoring window
    monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

    #10. computing corresponding horizontal linear displacements (xz, xy),
    #    and appending as columns to the dataframe
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(seg_len,
                                                            monitoring.x.values,
                                                            monitoring.y.values,
                                                            monitoring.z.values)

    #11. removing unnecessary columns x, y, z and duplicate (ts, id) entries
    monitoring = monitoring.drop(['x', 'y', 'z'], axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])

    #12. setting ts as index
    monitoring = monitoring.set_index('ts')

    #13. reordering columns
    monitoring = monitoring[['id', 'xz', 'xy']]

    return monitoring, monwin
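# For reference, a minimal sketch of the tilt-to-displacement conversion used
# in step 10 above. It assumes (x, y, z) are gravity components from the
# node's accelerometer with the x-axis along the segment, so the horizontal
# offset of a segment of length seg_len in each plane is seg_len * sin(tilt).
# This is an illustrative reconstruction, not necessarily the exact helper
# defined elsewhere in this codebase.
import numpy as np

def accel_to_lin_xz_xy(seg_len, x, y, z):
    # tilt of the segment in the xz- and xy-planes
    theta_xz = np.arctan2(z, np.sqrt(x ** 2 + y ** 2))
    theta_xy = np.arctan2(y, np.sqrt(x ** 2 + z ** 2))
    # horizontal projections of the segment
    xz = seg_len * np.sin(theta_xz)
    xy = seg_len * np.sin(theta_xy)
    return xz, xy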
def genproc(col, window, config, fixpoint, realtime=False):
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    # identify the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, col.nos, window.offsetstart)

    # get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = q.GetSingleLGDPM(col.name, node, window.offsetstart.strftime("%Y-%m-%d %H:%M"))
        lgdpm = lgdpm.append(temp, ignore_index=True)
    monitoring = monitoring.append(lgdpm)

    try:
        monitoring = flt.applyFilters(monitoring)
        LastGoodData = q.GetLastGoodData(monitoring, col.nos)
        q.PushLastGoodData(LastGoodData, col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
    except:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
        print 'error'

    if len(LastGoodData) < col.nos:
        print col.name, " Missing nodes in LastGoodData"

    monitoring = monitoring.loc[monitoring.id <= col.nos]

    # assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(col.seglen,
                                                            monitoring.x.values,
                                                            monitoring.y.values,
                                                            monitoring.z.values)

    monitoring = monitoring.drop(['x', 'y', 'z'], axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
    monitoring = monitoring[['name', 'id', 'xz', 'xy']]

    # add placeholder rows for nodes with no data at all in the window
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({'name': [0] * len(nodes_noval),
                                 'id': nodes_noval,
                                 'xy': [np.nan] * len(nodes_noval),
                                 'xz': [np.nan] * len(nodes_noval),
                                 'ts': [window.offsetstart] * len(nodes_noval)})
    nodes_nodata = nodes_nodata.set_index('ts')
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config, fixpoint, col.nos)

    # resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(resamplenode, window=window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth,
                                                    offsetstart=window.offsetstart,
                                                    end=window.end,
                                                    roll_window_numpts=window.numpts,
                                                    to_smooth=to_smooth,
                                                    to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'name']].reset_index()
    monitoring = filled_smoothened.set_index('ts')

    # elapsed time in days from the start of the window, for velocity computation
    filled_smoothened['td'] = filled_smoothened.ts.values - filled_smoothened.ts.values[0]
    filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1, 'D'))

    nodal_filled_smoothened = filled_smoothened.groupby('id')

    disp_vel = nodal_filled_smoothened.apply(node_inst_vel,
                                             roll_window_numpts=window.numpts,
                                             start=window.start)
    disp_vel = disp_vel[['ts', 'xz', 'xy', 'vel_xz', 'vel_xy', 'name']].reset_index()
    disp_vel = disp_vel[['ts', 'id', 'xz', 'xy', 'vel_xz', 'vel_xy', 'name']]
    disp_vel = disp_vel.set_index('ts')
    disp_vel = disp_vel.sort_values('id', ascending=True)

    return procdata(col, monitoring.sort_index(), disp_vel.sort_index(), max_min_df, max_min_cml)
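# Both versions above rely on GetNodesWithNoInitialData. A minimal sketch,
# assuming node ids run from 1 to num_nodes and that "no initial data" means
# no sample within a short tolerance of offsetstart (the tol_hours argument
# is a hypothetical stand-in for the real tolerance):
from datetime import timedelta

def GetNodesWithNoInitialData(df, num_nodes, offsetstart, tol_hours=0.5):
    cutoff = offsetstart + timedelta(hours=tol_hours)
    with_init = set(df.loc[df.ts < cutoff, 'id'].unique())
    return sorted(set(range(1, num_nodes + 1)) - with_init)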
columnName = column[0]
if len(columnName) <= 6:
    # Get list of nodes for column
    # queryNodes = 'SELECT DISTINCT id FROM %s WHERE id > 0 AND id < 60 ORDER BY id' % (columnName)
    # cur.execute(queryNodes)
    # nodes = cur.fetchall()
    nodes = [1., 2., 3., 4., 5., 6., 7., 8., 9., 10., 11., 12., 13., 14., 15., 16., 17.]

    for node in nodes:
        lgdpm = qs.GetSingleLGDPM(columnName, node, aLitteBitAgo)
        print lgdpm

        # Accel inputs should be:
        # a. column
        # b. nid
        # c. version
        # d. start date
        # e. end date
        # TODO: add your accelerometer filter here
        # test = naf.newAccelFilterFxn(columnName, node, version, fdate, tdate)
        # print test
        # print "row count: %s" % (len(test.index))
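# If a live database connection is available, the hardcoded node list above
# can be replaced by the commented-out query. A sketch assuming a DB-API
# cursor named cur (the helper name get_node_ids is hypothetical):
def get_node_ids(cur, columnName, max_id=60):
    queryNodes = ('SELECT DISTINCT id FROM %s WHERE id > 0 AND id < %d '
                  'ORDER BY id') % (columnName, max_id)
    cur.execute(queryNodes)
    return [row[0] for row in cur.fetchall()]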
def genproc(col, window, config, fixpoint, realtime=False, comp_vel=True):
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)
    monitoring = flt.applyFilters(monitoring)

    try:
        LastGoodData = q.GetLastGoodData(monitoring, col.nos)
        q.PushLastGoodData(LastGoodData, col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
    except:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)

    # identify the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, col.nos, window.offsetstart)

    # get last good data prior to the monitoring window (LGDPM)
    if len(NodesNoInitVal) != 0:
        lgdpm = q.GetSingleLGDPM(col.name, NodesNoInitVal, window.offsetstart)
        if len(lgdpm) != 0:
            lgdpm = flt.applyFilters(lgdpm)
            lgdpm = lgdpm.sort_index(ascending=False).drop_duplicates('id')
        if len(lgdpm) != 0:
            monitoring = monitoring.append(lgdpm)

    monitoring = monitoring.loc[monitoring.id <= col.nos]

    # assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # drop data from nodes flagged as invalid
    invalid_nodes = q.GetNodeStatus(1)
    invalid_nodes = invalid_nodes[invalid_nodes.site == col.name]
    if len(invalid_nodes) != 0:
        stat = invalid_nodes.groupby('node', as_index=False)
        monitoring = stat.apply(remove_invalid, df=monitoring)

    # add placeholder rows for nodes with no data at all in the window
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({'name': [0] * len(nodes_noval),
                                 'id': nodes_noval,
                                 'x': [0] * len(nodes_noval),
                                 'y': [0] * len(nodes_noval),
                                 'z': [0] * len(nodes_noval),
                                 'ts': [window.offsetstart] * len(nodes_noval)})
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config, fixpoint, col.nos)

    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(col.seglen,
                                                            monitoring.x.values,
                                                            monitoring.y.values,
                                                            monitoring.z.values)

    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')

    # resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(resamplenode, window=window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth,
                                                    offsetstart=window.offsetstart,
                                                    end=window.end,
                                                    roll_window_numpts=window.numpts,
                                                    to_smooth=to_smooth,
                                                    to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'x', 'y', 'z', 'name']].reset_index()

    # depth of the segment tip recovered from the horizontal offsets;
    # sign follows the x reading, and NaN (vertical segment) defaults to seglen
    filled_smoothened['depth'] = (filled_smoothened['x'] / np.abs(filled_smoothened['x']) *
                                  np.sqrt(col.seglen ** 2 -
                                          filled_smoothened['xz'] ** 2 -
                                          filled_smoothened['xy'] ** 2))
    filled_smoothened['depth'] = filled_smoothened['depth'].fillna(value=col.seglen)

    # net horizontal distance
    filled_smoothened['net_dist'] = np.sqrt(filled_smoothened['xz'] ** 2 +
                                            filled_smoothened['xy'] ** 2)

    monitoring = filled_smoothened.set_index('ts')

    if comp_vel:
        # elapsed time in days from the start of the window
        filled_smoothened['td'] = filled_smoothened['ts'].values - filled_smoothened['ts'].values[0]
        filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1, 'D'))

        nodal_filled_smoothened = filled_smoothened.groupby('id')

        disp_vel = nodal_filled_smoothened.apply(node_inst_vel,
                                                 roll_window_numpts=window.numpts,
                                                 start=window.start)
        disp_vel = disp_vel.reset_index(drop=True)
        disp_vel = disp_vel.set_index('ts')
        disp_vel = disp_vel.sort_values('id', ascending=True)
    else:
        disp_vel = monitoring

    return procdata(col, disp_vel.sort_index(), max_min_df, max_min_cml)
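# procdata is the container returned above. A minimal sketch matching the
# four-argument call in this version; note the earlier genproc variant also
# passes the resampled monitoring frame, so the real class likely changed
# between versions:
class procdata(object):
    def __init__(self, col, disp_vel, max_min_df, max_min_cml):
        self.col = col                  # sensor column metadata
        self.disp_vel = disp_vel        # per-node displacement (and velocity)
        self.max_min_df = max_min_df    # output of err.cml_noise_profiling
        self.max_min_cml = max_min_cml  # cumulative noise-envelope bounds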