コード例 #1
0
def main(site, end):
    """Generate the trending site-level alert for *site* up to *end*.

    Builds a 3-hour, 30-minute-spaced timestamp scaffold, evaluates
    trending_alertgen per timestamp, and returns only the row(s) at the
    window end, stamped with an 'updateTS' column.
    """
    window, config = rtw.getwindow(end)

    # One row per 30-minute slot over the final 3 hours of the window.
    slots = pd.date_range(start=window.end - timedelta(hours=3),
                          end=window.end,
                          freq='30Min')
    nans = [np.nan] * len(slots)
    scaffold = pd.DataFrame({'timestamp': slots,
                             'site': nans,
                             'source': nans,
                             'alert': nans})
    scaffold = scaffold[['timestamp', 'site', 'source', 'alert']]

    sensors = q.GetSensorList(site)
    proc = g.genproc(sensors[0], window, config, config.io.column_fix)
    last_good = q.GetLastGoodDataFromDb(proc.colprops.name)

    # Evaluate the trending alert once per timestamp slot.
    per_ts = scaffold.groupby('timestamp')
    output = per_ts.apply(trending_alertgen,
                          window=window,
                          config=config,
                          monitoring=proc,
                          lgd=last_good)

    # Keep only the alert at the window end and stamp the update time.
    site_level_alert = output.loc[output.timestamp == window.end]
    site_level_alert['updateTS'] = [window.end]

    return site_level_alert
コード例 #2
0
def main(name='',custom_end = ''):
    if name == '':
        name = sys.argv[1].lower()

    start = datetime.now()

    print "=========================== {} {} =========================".format(str(name), custom_end)
    window,config = rtw.getwindow(end = custom_end )
    col = q.GetSensorList(name)
    monitoring = g.genproc(col[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)
    
    
    monitoring_vel = monitoring.vel[window.start:window.end]
    monitoring_vel = monitoring_vel.reset_index().sort_values('ts',ascending=True)
    nodal_dv = monitoring_vel.groupby('id')     
    
    alert = nodal_dv.apply(node_alert2, colname=monitoring.colprops.name, num_nodes=monitoring.colprops.nos, T_disp=config.io.t_disp, T_velL2=config.io.t_vell2, T_velL3=config.io.t_vell3, k_ac_ax=config.io.k_ac_ax, lastgooddata=lgd,window=window,config=config)
    alert = column_alert(alert, config.io.num_nodes_to_check, config.io.k_ac_ax)
    
    not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site == name].node.values
    
    for i in not_working:
        alert = alert.loc[alert.id != i]

    if 'L3' in list(alert.col_alert.values):
        site_alert = 'L3'
    elif 'L2' in list(alert.col_alert.values):
        site_alert = 'L2'
    else:
        site_alert = min(getmode(list(alert.col_alert.values)))
        
    column_level_alert = pd.DataFrame({'timestamp': [window.end], 'site': [monitoring.colprops.name], 'source': ['sensor'], 'alert': [site_alert], 'updateTS': [window.end]})
    
    print column_level_alert
    
    if site_alert in ('L2', 'L3'):
        A.main(monitoring.colprops.name,custom_end)
    else:
        alert_toDB(column_level_alert, 'column_level_alert', window)
    
    write_site_alert(monitoring.colprops.name, window)

#######################

    query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' and source = 'public' ORDER BY updateTS DESC LIMIT 1" %monitoring.colprops.name[0:3]
    public_alert = q.GetDBDataFrame(query)
    if public_alert.alert.values[0] != 'A0' or RoundTime(pd.to_datetime(public_alert.timestamp.values[0])) == RoundTime(window.end):
        plot_time = ['07:30:00', '19:30:00']
        if str(window.end.time()) in plot_time:
            print "Plotter.main(monitoring, window, config)"
    elif RoundTime(pd.to_datetime(public_alert.timestamp.values[0])) == RoundTime(window.end):
        print "Plotter.main(monitoring, window, config)"

#######################

    print 'run time =', datetime.now()-start
    
    return column_level_alert,monitoring
コード例 #3
0
def generate_proc(colname, num_nodes, seg_len, custom_end,f=False,for_plots=False):
    """Generate PROC (processed) monitoring data for one sensor column.

    Parameters:
        colname    -- sensor column name used for DB queries.
        num_nodes  -- number of nodes in the column.
        seg_len    -- segment length fed to accel_to_lin_xz_xy.
        custom_end -- end timestamp of the real-time monitoring window.
        f          -- if True, pass the raw data through ffd.filt.
        for_plots  -- with f, return the filtered raw data immediately
                      (keeping original columns) for plotting; NOTE this
                      branch returns a single DataFrame, not the usual
                      (monitoring, monwin) tuple.

    Returns (monitoring, monwin): DataFrame indexed by ts with columns
    ['id','xz','xy'], plus the monitoring-window timestamps.
    """
    #1. setting date boundaries for real-time monitoring window
    roll_window_numpts=int(1+roll_window_length/data_dt)
    # NOTE(review): `start` is unused below — presumably kept for
    # interface parity with get_rt_window; confirm before removing.
    end, start, offsetstart,monwin=get_rt_window(rt_window_length,roll_window_numpts,num_roll_window_ops,custom_end)

    # generating proc monitoring data for each site
    print "Generating PROC monitoring data for:-->> %s - %s <<--" %(str(colname),str(num_nodes))


    #2. getting accelerometer data for site 'colname'
    monitoring=qdb.GetRawAccelData(colname,offsetstart)
    if f:
        if for_plots:
            # keep_orig=True retains the unfiltered columns for plotting
            monitoring = ffd.filt(monitoring,keep_orig=True)
            return monitoring
        else:
            monitoring = ffd.filt(monitoring)

    else:
        # no filtering requested: just clip to the monitoring window
        monitoring = monitoring.loc[(monitoring.ts >= offsetstart) & (monitoring.ts <= end)]

    #3. identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,num_nodes,offsetstart)

    #4. get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = qdb.GetSingleLGDPM(colname, node, offsetstart.strftime("%Y-%m-%d %H:%M"))
        temp = fsd.applyFilters(temp)
        # keep only the most recent valid row for this node
        temp = temp.sort_index(ascending = False)[0:1]
        lgdpm = lgdpm.append(temp,ignore_index=True)

    #5. append LGDPM so resampling can fill from it
    # TODO: Resample the dataframe together with the LGDPM
    monitoring=monitoring.append(lgdpm)

    #6. filter the combined data and refresh the last-good-data cache
    monitoring=fsd.applyFilters(monitoring)
    LastGoodData=qdb.GetLastGoodData(monitoring,num_nodes)
    qdb.PushLastGoodData(LastGoodData,colname)
    LastGoodData = qdb.GetLastGoodDataFromDb(colname)
    print 'Done'


    if len(LastGoodData)<num_nodes: print colname, " Missing nodes in LastGoodData"

    #7. extracting last data outside monitoring window
    LastGoodData=LastGoodData[(LastGoodData.ts<offsetstart)]

    #8. appending LastGoodData to monitoring
    monitoring=monitoring.append(LastGoodData)


    #9. replacing date of data outside monitoring window with first date of monitoring window
    monitoring.loc[monitoring.ts < offsetstart, ['ts']] = offsetstart

    #10. computing corresponding horizontal linear displacements (xz,xy), and appending as columns to dataframe
    monitoring['xz'],monitoring['xy']=accel_to_lin_xz_xy(seg_len,monitoring.x.values,monitoring.y.values,monitoring.z.values)

    #11. removing unnecessary columns x,y,z
    monitoring=monitoring.drop(['x','y','z'],axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])

    #12. setting ts as index
    monitoring=monitoring.set_index('ts')

    #13. reordering columns
    monitoring=monitoring[['id','xz','xy']]

    return monitoring,monwin
コード例 #4
0
def node_alert(colname, xz_tilt, xy_tilt, xz_vel, xy_vel, num_nodes, T_disp, T_velL2, T_velL3, k_ac_ax,end):
    """Evaluate per-node displacement/velocity alerts for one column.

    Parameters:
        colname          -- sensor column name (used to fetch last good data).
        xz_tilt, xy_tilt -- per-node displacement series over the window;
                            only the first and last values are used.
        xz_vel, xy_vel   -- per-node velocity series; only the last value used.
        num_nodes        -- number of nodes in the column.
        T_disp           -- displacement threshold.
        T_velL2, T_velL3 -- velocity thresholds for alert levels 1 and 2.
        k_ac_ax          -- minimum cross-axis ratio for a movement to be
                            considered real (proportional on both axes).
        end              -- window end; data older than end-3h counts as ND.

    Returns a DataFrame with columns per `colarrange`, where disp/vel/node
    alerts are -1 (no data), 0, 1, or 2.
    """
    #initializing DataFrame object, alert
    alert=pd.DataFrame(data=None)

    #adding node IDs
    alert['id']=[n for n in range(1,1+num_nodes)]
    alert=alert.set_index('id')

    #checking for nodes with no data
    LastGoodData= qdb.GetLastGoodDataFromDb(colname)
    LastGoodData=LastGoodData[:num_nodes]
    # stale = last good data older than 3 hours before the window end
    cond = np.asarray((LastGoodData.ts< end - timedelta(hours=3)))
    if len(LastGoodData)<num_nodes:
        # nodes missing from LastGoodData entirely are also marked stale
        x=np.ones(num_nodes-len(LastGoodData),dtype=bool)
        cond=np.append(cond,x)
    alert['ND']=np.where(cond,

                         #No data within valid date
                         np.nan,

                         #Data present within valid date
                         np.ones(len(alert)))

    #evaluating net displacements within real-time window
    alert['xz_disp']=np.round(xz_tilt.values[-1]-xz_tilt.values[0], 3)
    alert['xy_disp']=np.round(xy_tilt.values[-1]-xy_tilt.values[0], 3)

    #determining minimum and maximum displacement
    cond = np.asarray(np.abs(alert['xz_disp'].values)<np.abs(alert['xy_disp'].values))
    min_disp=np.round(np.where(cond,
                               np.abs(alert['xz_disp'].values),
                               np.abs(alert['xy_disp'].values)), 4)
    cond = np.asarray(np.abs(alert['xz_disp'].values)>=np.abs(alert['xy_disp'].values))
    max_disp=np.round(np.where(cond,
                               np.abs(alert['xz_disp'].values),
                               np.abs(alert['xy_disp'].values)), 4)

    #checking if displacement threshold is exceeded in either axis
    cond = np.asarray((np.abs(alert['xz_disp'].values)>T_disp, np.abs(alert['xy_disp'].values)>T_disp))
    alert['disp_alert']=np.where(np.any(cond, axis=0),

                                 #threshold exceeded: disp alert=1 only if
                                 #movement is proportional across both axes
                                 #(min/max >= k_ac_ax), else 0
                                 np.where(min_disp/max_disp<k_ac_ax,
                                          np.zeros(len(alert)),
                                          np.ones(len(alert))),

                                 #disp alert=0
                                 np.zeros(len(alert)))

    #getting minimum axis velocity value
    alert['min_vel']=np.round(np.where(np.abs(xz_vel.values[-1])<np.abs(xy_vel.values[-1]),
                                       np.abs(xz_vel.values[-1]),
                                       np.abs(xy_vel.values[-1])), 4)

    #getting maximum axis velocity value
    alert['max_vel']=np.round(np.where(np.abs(xz_vel.values[-1])>=np.abs(xy_vel.values[-1]),
                                       np.abs(xz_vel.values[-1]),
                                       np.abs(xy_vel.values[-1])), 4)

    #checking if proportional velocity is present across node
    alert['vel_alert']=np.where(alert['min_vel'].values/alert['max_vel'].values<k_ac_ax,

                                #not proportional: vel alert=0
                                np.zeros(len(alert)),

                                #checking if max node velocity exceeds threshold velocity for alert 1
                                np.where(alert['max_vel'].values<=T_velL2,

                                         #below L2 threshold: vel alert=0
                                         np.zeros(len(alert)),

                                         #checking if max node velocity exceeds threshold velocity for alert 2
                                         np.where(alert['max_vel'].values<=T_velL3,

                                                  #vel alert=1
                                                  np.ones(len(alert)),

                                                  #vel alert=2
                                                  np.ones(len(alert))*2)))

    alert['node_alert']=np.where(alert['vel_alert'].values >= alert['disp_alert'].values,

                                 #node alert takes the higher perceived risk between vel alert and disp alert
                                 alert['vel_alert'].values,

                                 alert['disp_alert'].values)


    # multiplying by ND (1 or NaN) propagates "no data" into the alerts
    alert['disp_alert']=alert['ND']*alert['disp_alert']
    alert['vel_alert']=alert['ND']*alert['vel_alert']
    alert['node_alert']=alert['ND']*alert['node_alert']
    # normalize ND to 1 (data) / 0 (no data), and no-data alerts to -1
    alert['ND']=alert['ND'].map({0:1,1:1})
    alert['ND']=alert['ND'].fillna(value=0)
    alert['disp_alert']=alert['disp_alert'].fillna(value=-1)
    alert['vel_alert']=alert['vel_alert'].fillna(value=-1)
    alert['node_alert']=alert['node_alert'].fillna(value=-1)

    #rearrange columns per module-level column ordering
    alert=alert.reset_index()
    cols=colarrange
    alert = alert[cols]

    return alert
コード例 #5
0
def main(name='', end='', end_mon=False):
    start = datetime.now()

    if name == '':
        name = sys.argv[1].lower()

    if end == '':
        try:
            end = pd.to_datetime(sys.argv[2])
            if end > start + timedelta(hours=0.5):
                print 'invalid timestamp'
                return
        except:
            end = datetime.now()
    else:
        end = pd.to_datetime(end)

    window, config = rtw.getwindow(end)

    col = q.GetSensorList(name)
    monitoring = g.genproc(col[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)

    monitoring_vel = monitoring.disp_vel[window.start:window.end]
    monitoring_vel = monitoring_vel.reset_index().sort_values('ts',
                                                              ascending=True)
    nodal_dv = monitoring_vel.groupby('id')

    alert = nodal_dv.apply(node_alert2,
                           colname=monitoring.colprops.name,
                           num_nodes=monitoring.colprops.nos,
                           T_disp=config.io.t_disp,
                           T_velL2=config.io.t_vell2,
                           T_velL3=config.io.t_vell3,
                           k_ac_ax=config.io.k_ac_ax,
                           lastgooddata=lgd,
                           window=window,
                           config=config)
    alert['col_alert'] = -1
    col_alert = pd.DataFrame({
        'id': range(1, monitoring.colprops.nos + 1),
        'col_alert': [-1] * monitoring.colprops.nos
    })
    node_col_alert = col_alert.groupby('id', as_index=False)
    node_col_alert.apply(column_alert,
                         alert=alert,
                         num_nodes_to_check=config.io.num_nodes_to_check,
                         k_ac_ax=config.io.k_ac_ax,
                         T_velL2=config.io.t_vell2,
                         T_velL3=config.io.t_vell3)

    alert['node_alert'] = alert['node_alert'].map({
        -1: 'ND',
        0: 'L0',
        1: 'L2',
        2: 'L3'
    })
    alert['col_alert'] = alert['col_alert'].map({
        -1: 'ND',
        0: 'L0',
        1: 'L2',
        2: 'L3'
    })

    not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site ==
                                         name].node.values

    for i in not_working:
        alert = alert.loc[alert.id != i]

    if 'L3' in list(alert.col_alert.values):
        site_alert = 'L3'
    elif 'L2' in list(alert.col_alert.values):
        site_alert = 'L2'
    else:
        site_alert = min(getmode(list(alert.col_alert.values)))

    column_level_alert = pd.DataFrame({
        'timestamp': [window.end],
        'site': [monitoring.colprops.name],
        'source': ['noadjfilt'],
        'alert': [site_alert],
        'updateTS': [window.end]
    })

    if site_alert in ('L2', 'L3'):
        column_level_alert = A.main(monitoring.colprops.name, window.end)

    alert_toDB(column_level_alert, 'column_level_alert', window)

    write_site_alert(monitoring.colprops.name, window)

    print column_level_alert
    print 'run time =', datetime.now() - start

    return column_level_alert
コード例 #6
0
def genproc(col, window, config, fixpoint, realtime=False):
    """Build processed monitoring data (displacement + velocity) for a column.

    col      -- sensor column descriptor (uses .name, .nos, .seglen).
    window   -- monitoring window (uses .offsetstart, .start, .end, .numpts).
    config   -- configuration object (uses .io smoothing/fill settings).
    fixpoint -- fix point passed to the noise-profiling step.
    realtime -- selects the real-time smoothing/fill parameters.

    Returns a procdata object wrapping the resampled monitoring frame,
    the per-node velocities, and the noise-profiling outputs.
    """
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal = GetNodesWithNoInitialData(monitoring, col.nos,
                                               window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    lgdpm = pd.DataFrame()
    for node in NodesNoInitVal:
        temp = q.GetSingleLGDPM(col.name, node,
                                window.offsetstart.strftime("%Y-%m-%d %H:%M"))
        lgdpm = lgdpm.append(temp, ignore_index=True)
    monitoring = monitoring.append(lgdpm)

    # refresh the last-good-data cache; fall back to the stored DB copy
    # on any failure so processing can continue (deliberate best-effort)
    try:
        monitoring = flt.applyFilters(monitoring)
        LastGoodData = q.GetLastGoodData(monitoring, col.nos)
        q.PushLastGoodData(LastGoodData, col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)

    # BUG FIX: was a bare `except:`; narrowed to Exception so SystemExit
    # and KeyboardInterrupt are no longer swallowed.
    except Exception:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
        print 'error'

    if len(LastGoodData) < col.nos:
        print col.name, " Missing nodes in LastGoodData"

    monitoring = monitoring.loc[monitoring.id <= col.nos]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # convert accelerometer axes to horizontal linear displacements
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(
        col.seglen, monitoring.x.values, monitoring.y.values,
        monitoring.z.values)

    monitoring = monitoring.drop(['x', 'y', 'z'], axis=1)
    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')
    monitoring = monitoring[['name', 'id', 'xz', 'xy']]

    # insert NaN placeholder rows for nodes with no data at all, so the
    # groupby/resample below still yields one group per node
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({
        'name': [0] * len(nodes_noval),
        'id': nodes_noval,
        'xy': [np.nan] * len(nodes_noval),
        'xz': [np.nan] * len(nodes_noval),
        'ts': [window.offsetstart] * len(nodes_noval)
    })
    nodes_nodata = nodes_nodata.set_index('ts')
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config,
                                                      fixpoint, col.nos)

    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(
        resamplenode, window=window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(
        fill_smooth,
        offsetstart=window.offsetstart,
        end=window.end,
        roll_window_numpts=window.numpts,
        to_smooth=to_smooth,
        to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'name']].reset_index()

    monitoring = filled_smoothened.set_index('ts')

    # elapsed time in days since the first sample, for velocity computation
    filled_smoothened[
        'td'] = filled_smoothened.ts.values - filled_smoothened.ts.values[0]
    filled_smoothened['td'] = filled_smoothened['td'].apply(
        lambda x: x / np.timedelta64(1, 'D'))

    nodal_filled_smoothened = filled_smoothened.groupby('id')

    disp_vel = nodal_filled_smoothened.apply(node_inst_vel,
                                             roll_window_numpts=window.numpts,
                                             start=window.start)
    disp_vel = disp_vel[['ts', 'xz', 'xy', 'vel_xz', 'vel_xy',
                         'name']].reset_index()
    disp_vel = disp_vel[['ts', 'id', 'xz', 'xy', 'vel_xz', 'vel_xy', 'name']]
    disp_vel = disp_vel.set_index('ts')
    disp_vel = disp_vel.sort_values('id', ascending=True)

    return procdata(col, monitoring.sort_index(), disp_vel.sort_index(),
                    max_min_df, max_min_cml)
コード例 #7
0
def main(name='', end=datetime.now(), end_mon=False):
    if name == '':
        name = sys.argv[1].lower()

    window, config = rtw.getwindow(end)

    col = q.GetSensorList(name)
    monitoring = g.genproc(col[0], window, config, config.io.column_fix)
    lgd = q.GetLastGoodDataFromDb(monitoring.colprops.name)

    monitoring_vel = monitoring.vel[window.start:window.end]
    monitoring_vel = monitoring_vel.reset_index().sort_values('ts',
                                                              ascending=True)
    nodal_dv = monitoring_vel.groupby('id')

    alert = nodal_dv.apply(node_alert2,
                           colname=monitoring.colprops.name,
                           num_nodes=monitoring.colprops.nos,
                           T_disp=config.io.t_disp,
                           T_velL2=config.io.t_vell2,
                           T_velL3=config.io.t_vell3,
                           k_ac_ax=config.io.k_ac_ax,
                           lastgooddata=lgd,
                           window=window,
                           config=config)
    alert = column_alert(alert, config.io.num_nodes_to_check,
                         config.io.k_ac_ax)

    not_working = q.GetNodeStatus(1).loc[q.GetNodeStatus(1).site ==
                                         name].node.values

    for i in not_working:
        alert = alert.loc[alert.id != i]

    if 'L3' in list(alert.col_alert.values):
        site_alert = 'L3'
    elif 'L2' in list(alert.col_alert.values):
        site_alert = 'L2'
    else:
        site_alert = min(getmode(list(alert.col_alert.values)))

    column_level_alert = pd.DataFrame({
        'timestamp': [window.end],
        'site': [monitoring.colprops.name],
        'source': ['sensor'],
        'alert': [site_alert],
        'updateTS': [window.end]
    })

    if site_alert in ('L2', 'L3'):
        column_level_alert = A.main(monitoring.colprops.name, window.end)

    alert_toDB(column_level_alert, 'column_level_alert', window)

    print column_level_alert

    write_site_alert(monitoring.colprops.name, window)

    #######################

    if monitoring.colprops.name == 'mesta':
        colname = 'msu'
    elif monitoring.colprops.name == 'messb':
        colname = 'msl'
    else:
        colname = monitoring.colprops.name[0:3]
    query = "SELECT * FROM senslopedb.site_level_alert WHERE site = '%s' and source = 'public' and timestamp <= '%s' and updateTS >= '%s' ORDER BY updateTS DESC LIMIT 1" % (
        colname, window.end, window.end - timedelta(hours=0.5))
    public_alert = q.GetDBDataFrame(query)
    if public_alert.alert.values[0] != 'A0':
        plot_time = ['07:30:00', '19:30:00']
        if str(window.end.time()) in plot_time or end_mon:
            plotter.main(monitoring,
                         window,
                         config,
                         plotvel_start=window.end - timedelta(hours=3),
                         plotvel_end=window.end,
                         realtime=False)
    elif RoundTime(pd.to_datetime(
            public_alert.timestamp.values[0])) == RoundTime(window.end):
        plotter.main(monitoring,
                     window,
                     config,
                     plotvel_start=window.end - timedelta(hours=3),
                     plotvel_end=window.end,
                     realtime=False)

#######################

    return column_level_alert
コード例 #8
0
def genproc(col, window, config, fixpoint, realtime=False, comp_vel=True):
    """Build processed monitoring data (with optional velocities) for a column.

    col      -- sensor column descriptor (uses .name, .nos, .seglen).
    window   -- monitoring window (uses .offsetstart, .start, .end, .numpts).
    config   -- configuration object (uses .io smoothing/fill settings).
    fixpoint -- fix point passed to the noise-profiling step.
    realtime -- selects the real-time smoothing/fill parameters.
    comp_vel -- when True, compute per-node instantaneous velocities;
                otherwise return the smoothed monitoring frame as-is.

    Returns a procdata object wrapping the result and the noise-profiling
    outputs.
    """
    monitoring = q.GetRawAccelData(col.name, window.offsetstart, window.end)

    monitoring = flt.applyFilters(monitoring)

    # refresh the last-good-data cache; fall back to the stored DB copy
    # on any failure so processing can continue (deliberate best-effort)
    try:
        LastGoodData = q.GetLastGoodData(monitoring,col.nos)
        q.PushLastGoodData(LastGoodData,col.name)
        LastGoodData = q.GetLastGoodDataFromDb(col.name)
    # BUG FIX: was a bare `except:`; narrowed to Exception so SystemExit
    # and KeyboardInterrupt are no longer swallowed.
    except Exception:
        LastGoodData = q.GetLastGoodDataFromDb(col.name)

    #identify the node ids with no data at start of monitoring window
    NodesNoInitVal=GetNodesWithNoInitialData(monitoring,col.nos,window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    if len(NodesNoInitVal) != 0:
        lgdpm = q.GetSingleLGDPM(col.name, NodesNoInitVal, window.offsetstart)
        if len(lgdpm) != 0:
            lgdpm = flt.applyFilters(lgdpm)
            # keep only the most recent row per node
            lgdpm = lgdpm.sort_index(ascending = False).drop_duplicates('id')

        if len(lgdpm) != 0:
            monitoring=monitoring.append(lgdpm)

    monitoring = monitoring.loc[monitoring.id <= col.nos]

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart)|(pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    # remove readings from nodes flagged as invalid
    invalid_nodes = q.GetNodeStatus(1)
    invalid_nodes = invalid_nodes[invalid_nodes.site == col.name]
    if len(invalid_nodes) != 0:
        stat = invalid_nodes.groupby('node', as_index=False)
        monitoring = stat.apply(remove_invalid, df=monitoring)

    # insert zero placeholder rows for nodes with no data at all, so the
    # groupby/resample below still yields one group per node
    nodes_noval = GetNodesWithNoData(monitoring, col.nos)
    nodes_nodata = pd.DataFrame({'name': [0]*len(nodes_noval), 'id': nodes_noval,
                'x': [0]*len(nodes_noval), 'y': [0]*len(nodes_noval),
                'z': [0]*len(nodes_noval), 'ts': [window.offsetstart]*len(nodes_noval)})
    monitoring = monitoring.append(nodes_nodata)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, config, fixpoint, col.nos)

    # convert accelerometer axes to horizontal linear displacements
    monitoring['xz'], monitoring['xy'] = accel_to_lin_xz_xy(col.seglen,monitoring.x.values,monitoring.y.values,monitoring.z.values)

    monitoring = monitoring.drop_duplicates(['ts', 'id'])
    monitoring = monitoring.set_index('ts')

    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('id').apply(resamplenode, window = window).reset_index(level=1).set_index('ts')

    nodal_proc_monitoring = monitoring.groupby('id')

    if not realtime:
        to_smooth = config.io.to_smooth
        to_fill = config.io.to_fill
    else:
        to_smooth = config.io.rt_to_smooth
        to_fill = config.io.rt_to_fill

    filled_smoothened = nodal_proc_monitoring.apply(fill_smooth, offsetstart=window.offsetstart, end=window.end, roll_window_numpts=window.numpts, to_smooth=to_smooth, to_fill=to_fill)
    filled_smoothened = filled_smoothened[['xz', 'xy', 'x', 'y', 'z', 'name']].reset_index()

    # depth of each node below the surface, signed by the x axis; NaN
    # (from x == 0 placeholders or noise) falls back to the segment length
    filled_smoothened['depth'] = filled_smoothened['x']/np.abs(filled_smoothened['x']) * np.sqrt(col.seglen**2 - filled_smoothened['xz']**2 - filled_smoothened['xy']**2)
    filled_smoothened['depth'] = filled_smoothened['depth'].fillna(value=col.seglen)
    filled_smoothened['net_dist'] = np.sqrt((filled_smoothened['xz'] ** 2) + (filled_smoothened['xy'] ** 2))

    monitoring = filled_smoothened.set_index('ts')

    if comp_vel == True:
        # elapsed time in days since the first sample, for velocity computation
        filled_smoothened['td'] = filled_smoothened['ts'].values - filled_smoothened['ts'].values[0]
        filled_smoothened['td'] = filled_smoothened['td'].apply(lambda x: x / np.timedelta64(1,'D'))

        nodal_filled_smoothened = filled_smoothened.groupby('id')

        disp_vel = nodal_filled_smoothened.apply(node_inst_vel, roll_window_numpts=window.numpts, start=window.start)
        disp_vel = disp_vel.reset_index(drop=True)
        disp_vel = disp_vel.set_index('ts')
        disp_vel = disp_vel.sort_values('id', ascending=True)
    else:
        disp_vel = monitoring

    # BUG FIX: DataFrame.sort() was deprecated and later removed from
    # pandas; sort_index() preserves the original index-sort behavior
    # (matches the sibling genproc implementation above).
    return procdata(col,disp_vel.sort_index(),max_min_df,max_min_cml)