def pre_proc(tsm_name, n):
    """Fetch, filter, and convert accelerometer data for a tilt sensor.

    Parameters
    ----------
    tsm_name : str
        TSM sensor name; prefixed with 'tilt_' to form the query target.
    n : int
        Event index passed through to percent_movement to pick the
        monitoring window.

    Returns
    -------
    tuple of (pandas.DataFrame, iterable, ts)
        data  : per-node xz/xy displacement with a 'td' column of elapsed
                days since the first timestamp, restricted to `nodes`.
        nodes : node ids of interest (as returned by percent_movement).
        ts    : reference timestamp from percent_movement.
    """
    sensor = 'tilt_' + tsm_name

    # percent_movement defines the monitoring window and nodes of interest
    # for this event -- TODO confirm exact semantics with its definition.
    start, end, nodes, ts = percent_movement(tsm_name, n)

    df = data_query(start, end, sensor)
    filtered = filt.apply_filters(df)
    data = accel_to_lin_xz_xy(filtered, seg_len=1.5)

    # Elapsed time in fractional days since the first sample.
    data['td'] = data.ts.values - data.ts.values[0]
    data['td'] = data['td'].apply(lambda x: x / np.timedelta64(1, 'D'))

    # Keep only the nodes of interest, forward-filling gaps per node so a
    # missing reading inherits the node's last known displacement.
    data = data[data['node_id'].isin(nodes)]
    data['xz'] = data.groupby('node_id')['xz'].transform(lambda v: v.ffill())
    data['xy'] = data.groupby('node_id')['xy'].transform(lambda v: v.ffill())

    return data, nodes, ts


def confusion_mat(actual, predicted):
    """Compute the true-positive and false-positive rates.

    Parameters
    ----------
    actual : array-like
        Ground-truth binary labels.
    predicted : array-like
        Predicted binary labels.

    Returns
    -------
    tuple of (float, float)
        (TPR, FPR) -- sensitivity and fall-out derived from the 2x2
        confusion matrix.
    """
    true_neg, false_pos, false_neg, true_pos = \
        confusion_matrix(actual, predicted).ravel()

    tpr = true_pos / (true_pos + false_neg)
    fpr = false_pos / (false_pos + true_neg)

    return tpr, fpr

# --- script body: fetch, filter, and convert accel data, then derive
# elapsed-time column and the node column range ---

data = data(start, end, sensor)
filtered = filt.apply_filters(data)
# BUGFIX: the filtered frame was previously discarded (the raw `data`
# was passed on); feed the filtered data forward as in pre_proc above.
data = accel_to_lin_xz_xy(filtered, seg_len)

# Elapsed time in fractional days since the first sample.
data['td'] = data.ts.values - data.ts.values[0]
data['td'] = data['td'].apply(lambda x: x / np.timedelta64(1, 'D'))

# Number of nodes; hard-coded here.
# Alternative (data-driven): n = data.node_id.max() + 1
n = 25

cols = np.arange(1, n + 1)
# --- Code example #3 (score: 0) ---
# Source file: proc.py -- project: jgeliberte/cbews_iloilo
def proc_data(tsm_props,
              window,
              sc,
              realtime=False,
              comp_vel=True,
              analysis=True):
    """Process raw subsurface accelerometer data into filled/smoothed tilt.

    Fetches raw accel data for the monitoring window, filters it, patches
    nodes with no initial data using the last good data prior to the
    window (LGDPM), drops invalid nodes, converts accel to xz/xy
    displacement, resamples + fill/smooths per node, and optionally
    computes nodal velocities.

    Parameters
    ----------
    tsm_props : object
        TSM properties; must expose tsm_name, tsm_id, nos (number of
        nodes), and seglen (segment length).
    window : object
        Monitoring window; must expose offsetstart, start, end, numpts.
    sc : mapping
        Server config; 'subsurface' section supplies (rt_)to_smooth and
        (rt_)to_fill values.
    realtime : bool, optional
        Use the realtime smoothing/filling parameters when True.
    comp_vel : bool, optional
        Compute per-node instantaneous velocity when True.
    analysis : bool, optional
        Passed through to the query helpers.

    Returns
    -------
    ProcData
        (invalid_nodes, tilt sorted by timestamp, last good data,
        max_min_df, max_min_cml).
    """
    monitoring = qdb.get_raw_accel_data(tsm_name=tsm_props.tsm_name,
                                        from_time=window.offsetstart,
                                        to_time=window.end,
                                        analysis=analysis)
    monitoring = monitoring.loc[monitoring.node_id <= tsm_props.nos]

    monitoring = filt.apply_filters(monitoring)

    #identify the node ids with no data at start of monitoring window
    no_init_val = no_initial_data(monitoring, tsm_props.nos,
                                  window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    if len(no_init_val) != 0:
        lgdpm = qdb.get_single_lgdpm(tsm_props.tsm_name,
                                     no_init_val,
                                     window.offsetstart,
                                     analysis=analysis)
        lgdpm = filt.apply_filters(lgdpm)
        lgdpm = lgdpm.sort_index(ascending=False).drop_duplicates('node_id')

        # DataFrame.append was removed in pandas 2.0; use pd.concat.
        monitoring = pd.concat([monitoring, lgdpm], sort=False)

    invalid_nodes = qdb.get_node_status(tsm_props.tsm_id,
                                        ts=window.offsetstart)
    monitoring = monitoring.loc[~monitoring.node_id.isin(invalid_nodes)]

    lgd = get_last_good_data(monitoring)

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    monitoring = accel_to_lin_xz_xy(monitoring, tsm_props.seglen)

    monitoring = monitoring.drop_duplicates(['ts', 'node_id'])
    monitoring = monitoring.set_index('ts')
    monitoring = monitoring[['tsm_name', 'node_id', 'xz', 'xy']]

    # Insert NaN placeholder rows (at offsetstart) for nodes with no data
    # at all so the per-node resample/fill below still covers them.
    nodes_noval = no_data(monitoring, tsm_props.nos)
    nodes_nodata = pd.DataFrame({
        'tsm_name': [tsm_props.tsm_name] * len(nodes_noval),
        'node_id':
        nodes_noval,
        'xy': [np.nan] * len(nodes_noval),
        'xz': [np.nan] * len(nodes_noval),
        'ts': [window.offsetstart] * len(nodes_noval)
    })
    nodes_nodata = nodes_nodata.set_index('ts')
    # DataFrame.append was removed in pandas 2.0; use pd.concat.
    monitoring = pd.concat([monitoring, nodes_nodata], sort=False)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, sc,
                                                      tsm_props.nos)

    #resamples xz and xy values per node using forward fill
    monitoring = monitoring.groupby('node_id', as_index=False).apply(
        resample_node, window=window).reset_index(drop=True).set_index('ts')

    # Realtime processing uses its own (shorter) smoothing/filling spans.
    if not realtime:
        to_smooth = int(sc['subsurface']['to_smooth'])
        to_fill = int(sc['subsurface']['to_fill'])
    else:
        to_smooth = int(sc['subsurface']['rt_to_smooth'])
        to_fill = int(sc['subsurface']['rt_to_fill'])

    tilt = monitoring.groupby('node_id', as_index=False).apply(
        fill_smooth,
        offsetstart=window.offsetstart,
        end=window.end,
        roll_window_numpts=window.numpts,
        to_smooth=to_smooth,
        to_fill=to_fill).reset_index(level='ts').set_index('ts')

    if comp_vel:
        # Elapsed time in fractional days since the first sample; needed
        # by node_inst_vel, then dropped again.
        tilt.loc[:, 'td'] = tilt.index.values - \
                                            monitoring.index.values[0]
        tilt.loc[:, 'td'] = tilt['td'].apply(lambda x: x / \
                                            np.timedelta64(1,'D'))

        nodal_filled_smoothened = tilt.groupby('node_id', as_index=False)

        tilt = nodal_filled_smoothened.apply(node_inst_vel,
                                             roll_window_numpts=window.numpts,
                                             start=window.start)
        tilt = tilt.drop(['td'], axis=1)
        tilt = tilt.sort_values('node_id', ascending=True)
        tilt = tilt.reset_index(level='ts').set_index('ts')

    return ProcData(invalid_nodes, tilt.sort_index(), lgd, max_min_df,
                    max_min_cml)
# --- Code example #4 (score: 0) ---
def proc_subsurface(tsm_props, window, sc):
    """Process raw subsurface accel data into magnitude/theta_yz alerts.

    Like proc_data but works on magnitude and theta_yz (instead of xz/xy),
    derives slope/intercept, displacement, and velocity per node, and
    finishes by running the alert generator.

    Parameters
    ----------
    tsm_props : object
        TSM properties; must expose tsm_name, tsm_id, nos, and seglen.
    window : object
        Monitoring window; must expose offsetstart, start, end, numpts.
    sc : mapping
        Server config; 'subsurface' section supplies to_smooth/to_fill.

    Returns
    -------
    ProcData
        (invalid_nodes, tilt alerts, last good data, max_min_df,
        max_min_cml).
    """
    monitoring = qdb.get_raw_accel_data(tsm_name=tsm_props.tsm_name,
                                        from_time=window.offsetstart,
                                        to_time=window.end)

    monitoring = monitoring.loc[monitoring.node_id <= tsm_props.nos]
    monitoring = filt.apply_filters(monitoring)
    monitoring = monitoring.groupby('node_id', as_index=False).apply(
        magnitude, tsm_props.seglen)
    monitoring = theta_yz(monitoring)

    #identify the node ids with no data at start of monitoring window
    no_init_val = no_initial_data(monitoring, tsm_props.nos,
                                  window.offsetstart)

    #get last good data prior to the monitoring window (LGDPM)
    if len(no_init_val) != 0:
        lgdpm = qdb.get_single_lgdpm(tsm_props.tsm_name, no_init_val,
                                     window.offsetstart)
        lgdpm = filt.apply_filters(lgdpm)
        lgdpm = lgdpm.sort_index(ascending=False).drop_duplicates('node_id')

        # DataFrame.append was removed in pandas 2.0; use pd.concat.
        monitoring = pd.concat([monitoring, lgdpm], sort=False)

    invalid_nodes = qdb.get_node_status(tsm_props.tsm_id,
                                        ts=window.offsetstart)
    monitoring = monitoring.loc[~monitoring.node_id.isin(invalid_nodes)]

    lgd = get_last_good_data(monitoring)

    #assigns timestamps from LGD to be timestamp of offsetstart
    monitoring.loc[(monitoring.ts < window.offsetstart) |
                   (pd.isnull(monitoring.ts)), ['ts']] = window.offsetstart

    monitoring = monitoring.drop_duplicates(['ts', 'node_id'])
    monitoring = monitoring.set_index('ts')

    monitoring = monitoring[[
        'tsm_name', 'node_id', 'x', 'y', 'z', 'magnitude', 'theta_yz'
    ]]

    # Insert NaN placeholder rows (at offsetstart) for nodes with no data
    # at all so the per-node resample/fill below still covers them.
    nodes_noval = no_data(monitoring, tsm_props.nos)
    nodes_nodata = pd.DataFrame({
        'tsm_name': [tsm_props.tsm_name] * len(nodes_noval),
        'node_id':
        nodes_noval,
        'magnitude': [np.nan] * len(nodes_noval),
        'theta_yz': [np.nan] * len(nodes_noval),
        'ts': [window.offsetstart] * len(nodes_noval)
    })
    nodes_nodata = nodes_nodata.set_index('ts')
    # DataFrame.append was removed in pandas 2.0; use pd.concat.
    monitoring = pd.concat([monitoring, nodes_nodata], sort=False)

    max_min_df, max_min_cml = err.cml_noise_profiling(monitoring, sc,
                                                      tsm_props.nos)

    #resamples values per node using forward fill
    monitoring = monitoring.groupby('node_id', as_index=False).apply(
        proc.resample_node,
        window=window).reset_index(drop=True).set_index('ts')

    to_smooth = int(sc['subsurface']['to_smooth'])
    to_fill = int(sc['subsurface']['to_fill'])

    monitoring = monitoring.groupby('node_id', as_index=False).apply(
        fill_smooth,
        offsetstart=window.offsetstart,
        end=window.end,
        roll_window_numpts=window.numpts,
        to_smooth=to_smooth,
        to_fill=to_fill).reset_index(level='ts').set_index('ts')

    # Elapsed time in fractional days since the first sample; used by the
    # rolling slope/velocity computations below.
    monitoring.loc[:,
                   'td'] = monitoring.index.values - monitoring.index.values[0]
    monitoring.loc[:, 'td'] = monitoring['td'].apply(lambda x: x / \
                                            np.timedelta64(1,'D'))

    monitoring = monitoring.groupby('node_id', as_index=False).apply(
        slope_intercept, roll_window_numpts=window.numpts,
        start=window.start).reset_index(level='ts').set_index('ts')
    monitoring = displacement(monitoring)

    monitoring = monitoring.groupby('node_id', as_index=False).apply(
        node_inst_vel, roll_window_numpts=window.numpts,
        start=window.start).reset_index(level='ts').set_index('ts')

    # Thresholds 0.05 / 0.032 passed to the alert generator -- magic
    # numbers from the original; meaning not established here (TODO:
    # confirm units against alert_generator's definition).
    tilt = alert_generator(monitoring, 0.05, 0.032)

    return ProcData(invalid_nodes, tilt, lgd, max_min_df, max_min_cml)