Example #1
def zeroing_azimuth(store, tbl, timerange_nord, coefs=None, cfg_in=None):
    """
    Calculate azimuth_shift_deg from the velocity (Ve, Vn) in the timerange_nord interval of tbl data:
    take the median, calculate the direction, multiply by -1.
    :param store: pandas HDFStore with the source data
    :param tbl: table name in store
    :param timerange_nord: time interval used to zero the North direction
    :param coefs: dict with fields having values of array type with sizes:
    'Ag': (3, 3), 'Cg': (3, 1), 'Ah': (3, 3), 'Ch': (3, 1), 'azimuth_shift_deg': (1,), 'kVabs': (n,)
    :param cfg_in: dict with fields:
        - timerange_nord
        - other fields needed in load_hdf5_data() and optionally in incl_calc_velocity_nodask()
    :return: azimuth_shift_deg
    """
    l.debug('Zeroing Nord direction')
    df = load_hdf5_data(store, table=tbl, t_intervals=timerange_nord)
    if df.empty:
        l.info('Zero calibration range out of data scope')
        return
    dfv = incl_calc_velocity_nodask(df,
                                    **coefs,
                                    cfg_filter=cfg_in,
                                    cfg_proc={
                                        'calc_version': 'trigonometric(incl)',
                                        'max_incl_of_fit_deg': 70
                                    })
    dfv.query('10 < inclination & inclination < 170', inplace=True)
    dfv_mean = dfv.loc[:, ['Ve', 'Vn']].median()
    # or df.apply(lambda x: [np.mean(x)], result_type='expand', raw=True)
    # df = incl_calc_velocity_nodask(dfv_mean, **calc_vel_flat_coef(coefs), cfg_in=cfg_in)

    # coefs['M']['A'] = rotate_z(coefs['M']['A'], dfv_mean.Vdir[0])
    azimuth_shift_deg = -np.degrees(np.arctan2(*dfv_mean.to_numpy()))
    l.info('Nord azimuth shifting coef. found: %s degrees', azimuth_shift_deg)
    return azimuth_shift_deg
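For reference, the sign convention of the returned shift can be checked in isolation: it is the negated bearing of the median velocity vector. A minimal sketch with made-up Ve/Vn values (numpy only, not part of the module):

import numpy as np

# hypothetical median velocities over the Nord interval: Ve (east), Vn (north)
ve, vn = 0.05, -0.12
azimuth_shift_deg = -np.degrees(np.arctan2(ve, vn))  # -atan2(Ve, Vn), as in the function above
print(azimuth_shift_deg)  # ~ -157.4: angle by which the coefficients must be rotated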
Example #2
def main(new_arg=None):
    """
    1. Obtains command line arguments (see my_argparser() for a description); they can also be passed via new_arg or an ini file.
    2. Loads device laboratory calibration data from cfg['in']['db_path'].
    3. Calibrates the channels configured by cfg['in']['channels'] ('accelerometer' and/or 'magnetometer'): soft iron.
    4. Wrong implementation - does not use cfg['in']['timerange_nord']! todo: rotate compass using cfg['in']['timerange_nord'].
    :param new_arg: command line arguments; returns cfg if new_arg == '<cfg_from_args>', but it will be None if
     argv[1:] == '-h' or '-v' is passed to this code.
    argv[1] is cfgFile. It was used with cfg files:

    :return:
    """

    global l

    cfg = cfg_from_args(my_argparser(), new_arg)
    if not cfg:
        return
    if cfg['program']['return'] == '<cfg_from_args>':  # to help testing
        return cfg

    l = init_logging(logging, None, cfg['program']['log'],
                     cfg['program']['verbose'])
    l.info("%s(%s) channels: %s started. ", this_prog_basename(__file__),
           cfg['in']['tables'], cfg['in']['channels'])
    fig = None
    fig_filt = None
    channel = 'accelerometer'  # 'magnetometer'
    fig_save_dir_path = cfg['in']['db_path'].parent
    with pd.HDFStore(cfg['in']['db_path'], mode='r') as store:
        if len(cfg['in']['tables']) == 1:
            cfg['in']['tables'] = h5find_tables(store, cfg['in']['tables'][0])
        coefs = {}
        for itbl, tbl in enumerate(cfg['in']['tables'], start=1):
            probe_number = int(re.findall(r'\d+', tbl)[0])
            l.info(f'{itbl}. {tbl}: ')
            if isinstance(cfg['in']['timerange'], Mapping):  # individual interval for each table
                timerange = cfg['in']['timerange'].get(probe_number)
            else:
                timerange = cfg['in']['timerange']  # same interval for each table
            a = load_hdf5_data(store, table=tbl, t_intervals=timerange)
            # iUseTime = np.searchsorted(stime, [np.array(s, 'datetime64[s]') for s in np.array(strTimeUse)])
            coefs[tbl] = {}
            for channel in cfg['in']['channels']:
                print(f' channel "{channel}"', end=' ')
                (col_str, coef_str) = channel_cols(channel)

                # filtering # col_str == 'A'?
                if True:
                    b_ok = np.zeros(a.shape[0], bool)
                    for component in ['x', 'y', 'z']:
                        b_ok |= is_works(
                            a[col_str + component],
                            noise=cfg['filter']['no_works_noise'][channel])
                    l.info('Filtered not working area: %2.1f%%',
                           (b_ok.size - b_ok.sum()) * 100 / b_ok.size)
                    # vec3d = np.column_stack(
                    #     (a[col_str + 'x'], a[col_str + 'y'], a[col_str + 'z']))[:, b_ok].T  # [slice(*iUseTime.flat)]
                    vec3d = a.loc[b_ok, [col_str + 'x', col_str + 'y', col_str + 'z']].to_numpy(float).T
                    index = a.index[b_ok]

                    vec3d, b_ok, fig_filt = filter_channes(
                        vec3d,
                        index,
                        fig_filt,
                        fig_save_prefix=f"{fig_save_dir_path / tbl}-'{channel}'",
                        blocks=cfg['filter']['blocks'],
                        offsets=cfg['filter']['offsets'],
                        std_smooth_sigma=cfg['filter']['std_smooth_sigma'])

                A, b = calibrate(vec3d)
                window_title = f"{tbl} '{channel}' channel ellipse"
                fig = calibrate_plot(vec3d,
                                     A,
                                     b,
                                     fig,
                                     window_title=window_title)
                fig.savefig(fig_save_dir_path / (window_title + '.png'),
                            dpi=300,
                            bbox_inches="tight")
                A_str, b_str = coef2str(A, b)
                l.info(
                    'Calibration coefficients calculated: \nA = \n%s\nb = \n%s',
                    A_str, b_str)
                coefs[tbl][channel] = {'A': A, 'b': b}

            # Zeroing Nord direction
            timerange_nord = cfg['in']['timerange_nord']
            if isinstance(timerange_nord, Mapping):
                timerange_nord = timerange_nord.get(probe_number)
            if timerange_nord:
                coefs[tbl]['M']['azimuth_shift_deg'] = zeroing_azimuth(
                    store, tbl, timerange_nord, calc_vel_flat_coef(coefs[tbl]),
                    cfg['in'])
            else:
                l.info('no zeroing Nord')
    # Write coefs
    for cfg_output in (['in', 'out'] if cfg['out'].get('db_path') else ['in']):
        l.info(f"Write to {cfg[cfg_output]['db_path']}")
        for itbl, tbl in enumerate(cfg['in']['tables'], start=1):
            # i_search = re.search('\d*$', tbl)
            # for channel in cfg['in']['channels']:
            #     (col_str, coef_str) = channel_cols(channel)
            #     dict_matrices = {f'//coef//{coef_str}//A': coefs[tbl][channel]['A'],
            #                      f'//coef//{coef_str}//C': coefs[tbl][channel]['b'],
            #                      }
            #     if channel == 'M':
            #         if coefs[tbl]['M'].get('azimuth_shift_deg'):
            #             dict_matrices[f'//coef//{coef_str}//azimuth_shift_deg'] = coefs[tbl]['M']['azimuth_shift_deg']
            #         # Coping probe number to coefficient to can manually check when copy manually
            #         if i_search:
            #             try:
            #                 dict_matrices['//coef//i'] = int(i_search.group(0))
            #             except Exception as e:
            #                 pass
            dict_matrices = dict_matrices_for_h5(coefs[tbl], tbl,
                                                 cfg['in']['channels'])
            h5copy_coef(None,
                        cfg[cfg_output]['db_path'],
                        tbl,
                        dict_matrices=dict_matrices)
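The per-table interval lookup in the loop above (a Mapping gives one interval per probe number, anything else is shared by all tables) can be expressed as a small standalone helper; the name resolve_timerange is illustrative only, not part of the module:

from collections.abc import Mapping

def resolve_timerange(timerange_cfg, probe_number):
    """Return the time interval for one probe: per-probe dict entry or the shared interval."""
    if isinstance(timerange_cfg, Mapping):      # individual interval for each table
        return timerange_cfg.get(probe_number)  # None if this probe has no interval
    return timerange_cfg                        # same interval for every table

# resolve_timerange({1: ['2020-01-01', '2020-01-02']}, 1) -> ['2020-01-01', '2020-01-02']
# resolve_timerange(['2020-01-01', '2020-01-02'], 14)     -> ['2020-01-01', '2020-01-02']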
Example #3
def dict_matrices_for_h5(coefs=None, tbl=None, channels=None):
    """

    Build a dict of coefficient matrices to write to HDF5.
    :param coefs: some fields from: {'M': {'A', 'b', 'azimuth_shift_deg'}, 'A': {'A', 'b', 'azimuth_shift_deg'}, 'Vabs0'}
    :param tbl: table name, should include the probe number. Example: "incl_b01"
    :param channels: subset of the default ['M', 'A']: magnetometer, accelerometer
    """
    if channels is None:
        channels = ['M', 'A']

    # Fill coefs where not specified
    dummies = []
    b_have_coefs = coefs is not None
    if not b_have_coefs:
        coefs = {}  # dict(zip(channels,[None]*len(channels)))

    for channel in channels:
        if not coefs.get(channel):
            coefs[channel] = ({
                'A': np.identity(3) * 0.00173,
                'b': np.zeros((3, 1))
            } if channel == 'M' else {
                'A': np.identity(3) * 6.103E-5,
                'b': np.zeros((3, 1))
            })
            if b_have_coefs:
                dummies.append(channel)
        if channel == 'M':
            if not coefs['M'].get('azimuth_shift_deg'):
                coefs['M']['azimuth_shift_deg'] = 180
                if b_have_coefs and 'M' not in dummies:
                    dummies.append('azimuth_shift_deg')  # only 'azimuth_shift_deg' of the M channel is dummy

    if coefs.get('Vabs0') is None:
        coefs['Vabs0'] = np.float64([10, -10, -10, -3, 3, 70])
        if b_have_coefs:
            dummies.append('Vabs0')

    if dummies or not b_have_coefs:
        l.warning('Copying coefs, %s - dummy!',
                  ','.join(dummies) if b_have_coefs else 'all')
    else:
        l.info('Copying coefs')

    # Fill dict_matrices with coefs values
    dict_matrices = {}
    if tbl:
        # Copy the probe number into the coefficients so it can be checked manually after copying
        i_search = re.search(r'\d*$', tbl)
        if i_search:
            try:
                dict_matrices['//coef//i'] = int(i_search.group(0))
            except Exception:
                pass
        dict_matrices['//coef//Vabs0'] = coefs['Vabs0']

    for channel in channels:
        (col_str, coef_str) = channel_cols(channel)
        dict_matrices.update({
            f'//coef//{coef_str}//A': coefs[channel]['A'],
            f'//coef//{coef_str}//C': coefs[channel]['b'],
        })
        if channel == 'M':
            dict_matrices[f'//coef//{coef_str}//azimuth_shift_deg'] = coefs[
                'M']['azimuth_shift_deg']

    return dict_matrices
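A minimal call without measured coefficients shows the key layout this builds for h5copy_coef(); all values are the dummies described above, and the per-channel coef_str comes from channel_cols(), which is not shown here:

# hypothetical usage: dummy coefficients for probe table 'incl_b01'
dict_matrices = dict_matrices_for_h5(coefs=None, tbl='incl_b01', channels=['M', 'A'])
# expected keys: '//coef//i' (probe number 1 parsed from the table name), '//coef//Vabs0',
# per channel '//coef//{coef_str}//A' and '//coef//{coef_str}//C',
# plus '//coef//{coef_str}//azimuth_shift_deg' for the magnetometer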
Example #4
def filter_channes(
    a3d: np.ndarray,
    a_time=None,
    fig=None,
    fig_save_prefix=None,
    blocks=(21, 7),
    offsets=(1.5, 2),
    std_smooth_sigma=4
) -> Tuple[np.ndarray, np.ndarray, matplotlib.figure.Figure]:
    """
    Filter each channel of a3d backward and forward with despike() and plot the data with the overlaid results.
    :param a3d: data of shape (3, len): 3 channels
    :param a_time: time index of the a3d samples
    :param fig: figure to reuse for plotting; a new one is created if None
    :param fig_save_prefix: save figure to this path + 'despike({ch}).png' suffix
    :param blocks: filter window widths - see despike()
    :param offsets: offsets to std - see despike(). Note: filters too much if any value is < 3
    :param std_smooth_sigma: see despike()
    :return: (a3d[:, b_ok], b_ok, figure)
    """

    dim_length = 1  # dim_channel = 0
    blocks = np.minimum(blocks, a3d.shape[dim_length])
    b_ok = np.ones((a3d.shape[dim_length], ), np.bool_)
    if fig:
        fig.axes[0].clear()
        ax = fig.axes[0]
    else:
        ax = None
    for i, (ch, a) in enumerate(zip(('x', 'y', 'z'), a3d)):
        ax_title = f'despike({ch})'
        ax, lines = make_figure(y_kwrgs=(({
            'data': a,
            'label': 'source',
            'color': 'r',
            'alpha': 1
        }, )),
                                ax_title=ax_title,
                                ax=ax,
                                lines='clear')
        # , mask_kwrgs={'data': b_ok, 'label': 'filtered', 'color': 'g', 'alpha': 0.7}
        b_nan = np.isnan(a)
        n_nans_before = b_nan.sum()
        b_ok &= ~b_nan

        if len(offsets):
            # back and forward:
            a_f = np.float64(a[b_ok][::-1])
            a_f, _ = despike(a_f,
                             offsets,
                             blocks,
                             std_smooth_sigma=std_smooth_sigma)
            a_f, _ = despike(a_f[::-1],
                             offsets,
                             blocks,
                             ax,
                             label=ch,
                             std_smooth_sigma=std_smooth_sigma,
                             x_plot=np.flatnonzero(b_ok))
            b_nan[b_ok] = np.isnan(a_f)
            n_nans_after = b_nan.sum()
            b_ok &= ~b_nan

            # ax, lines = make_figure(y_kwrgs=((
            #     {'data': a, 'label': 'source', 'color': 'r', 'alpha': 1},
            # )), mask_kwrgs={'data': b_ok, 'label': 'filtered', 'color': 'g', 'alpha': 0.7}, ax=ax,
            #     ax_title=f'despike({ch})', lines='clear')

            ax.legend(prop={'size': 10}, loc='upper right')
            l.info('despike(%s, offsets=%s, blocks=%s) deleted %s', ch,
                   offsets, blocks, n_nans_after - n_nans_before)
        plt.show()
        if fig_save_prefix:  # dbstop
            try:
                ax.figure.savefig(fig_save_prefix + (ax_title + '.png'),
                                  dpi=300,
                                  bbox_inches="tight")
            except Exception as e:
                l.warning(f'Can not save fig: {standard_error_info(e)}')
        # Dep_filt = rep2mean(a_f, b_ok, a_time)  # need to execute waveletSmooth on full length

    # ax.plot(np.flatnonzero(b_ok), Depth[b_ok], color='g', alpha=0.9, label=ch)
    return a3d[:, b_ok], b_ok, ax.figure
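The mask bookkeeping above (NaNs produced by despike() on the compressed array are mapped back into the full-length b_ok via the b_nan buffer) can be reproduced standalone; the threshold filter below is only a stand-in for despike(), which is not shown:

import numpy as np

a = np.array([0.1, 0.2, np.nan, 5.0, 0.3])  # one channel with a NaN and a spike
b_ok = ~np.isnan(a)                          # drop NaNs first

a_f = a[b_ok].copy()
a_f[np.abs(a_f) > 1.0] = np.nan              # stand-in for despike(): mark spikes as NaN

b_nan = np.zeros_like(b_ok)
b_nan[b_ok] = np.isnan(a_f)                  # map filtered NaNs back to full-length indexing
b_ok &= ~b_nan                               # b_ok now excludes both the NaN and the spike
print(b_ok)                                  # [ True  True False False  True]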
Example #5
def dict_matrices_for_h5(coefs=None,
                         tbl=None,
                         channels=None,
                         to_nested_keys=False):
    """

    Build a dict of coefficient matrices to write to HDF5.
    :param coefs: some fields from: {'M': {'A', 'b', 'azimuth_shift_deg'}, 'A': {'A', 'b', 'azimuth_shift_deg'}, 'Vabs0'}
    :param tbl: table name, should include the probe number. Example: "incl_b01"
    :param channels: subset of the default ['M', 'A']: magnetometer, accelerometer
    :param to_nested_keys: if True or a mapping of flat coef names to (channel, coef) pairs, regroup flat fields
        ('Ah', 'Ch', 'Ag', 'Cg', 'azimuth_shift_deg') under their channels; 'kVabs' is renamed to 'Vabs0'
    :return: dict_matrices
    """

    if channels is None:
        channels = ['M', 'A']

    # Fill coefs where not specified
    dummies = []
    b_have_coefs = coefs is not None

    if b_have_coefs:
        if not to_nested_keys:
            coefs = coefs.copy()  # todo: deep copy would be better
        else:
            # convert coefs fields to be nested under channels

            if to_nested_keys is True:
                to_nested_keys = {
                    'Ch': ('M', 'b'),
                    'Ah': ('M', 'A'),
                    'azimuth_shift_deg': ('M', 'azimuth_shift_deg'),
                    'Cg': ('A', 'b'),
                    'Ag': ('A', 'A')
                }
            channels = set(c for c, k in to_nested_keys.values())
            c = {k: {} for k in channels}
            for channel, (ch_name, coef_name) in to_nested_keys.items():
                c[ch_name][coef_name] = coefs.get(channel)
            c.update(
                {k: v
                 for k, v in coefs.items() if k not in to_nested_keys})
            # old convention
            if c.get('Vabs0') is None and 'kVabs' in c:
                c['Vabs0'] = c.pop('kVabs')
            coefs = c
    else:
        coefs = {}  # dict(zip(channels,[None]*len(channels)))

    for channel in channels:
        if not coefs.get(channel):
            coefs[channel] = ({
                'A': np.identity(3) * 0.00173,
                'b': np.zeros((3, 1))
            } if channel == 'M' else {
                'A': np.identity(3) * 6.103E-5,
                'b': np.zeros((3, 1))
            })
            if b_have_coefs:
                dummies.append(channel)
        if channel == 'M':
            if not coefs['M'].get('azimuth_shift_deg'):
                coefs['M']['azimuth_shift_deg'] = 180
                if b_have_coefs and 'M' not in dummies:
                    dummies.append('azimuth_shift_deg')  # only 'azimuth_shift_deg' of the M channel is dummy

    if coefs.get('Vabs0') is None:
        coefs['Vabs0'] = np.float64([10, -10, -10, -3, 3, 70])
        if b_have_coefs:
            dummies.append('Vabs0')

    if dummies or not b_have_coefs:
        l.warning('Copying coefs, %s - dummy!',
                  ','.join(dummies) if b_have_coefs else 'all')
    else:
        l.info('Copying coefs')

    # Fill dict_matrices with coefs values
    dict_matrices = {}
    if tbl:
        # Copy the probe number into the coefficients so it can be checked manually after copying
        i_search = re.search(r'\d*$', tbl)
        if i_search:
            try:
                dict_matrices['//coef//i'] = int(i_search.group(0))
            except Exception:
                pass
        dict_matrices['//coef//Vabs0'] = coefs['Vabs0']

    for channel in channels:
        (col_str, coef_str) = channel_cols(channel)
        dict_matrices.update({
            f'//coef//{coef_str}//A': coefs[channel]['A'],
            f'//coef//{coef_str}//C': coefs[channel]['b'],
        })
        if channel == 'M':
            dict_matrices[f'//coef//{coef_str}//azimuth_shift_deg'] = coefs[
                'M']['azimuth_shift_deg']

    return dict_matrices
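For coefficients stored under the flat names used elsewhere in this module ('Ag', 'Cg', 'Ah', 'Ch', 'kVabs'), to_nested_keys=True regroups them per channel before the keys are built; a short sketch with dummy arrays (values are illustrative only):

import numpy as np

flat_coefs = {
    'Ag': np.identity(3), 'Cg': np.zeros((3, 1)),    # accelerometer -> 'A': {'A', 'b'}
    'Ah': np.identity(3), 'Ch': np.zeros((3, 1)),    # magnetometer  -> 'M': {'A', 'b'}
    'kVabs': np.float64([10, -10, -10, -3, 3, 70]),  # renamed to the old 'Vabs0' convention
}
dict_matrices = dict_matrices_for_h5(flat_coefs, tbl='incl_b01', to_nested_keys=True)
# the result follows the same '//coef//...' key layout as the previous example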