Example no. 1
def extract_max_values(stack_grid, search_info, f_stack, n_max=5e7):

    # get basic info
    nx = search_info['nx']
    ny = search_info['ny']
    nz = search_info['nz']
    dx = search_info['dx']
    dy = search_info['dy']
    dz = search_info['dz']
    x_orig = search_info['x_orig']
    y_orig = search_info['y_orig']
    z_orig = search_info['z_orig']

    nb, nt = stack_grid.shape

    max_val = f_stack.create_dataset('max_val', (nt, ), 'f')
    max_val_smooth = f_stack.create_dataset('max_val_smooth', (nt, ), 'f')
    max_x = f_stack.create_dataset('max_x', (nt, ), 'f')
    max_y = f_stack.create_dataset('max_y', (nt, ), 'f')
    max_z = f_stack.create_dataset('max_z', (nt, ), 'f')

    # create temporary datasets
    max_ib = f_stack.create_dataset('max_ib', (nt, ), 'i')
    max_ix = f_stack.create_dataset('max_ix', (nt, ), 'i')
    max_iy = f_stack.create_dataset('max_iy', (nt, ), 'i')
    max_iz = f_stack.create_dataset('max_iz', (nt, ), 'i')

    # extract values
    dt = int(n_max / nb)
    if nt <= dt:
        # do the extraction in one step
        max_ib[:] = np.argmax(stack_grid, 0)
        max_val[:] = np.max(stack_grid, 0)

    else:
        # do the extraction in steps
        n = nt // dt  # integer number of full chunks
        logging.debug(
            'Number of values exceeds %d. Doing extraction in %d steps' %
            (n_max, n))
        for i in islice(count(0), n):
            max_ib[i * dt:(i + 1) * dt] = np.argmax(
                stack_grid[:, i * dt:(i + 1) * dt], 0)
            max_val[i * dt:(i + 1) * dt] = np.max(
                stack_grid[:, i * dt:(i + 1) * dt], 0)
        max_ib[n * dt:nt] = np.argmax(stack_grid[:, n * dt:nt], 0)
        max_val[n * dt:nt] = np.max(stack_grid[:, n * dt:nt], 0)

    # find the corresponding x,y,z values
    max_ix, max_iy, max_iz = np.unravel_index(max_ib, (nx, ny, nz))
    max_x[:] = max_ix[:] * dx + x_orig
    max_y[:] = max_iy[:] * dy + y_orig
    max_z[:] = max_iz[:] * dz + z_orig
    max_val_smooth[:] = smooth(np.array(max_val), 51)

    # clean up temporary datasets
    del f_stack['max_ib']
    del f_stack['max_ix']
    del f_stack['max_iy']
    del f_stack['max_iz']
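
All of the examples on this page call a smooth() helper whose definition is not shown; Example no. 3 imports it from the project's filters module. A minimal sketch of a compatible implementation, assuming the widespread NumPy window-convolution recipe with the output trimmed back to the input length (the defaults here are assumptions, not the project's actual code):

import numpy as np


def smooth(x, window_len=11, window='hanning'):
    """Hypothetical stand-in for filters.smooth: window-convolution smoothing.

    Matches the call patterns above: smooth(data), smooth(data, 51) and
    smooth(data, window_len=..., window=...). Raises ValueError for inputs
    shorter than the window (Example no. 8 catches exactly that).
    """
    x = np.asarray(x)
    if x.ndim != 1:
        raise ValueError("smooth only accepts 1-D arrays")
    if x.size < window_len:
        raise ValueError("input vector must be longer than the window")
    if window_len < 3:
        return x
    if window == 'flat':
        w = np.ones(window_len)              # moving average
    else:
        w = getattr(np, window)(window_len)  # np.hanning, np.hamming, ...
    # reflect the signal at both ends to limit edge artefacts
    s = np.r_[x[window_len - 1:0:-1], x, x[-2:-window_len - 1:-1]]
    y = np.convolve(w / w.sum(), s, mode='valid')
    # trim back to the original input length
    return y[window_len // 2:window_len // 2 + x.size]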
Example no. 2
def filter_max_stack(st_max, corner):
    # maximum of stack is too rough - filter it
    logging.debug("Doing filtering")

    st_filt = st_max.copy()
    st_filt.clear()

    for tr in st_max.traces:
        tr_filt = tr.copy()
        # 'corner' is only used by the disabled lowpass alternative below
        # x_filt = lowpass(tr_filt.data, corner, 1/tr.stats.delta, zerophase=True)
        x_filt = smooth(tr_filt.data)
        tr_filt.data = x_filt
        st_filt.append(tr_filt)

    logging.debug("Done!")

    return st_filt
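
A hedged usage sketch for filter_max_stack(), assuming st_max is an ObsPy Stream (the .traces, .copy()/.clear() and tr.stats.delta usage points that way); the data is synthetic, and smooth() must be importable as above:

import numpy as np
from obspy import Stream, Trace

# synthetic stand-in for a real max-stack stream: one noisy trace at 100 Hz
tr = Trace(data=np.random.rand(2000).astype('float32'))
tr.stats.delta = 0.01
st_max = Stream(traces=[tr])

# corner only matters for the commented-out lowpass alternative
st_smooth = filter_max_stack(st_max, corner=1.0)
print(st_smooth[0].stats.npts)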
Example no. 3
    def test_smoothing(self):

        from filters import smooth

        x = np.arange(100)
        max_val = 100. * np.exp(-(x - 50.) * (x - 50.) /
                                (10. * 10.)) + np.random.rand(100)
        max_x = np.random.rand(100)
        max_y = np.random.rand(100)
        max_z = np.random.rand(100)

        max_val_smooth = smooth(max_val)

        left_trig = right_trig = 3
        locs_smooth = trigger_locations_inner(max_val_smooth, max_x, max_y,
                                              max_z, left_trig, right_trig,
                                              0.0, 1.0)
        self.assertAlmostEqual(locs_smooth[0]['o_time'], 50., 2)
Example no. 4
def csv2wav(fname, smoothing_data):
    '''
    Converts a simulation file in .csv format to .wav files
    --------
    Arguments:
        fname : str
            Name of simulation file
        smoothing_data : tuple
            2-tuple containing (1) smoothing window to use for time series and
            (2) length of smoothing window
    '''
    assert is_simulation_data(fname), \
        "Error: file does not seem to have extension .csv!"
    angles, num_samples, time_stamps = load_simulation_data_from_csv(fname)
    USE_TIME_STAMPS = True
    t, angles, num_samples = make_time_series(angles, num_samples,
                                              time_stamps, USE_TIME_STAMPS)

    # Smoothing, if requested
    smoothing_window, window_len = smoothing_data
    if smoothing_window is not None:
        logString("Smoothing time series with " + smoothing_window +
                  " window of length " + str(window_len))
        for i in range(angles.shape[0]):
            angles[i, :] = smooth(angles[i, :],
                                  window_len=window_len,
                                  window=smoothing_window)

    # Make .wav files
    fname_base = os.path.splitext(fname)[0]
    fname = os.path.join(get_data_dir(), fname_base)
    write_wave(angles[OUTER, :], fname + "_outer")
    write_wave(angles[INNER, :], fname + "_inner")
    return
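
A hedged call sketch; the file name is a placeholder, and the tuple layout follows the docstring above:

# smooth both angle series with a 51-sample Hanning window before writing
# the .wav files; pass (None, 0) to skip the smoothing branch entirely
csv2wav("pendulum_sim.csv", ("hanning", 51))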
Example no. 5
def analyze(fname, imu_to_plot, estimate, use_calibration,
            use_legacy_sign_convention, use_time_stamps, plot_slice,
            make_wav, anim_data, smoothing_data):
    '''
    Visualizes logged data
    --------
    Arguments:
        fname : str
            Name of log file whose data is to be analyzed
        imu_to_plot : str
            Indicates whether the data for the base IMU or the lamp IMU is to
            be analyzed, or both
        estimate : bool
            Estimates angles if True, otherwise plots raw data
        use_calibration : bool
            If True, applies rotations and offsets to the raw data, based on the
            contents of the .ini files. This can be used to account for the fact
            that the IMUs are mounted at angles relative to the lamp and base
        use_legacy_sign_convention : bool
            If True, transforms the data set from the old acceleration sign
            convention to the new one. Meant for data sets recorded on MCU
            firmware older than July 2019
        use_time_stamps : bool
            If True, uses the time stamps in the data log to construct the time
            series. Assuming the clock on the computer doing the recording is
            trustworthy, this should be very robust. Missing entries are
            accounted for. Otherwise, the time series is constructed based on
            sampling rate * number of samples, and has no connection to "true
            time"
        plot_slice : string
            String containing start time and end time to plot between
        make_wav : bool
            Indicates whether to make .wav file from angle data
        anim_data : tuple
            3-tuple containing (1) bool indicating whether or not to animate,
            (2) animation type, and (3) animation arguments
        smoothing_data : tuple
            2-tuple containing (1) smoothing window to use for time series and
            (2) length of smoothing window
    '''
    make_data_dir()
    if fname == "latest":
        glob_str = os.path.join(get_data_dir(), '*.dat')
        files = glob.glob(glob_str)
        fname = max(files, key=os.path.getctime)

    imu_data, num_samples, time_stamps = load_data_from_file(
        fname,
        use_calibration=use_calibration,
        use_legacy_sign_convention=use_legacy_sign_convention)
    t, imu_data, num_samples = make_time_series(imu_data, num_samples,
                                                time_stamps, use_time_stamps)
    angles = get_angles(imu_data, num_samples)

    # Smoothing, if requested
    smoothing_window, window_len = smoothing_data
    if smoothing_window is not None:
        logString("Smoothing time series with " + smoothing_window +
                  " window of length " + str(window_len))
        for i in range(angles.shape[0]):
            angles[i, :] = smooth(angles[i, :],
                                  window_len=window_len,
                                  window=smoothing_window)

    if make_wav:
        # Combine the pitch and roll from each IMU into a single value for each
        angles[OUTER:INNER+1,:] = angles[BASE_OUTER:BASE_INNER+1,:] + \
                                  angles[LAMP_OUTER:LAMP_INNER+1,:]
        fname_base = os.path.splitext(fname)[0]
        fname = os.path.join(get_data_dir(), fname_base)
        write_wave(angles[OUTER, :], fname + "_outer")
        write_wave(angles[INNER, :], fname + "_inner")
        return

    if plot_slice:
        t_start, t_end = plot_slice.split(',')
        t_start = np.round(float(t_start), 2)
        t_end = np.round(float(t_end), 2)
        # Bounds check!
        if t_end > t[-1]:
            logString("--plot slice end time exceeds end time of data "
                      "({0})".format(t[-1]))
            quit()
        start_idx = int(np.round(t_start * (num_samples / (t[-1] - t[0]))))
        end_idx = int(np.round(t_end * (num_samples / (t[-1] - t[0]))))
        # Slice!
        t = t[start_idx:end_idx + 1]
        imu_data = imu_data[:, start_idx:end_idx + 1]
        angles = angles[:, start_idx:end_idx + 1]
        num_samples = end_idx - start_idx + 1

    do_animate, anim_type, anim_args = anim_data
    if do_animate:
        aa = anim.Animate(t, angles, fname)
        if anim_type == 'phase':
            aa.do_phase_space_animation(imu_to_plot)
        elif anim_type == 'top_down':
            do_decomp = anim_args == 'decomp'
            aa.do_birds_eye_view_animation(imu_to_plot, do_decomp)
        elif anim_type == 'pendulum':
            if imu_to_plot == 'lamp' or imu_to_plot == 'both':
                if anim_args == 'outer' or anim_args == 'both':
                    aa.do_pendulum_animation(LAMP_OUTER)
                if anim_args == 'inner' or anim_args == 'both':
                    aa.do_pendulum_animation(LAMP_INNER)
            if imu_to_plot == 'base' or imu_to_plot == 'both':
                if anim_args == 'outer' or anim_args == 'both':
                    aa.do_pendulum_animation(BASE_OUTER)
                if anim_args == 'inner' or anim_args == 'both':
                    aa.do_pendulum_animation(BASE_INNER)
        else:
            assert (False), "Animation processing messed up!"
        return

    fig, ax = plt.subplots()
    size = 2
    if estimate == "none":
        if imu_to_plot == "base" or imu_to_plot == "both":
            ax.plot(t,
                    imu_data[IMU_BASE_IDX + ACC_IDX + X_IDX],
                    c="blue",
                    label="Base Ax")
            ax.plot(t,
                    imu_data[IMU_BASE_IDX + ACC_IDX + Y_IDX],
                    c="red",
                    label="Base Ay")
            ax.plot(t,
                    imu_data[IMU_BASE_IDX + ACC_IDX + Z_IDX],
                    c="green",
                    label="Base Az")
            ax.plot(t,
                    imu_data[IMU_BASE_IDX + GYRO_IDX + X_IDX],
                    c="gold",
                    label="Base Vx")
            ax.plot(t,
                    imu_data[IMU_BASE_IDX + GYRO_IDX + Y_IDX],
                    c="black",
                    label="Base Vy")
            ax.plot(t,
                    imu_data[IMU_BASE_IDX + GYRO_IDX + Z_IDX],
                    c="magenta",
                    label="Base Vz")
        if imu_to_plot == "lamp" or imu_to_plot == "both":
            ax.plot(t,
                    imu_data[IMU_LAMP_IDX + ACC_IDX + X_IDX],
                    c="blue",
                    label="Lamp Ax")
            ax.plot(t,
                    imu_data[IMU_LAMP_IDX + ACC_IDX + Y_IDX],
                    c="red",
                    label="Lamp Ay")
            ax.plot(t,
                    imu_data[IMU_LAMP_IDX + ACC_IDX + Z_IDX],
                    c="green",
                    label="Lamp Az")
            ax.plot(t,
                    imu_data[IMU_LAMP_IDX + GYRO_IDX + X_IDX],
                    c="gold",
                    label="Lamp Vx")
            ax.plot(t,
                    imu_data[IMU_LAMP_IDX + GYRO_IDX + Y_IDX],
                    c="black",
                    label="Lamp Vy")
            ax.plot(t,
                    imu_data[IMU_LAMP_IDX + GYRO_IDX + Z_IDX],
                    c="magenta",
                    label="Lamp Vz")
        ax.legend()

        plt.title('Raw data vs time')
        plt.xlabel('Time (s)')
        plt.ylabel(r'Raw data ($m/s^2$ and $^\circ$)')

        fig_name = "raw_" + imu_to_plot + "_"
        fig_name += os.path.splitext(os.path.basename(fname))[0]
        if plot_slice:
            fig_name += "_from%.2fto%.2f" % (t_start, t_end)
        fig_name += '.png'
        fig_name = os.path.join(get_data_dir(), os.path.dirname(fname),
                                fig_name)
        plt.savefig(fig_name)
        logString("Saved fig to {0}".format(fig_name))
        plt.close()
    else:
        if estimate == "ind_angles":
            # Plot pitch and roll separately for each IMU
            if imu_to_plot == "base" or imu_to_plot == "both":
                ax.plot(t, angles[BASE_OUTER], c="blue", label="Base (outer)")
                ax.plot(t, angles[BASE_INNER], c="red", label="Base (inner)")
            if imu_to_plot == "lamp" or imu_to_plot == "both":
                ax.plot(t, angles[LAMP_OUTER], c="green", label="Lamp (outer)")
                ax.plot(t, angles[LAMP_INNER], c="gold", label="Lamp (inner)")
            fig_name = estimate + "_" + imu_to_plot + "_imu_"
        else:
            # Combine the pitch and roll from each IMU into a single value for each
            angles[OUTER:INNER+1,:] = angles[BASE_OUTER:BASE_INNER+1,:] + \
                                      angles[LAMP_OUTER:LAMP_INNER+1,:]
            ax.plot(t, angles[OUTER], c="blue", label="Outer gimbal")
            ax.plot(t, angles[INNER], c="red", label="Inner gimbal")
            fig_name = estimate + "_"
        ax.legend()

        plt.title('Angles vs time')
        plt.xlabel('Time (s)')
        plt.ylabel(r'Angle ($^\circ$)')

        fig_name += os.path.splitext(os.path.basename(fname))[0]
        if plot_slice:
            fig_name += "_from%.2fto%.2f" % (t_start, t_end)
        if use_calibration:
            fig_name += "_calibrated"
        fig_name += '.png'
        fig_name = os.path.join(get_data_dir(), os.path.dirname(fname),
                                fig_name)
        plt.savefig(fig_name)
        logString("Saved fig to {0}".format(fig_name))
        plt.close()
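
For reference, a hedged example of driving analyze(); every value is a placeholder chosen to exercise the branches above, not a documented default:

analyze("latest",                    # newest .dat log in the data directory
        imu_to_plot="both",
        estimate="ind_angles",       # "none" plots raw data instead
        use_calibration=True,
        use_legacy_sign_convention=False,
        use_time_stamps=True,
        plot_slice="0.0,10.0",       # "start,end" in seconds
        make_wav=False,
        anim_data=(False, None, None),
        smoothing_data=("hanning", 51))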
Example no. 6
def extract_max_values(stack_grid,
                       search_info,
                       f_stack,
                       use_ram=False,
                       n_max=5e8):
    """
    Extracts maximum stack value from a 4D migrated grid. Also extracts x_max,
    y_max, z_max. TODO : flesh out this doc-string.

    :param stack_grid:
    :param search_info:
    :param f_stack:
    :param use_fam:
    :param n_max:
    """

    # get basic info
    nx = search_info['nx']
    ny = search_info['ny']
    nz = search_info['nz']
    dx = search_info['dx']
    dy = search_info['dy']
    dz = search_info['dz']
    x_orig = search_info['x_orig']
    y_orig = search_info['y_orig']
    z_orig = search_info['z_orig']

    nb, nt = stack_grid.shape

    if use_ram:
        max_val = np.empty(nt, dtype='float32')
        max_val_smooth = np.empty(nt, dtype='float32')
        max_x = np.empty(nt, dtype='float32')
        max_y = np.empty(nt, dtype='float32')
        max_z = np.empty(nt, dtype='float32')
        # create temporary datasets
        max_ib = np.empty(nt, dtype=int)
        max_ix = np.empty(nt, dtype=int)
        max_iy = np.empty(nt, dtype=int)
        max_iz = np.empty(nt, dtype=int)
    else:
        max_val = f_stack.create_dataset('max_val', (nt, ), 'f')
        max_val_smooth = f_stack.create_dataset('max_val_smooth', (nt, ), 'f')
        max_x = f_stack.create_dataset('max_x', (nt, ), 'f')
        max_y = f_stack.create_dataset('max_y', (nt, ), 'f')
        max_z = f_stack.create_dataset('max_z', (nt, ), 'f')
        # create temporary datasets
        max_ib = f_stack.create_dataset('max_ib', (nt, ), 'i')
        max_ix = f_stack.create_dataset('max_ix', (nt, ), 'i')
        max_iy = f_stack.create_dataset('max_iy', (nt, ), 'i')
        max_iz = f_stack.create_dataset('max_iz', (nt, ), 'i')

    # extract values
    dt = int(n_max / nb)
    if nt <= dt:
        # do the extraction in one step
        max_ib[:] = np.argmax(stack_grid, 0)
        max_val[:] = np.max(stack_grid, 0)

    else:
        # do the extraction in steps
        n = nt // dt  # integer number of full chunks
        logging.info('Number of values exceeds %d. Doing extraction in '
                     '%d steps' % (n_max, n))
        for i in islice(count(0), n):
            max_ib[i * dt:(i + 1) * dt] = np.argmax(
                stack_grid[:, i * dt:(i + 1) * dt], 0)
            max_val[i * dt:(i + 1) * dt] = np.max(
                stack_grid[:, i * dt:(i + 1) * dt], 0)
        max_ib[n * dt:nt] = np.argmax(stack_grid[:, n * dt:nt], 0)
        max_val[n * dt:nt] = np.max(stack_grid[:, n * dt:nt], 0)

    # find the corresponding x,y,z values
    max_ix, max_iy, max_iz = np.unravel_index(max_ib, (nx, ny, nz))
    max_x[:] = max_ix[:] * dx + x_orig
    max_y[:] = max_iy[:] * dy + y_orig
    max_z[:] = max_iz[:] * dz + z_orig
    max_val_smooth[:] = smooth(np.array(max_val), 51)

    if use_ram:
        # add datasets to hdf5file
        max_val = f_stack.create_dataset('max_val', data=max_val)
        max_val_smooth = f_stack.create_dataset('max_val_smooth',
                                                data=max_val_smooth)
        max_x = f_stack.create_dataset('max_x', data=max_x)
        max_y = f_stack.create_dataset('max_y', data=max_y)
        max_z = f_stack.create_dataset('max_z', data=max_z)
    else:
        # clean up temporary datasets
        del f_stack['max_ib']
        del f_stack['max_ix']
        del f_stack['max_iy']
        del f_stack['max_iz']
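
A hedged, self-contained sketch of calling this version on synthetic data; the grid geometry is invented, and smooth() must be importable as in the other examples:

import h5py
import numpy as np

# invented grid geometry: 20 x 20 x 10 nodes with 0.5 km spacing
search_info = dict(nx=20, ny=20, nz=10,
                   dx=0.5, dy=0.5, dz=0.5,
                   x_orig=0.0, y_orig=0.0, z_orig=-2.0)
nb = search_info['nx'] * search_info['ny'] * search_info['nz']
nt = 10000
stack_grid = np.random.rand(nb, nt).astype('float32')

with h5py.File('stack_demo.hdf5', 'w') as f_stack:
    extract_max_values(stack_grid, search_info, f_stack, use_ram=True)
    print(f_stack['max_val'][:5], f_stack['max_x'][:5])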
Example no. 7
def do_locations_trigger_setup_and_run(opdict):

    base_path = opdict['base_path']
    # set up data paths from the options dictionary
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    kurt_files = glob.glob(os.path.join(data_dir, opdict['kurtglob']))
    data_files = glob.glob(os.path.join(data_dir, opdict['dataglob']))
    kurt_files.sort()
    data_files.sort()

    dataglob = opdict['dataglob']

    time_grids = get_interpolated_time_grids(opdict)

    logging.info("Starting log for combine_stacks.")

    out_path = os.path.join(base_path, 'out', opdict['outdir'])
    stack_path = os.path.join(out_path, 'stack')

    reloc = opdict['reloc']
    if reloc:
        loc_path = os.path.join(out_path, 'reloc')
        stack_files = glob.glob(
            os.path.join(stack_path, 'reloc_stack_all*.hdf5'))
        stack_files.sort()
    else:
        loc_path = os.path.join(out_path, 'loc')
        stack_files = glob.glob(os.path.join(stack_path, 'stack_all*.hdf5'))
        stack_files.sort()

    n_stacks = len(stack_files)
    if n_stacks == 0:
        raise UserWarning('Empty list of stacks in %s' % (stack_path))

    loc_filename = os.path.join(loc_path, "locations.dat")
    logging.info("Path for stack files : %s" % stack_path)
    logging.info("Path for loc files : %s" % loc_path)
    logging.info("Location file : %s" % loc_filename)

    # DO DATA PREP ACCORDING TO RELOC OR NOT

    logging.info(
        "\nDealing with continuous location, so merging stack files directly ...\n"
    )

    # get basic info from first file
    f_stack = h5py.File(stack_files[0], 'r')
    max_val = f_stack['max_val']
    dt = max_val.attrs['dt']
    f_stack.close()

    # get the start and end times of each stack file
    start_times = []
    end_times = []
    for fname in stack_files:
        f_stack = h5py.File(fname, 'r')
        max_val = f_stack['max_val']
        start_times.append(utcdatetime.UTCDateTime(
            max_val.attrs['start_time']))
        end_times.append(
            utcdatetime.UTCDateTime(max_val.attrs['start_time']) +
            dt * len(max_val))
        f_stack.close()

    first_start_time = min(start_times)
    last_end_time = max(end_times)

    nt_full = int((last_end_time - first_start_time) / dt) + 1

    # create - assume all stacks are of the same length and will be concatenated end to end
    #          (this will give more than enough space)
    f = h5py.File(os.path.join(stack_path, 'combined_stack_all.hdf5'), 'w')
    cmax_val = f.create_dataset('max_val', (nt_full, ),
                                'f',
                                chunks=(nt_full, ))
    cmax_x = f.create_dataset('max_x', (nt_full, ), 'f', chunks=(nt_full, ))
    cmax_y = f.create_dataset('max_y', (nt_full, ), 'f', chunks=(nt_full, ))
    cmax_z = f.create_dataset('max_z', (nt_full, ), 'f', chunks=(nt_full, ))

    # concatenate unsmoothed versions of max_val to avoid
    # problems at file starts and ends
    for i in range(n_stacks):
        f_stack = h5py.File(stack_files[i], 'r')
        max_val = f_stack['max_val']
        max_x = f_stack['max_x']
        max_y = f_stack['max_y']
        max_z = f_stack['max_z']

        # get time info for this stack
        nt = len(max_val)
        start_time = utcdatetime.UTCDateTime(max_val.attrs['start_time'])
        ibegin = int((start_time - first_start_time) / dt)

        # copy data over into the right place
        cmax_val[ibegin:ibegin + nt] = max_val[:]
        cmax_x[ibegin:ibegin + nt] = max_x[:]
        cmax_y[ibegin:ibegin + nt] = max_y[:]
        cmax_z[ibegin:ibegin + nt] = max_z[:]

        # close the stack
        f_stack.close()

    # create the smoothed version of the max stack
    cmax_val_smooth = f.create_dataset('max_val_smooth', (nt_full, ),
                                       'f',
                                       chunks=(nt_full, ))
    cmax_val_smooth[:] = smooth(np.array(cmax_val), 51)

    for name in f:
        dset = f[name]
        dset.attrs['dt'] = dt
        dset.attrs['start_time'] = first_start_time.isoformat()

    # DO TRIGGERING AND LOCATION
    if opdict['auto_loclevel']:
        loclevel = opdict['snr_loclevel'] * np.median(cmax_val_smooth)
        opdict['loclevel'] = loclevel
    else:
        loclevel = opdict['loclevel']
    left_trig = loclevel
    right_trig = loclevel

    loc_list = trigger_locations_inner(cmax_val_smooth[:], cmax_x, cmax_y,
                                       cmax_z, left_trig, right_trig,
                                       first_start_time, dt)
    logging.info('Found %d initial locations.' % (len(loc_list)))

    # close the stack file
    f.close()

    loc_file = open(loc_filename, 'w')
    write_header_options(loc_file, opdict)

    snr_limit = opdict['snr_limit']
    snr_tr_limit = opdict['snr_tr_limit']
    sn_time = opdict['sn_time']
    n_kurt_min = opdict['n_kurt_min']

    n_ok = 0
    locs = []
    for loc in loc_list:
        if number_good_kurtosis_for_location(
                kurt_files, data_files, loc, time_grids, snr_limit,
                snr_tr_limit, sn_time) > n_kurt_min:
            logging.info(
                "Max = %.2f, %s - %.2fs + %.2f s, x=%.4f pm %.4f km, y=%.4f pm %.4f km, z=%.4f pm %.4f km"
                %
                (loc['max_trig'], loc['o_time'].isoformat(), loc['o_err_left'],
                 loc['o_err_right'], loc['x_mean'], loc['x_sigma'],
                 loc['y_mean'], loc['y_sigma'], loc['z_mean'], loc['z_sigma']))
            loc_file.write(
                "Max = %.2f, %s - %.2f s + %.2f s, x= %.4f pm %.4f km, y= %.4f pm %.4f km, z= %.4f pm %.4f km\n"
                %
                (loc['max_trig'], loc['o_time'].isoformat(), loc['o_err_left'],
                 loc['o_err_right'], loc['x_mean'], loc['x_sigma'],
                 loc['y_mean'], loc['y_sigma'], loc['z_mean'], loc['z_sigma']))
            n_ok = n_ok + 1
            locs.append(loc)
        else:
            logging.info(
                "Not enough kurtosis picks for : Max = %.2f, %s - %.2fs + %.2fs, x=%.4f pm %.4f, y=%.4f pm %.4f, z=%.4f pm %.4f"
                %
                (loc['max_trig'], loc['o_time'].isoformat(), loc['o_err_left'],
                 loc['o_err_right'], loc['x_mean'], loc['x_sigma'],
                 loc['y_mean'], loc['y_sigma'], loc['z_mean'], loc['z_sigma']))
    loc_file.close()
    logging.info('Wrote %d locations to file %s.' % (n_ok, loc_filename))

    return locs
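
The options dictionary drives everything here. A hedged skeleton containing only the keys this function actually reads (all values are illustrative placeholders, not validated defaults):

opdict = {
    'base_path': '/path/to/project',  # must contain data/ and out/
    'datadir': 'raw', 'outdir': 'run1',
    'dataglob': '*.filt.sac', 'kurtglob': '*.kurt.sac',
    'reloc': False,                   # True switches to reloc_stack_all*.hdf5
    'auto_loclevel': False,
    'loclevel': 50.0,                 # used when auto_loclevel is False
    'snr_loclevel': 10.0,             # used when auto_loclevel is True
    'snr_limit': 10.0, 'snr_tr_limit': 10.0, 'sn_time': 10.0,
    'n_kurt_min': 4,
}
locs = do_locations_trigger_setup_and_run(opdict)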
Example no. 8
    def process_kurtosis(self,
                         win,
                         recursive=False,
                         pre_rmean=False,
                         pre_taper=False,
                         post_taper=True):
        """
        Processing waveform using kurtosis (from statlib package).
        Calls filters.sw_kurtosis1(), and overwrites the waveform.
        Sets self.prof to 'Kurtosis'

        :param win: length of the window (in seconds) on which to calculate the
                kurtosis
        :param recursive: If ``True`` applies recursive kurtosis calculation
        :param pre_rmean: If ``True`` removes mean of signal before processing.
        :param pre_taper: If ``True`` applies taper to signal before
            processing.
        :param post_taper: If ``True`` applies taper to signal after
            processing.

        :type recursive: boolean, optional
        :type pre_rmean: boolean, optional
        :type pre_taper: boolean, optional
        :type post_taper: boolean, optional

        """

        logging.info("Applying kurtosis to single traces, window = %.2f s\n" %
                     win)

        dt = self.dt

        if pre_rmean:
            self.rmean()

        if pre_taper:
            self.taper()

        # process each trace independently
        for itr in range(self.stream.count()):
            tr = self.stream.traces[itr]
            starttime = tr.stats.starttime
            x = tr.data

            npts = len(tr.data)

            xs = np.zeros(npts)

            if recursive:
                C = 1 - dt / float(win)
                xs = rec_kurtosis_old(x, C)
                # Chassande-Mottin style kurtosis
                #C1=dt/float(win)
                #xs=rec_kurtosis(x,C1)
                # smooth xs
                try:
                    xs_filt = smooth(xs)
                except ValueError:
                    xs_filt = xs

            else:
                # run the sliding window kurtosis
                nwin = int(win / dt)
                if len(x) > 3 * nwin:
                    xs = sw_kurtosis1(x, nwin)
                    xs_filt = smooth(xs)
                    # fix up the starttime of the trace
                    tr.stats.starttime = starttime + (nwin - 1) * dt
                else:
                    xs_filt = xs

            # Save xs values as waveform
            tr.data = xs_filt

            # put trace back into stream
            self.stream.traces[itr] = tr

        # apply taper after kurtosis calculation if required
        if post_taper:
            self.taper()

        # set the process flag
        self.proc = 'Kurtosis'
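
sw_kurtosis1() is not shown on this page. A plausible sketch, assuming it computes plain (non-excess) kurtosis over a sliding window, is below; note the output is nwin - 1 samples shorter than the input, which is why the code above shifts the trace start time by (nwin - 1) * dt:

import numpy as np
from scipy.stats import kurtosis


def sw_kurtosis_sketch(x, nwin):
    # hypothetical stand-in for filters.sw_kurtosis1: one kurtosis value per
    # length-nwin window, giving len(x) - nwin + 1 output samples
    windows = np.lib.stride_tricks.sliding_window_view(x, nwin)
    return kurtosis(windows, axis=1, fisher=False)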
Example no. 9
def kurto(origin_time, info, opdict):
    """
    Finds, for each Waveloc event and each station, the best filtering
    parameters for the kurtosis computation, and writes them into the
    dictionary info.

    :param origin_time: origin time of the signal
    :param info: dictionary of parameters
    :param opdict: dictionary of the Waveloc parameters and options

    :type origin_time: utcdatetime
    :type info: dictionary
    :type opdict: dictionary

    :rtype: dictionary
    :returns: info
    """
    verbose = opdict['verbose']
    kwin = opdict['kwin']

    start_time = origin_time - 5.0
    end_time = origin_time + 20.0
    dt = info['dt']

    # Trace
    x = waveval(info['data_ini'], start_time, end_time, dt, info['tdeb_data'])
    # bail out if the trace is empty or all zeros
    if not x.any():
        return info

    # Initial kurtosis (trace filtered between 4-10Hz)
    kurtx = waveval(info['kurt_ini'], start_time, end_time, dt,
                    info['tdeb_kurt'])
    kurtx = smooth(kurtx)

    N = len(x)
    N2 = np.log2(N) - 7
    nlevel = int(np.fix(N2))

    snr_ref = np.max(np.abs(x)) / np.mean(np.abs(x))
    snr_kurt_ref = np.max(np.abs(kurtx)) / np.mean(np.abs(kurtx))
    kmax_ref = np.max(kurtx)  # maximum of the kurtosis

    # Compute the kurtogram and keep best frequencies
    if verbose:
        import matplotlib.gridspec as gridspec
        G = gridspec.GridSpec(3, 2)
        fig = plt.figure(figsize=(15, 6))
        fig.set_facecolor('white')
        fig.add_subplot(G[:, 0])

    Kwav, Level_w, freq_w, c, f_lower, f_upper = \
        Fast_Kurtogram(np.array(x, dtype=float), nlevel, verbose, Fs=1/dt,
                       opt2=1)

    # Compare the kurtosis computed in the new frequency band against the
    # reference band (criteria: SNR and kmax)
    # 1. Read the initial data
    wf = Waveform()
    wf.read_from_file(info['data_file'],
                      starttime=start_time - kwin,
                      endtime=end_time + kwin)

    nbpts = int(kwin * 1. / dt)

    # 2. Filter the trace with kurtogram frequencies
    wf.bp_filter(f_lower, f_upper)
    x_filt = wf.values
    x_filt = x_filt[nbpts:-nbpts]

    # 3. Compute the kurtosis
    wf.process_kurtosis(kwin, recursive=opdict['krec'])
    new_kurtx = wf.values
    if opdict['krec']:
        new_kurtx = new_kurtx[nbpts + 1:-nbpts - 1]
    else:
        new_kurtx = new_kurtx[:-nbpts - 1]

    snr = np.max(np.abs(x_filt)) / np.mean(np.abs(x_filt))
    snr_kurt = np.max(np.abs(new_kurtx)) / np.mean(np.abs(new_kurtx))
    kmax = np.max(new_kurtx)

    if snr > snr_ref and kmax >= kmax_ref:
        info['filter'].append(
            (round(f_lower * 100) / 100, round(f_upper * 100) / 100))
        if 'new_kurt_file' in info:
            info = write_file(info, start_time, end_time, new_kurtx)
    else:
        info['filter'].append((0, 50))

    if verbose and snr > 3:
        print "snr:", snr, " ; snr_ref:", snr_ref
        print "snr new kurtosis:", snr_kurt, " ; snr kurtosis reference:",\
            snr_kurt_ref
        print "kurtosis max, kurt_ref :", kmax, kmax_ref
        plot_trace(fig, G, x, x_filt, kurtx, new_kurtx, info, f_lower, f_upper,
                   snr, snr_ref, snr_kurt, kmax, kmax_ref, origin_time)
        plt.show()

    return info