Example #1
import unittest

import numpy as np

# compute_expected_coordinates4D is provided by the package under test and is
# assumed to be importable next to this test case.


class ExpectedCoordinatesTest(unittest.TestCase):  # enclosing class name assumed
    def test_expected_values(self):

        dims = (20, 30, 40, 50)
        grid4D = np.zeros(dims)
        x0 = np.linspace(0, 4, dims[0])
        x1 = np.linspace(0, 6, dims[1])
        x2 = np.linspace(0, 8, dims[2])
        x3 = np.linspace(0, 10, dims[3])
        grid4D[1, 2, 3, 7] = 2
        grid4D[1, 2, 4, 7] = 4  # add something interesting to find
        grid4D[1, 2, 5, 7] = 2
        my_exp0 = x0[1]
        my_exp1 = x1[2]
        my_exp2 = x2[4]
        my_exp3 = x3[7]

        exp0, exp1, exp2, exp3, cov_matrix = \
            compute_expected_coordinates4D(grid4D, x0, x1, x2, x3)
        var_x0 = cov_matrix[0, 0]
        var_x1 = cov_matrix[1, 1]
        var_x2 = cov_matrix[2, 2]
        var_x3 = cov_matrix[3, 3]

        self.assertAlmostEqual(my_exp0, exp0, 7)
        self.assertAlmostEqual(my_exp1, exp1, 7)
        self.assertAlmostEqual(my_exp2, exp2, 7)
        self.assertAlmostEqual(my_exp3, exp3, 7)

        # the grid is a point mass along x0, x1 and x3, so those variances
        # vanish; along x2 the weights (2, 4, 2) give var = (8/39)**2 / 2
        self.assertAlmostEqual(var_x0, 0.0, 7)
        self.assertAlmostEqual(var_x1, 0.0, 7)
        self.assertAlmostEqual(var_x2, 0.0210, 4)
        self.assertAlmostEqual(var_x3, 0.0, 7)
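A minimal sketch of what this test relies on, assuming only that
compute_expected_coordinates4D treats the grid as an unnormalized probability
mass and returns the weighted mean along each axis plus a covariance matrix.
The sketch fills only the diagonal covariance entries the test inspects, and
the name expected_coordinates4D_sketch is illustrative, not the project's API:

import numpy as np

def expected_coordinates4D_sketch(grid, x0, x1, x2, x3):
    # normalize the grid so it sums to one
    p = grid / grid.sum()
    axes = (x0, x1, x2, x3)
    # marginal along axis i: sum the mass over the other three axes
    marginals = [p.sum(axis=tuple(j for j in range(4) if j != i))
                 for i in range(4)]
    # expected value and variance along each axis from its marginal
    exps = [float(np.dot(m, ax)) for m, ax in zip(marginals, axes)]
    variances = [float(np.dot(m, (ax - e) ** 2))
                 for m, ax, e in zip(marginals, axes, exps)]
    return exps[0], exps[1], exps[2], exps[3], np.diag(variances)

With the grid above, the x2 marginal carries weights (2, 4, 2)/8 at indices
3, 4 and 5, so exp2 = x2[4] exactly, and var_x2 = (8/39)**2 / 2, which is
0.0210 to four decimal places.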
Example #2
import glob
import logging
import os

import h5py
import numpy as np

# Project helpers (do_locations_trigger_setup_and_run, read_stations_file,
# read_hdr_file, get_interpolated_time_grids, read_locs_from_file,
# read_data_compatible_with_time_dict, do_migration_loop_continuous,
# compute_expected_coordinates3D and compute_expected_coordinates4D) are
# assumed to be importable from the Waveloc package.

def do_locations_prob_setup_and_run(opdict):

  # get / set info
  base_path=opdict['base_path']
  space_only = opdict['probloc_spaceonly']

  locfile=os.path.join(base_path,'out',opdict['outdir'],'loc','locations.dat')
  locfile_prob=os.path.join(base_path,'out',opdict['outdir'],'loc','locations_prob.dat')
  locfile_hdf5=os.path.join(base_path,'out',opdict['outdir'],'loc','locations_prob.hdf5')
  f_prob=open(locfile_prob,'w')

  # if locfile does not exist then make it by running trigger location
  if not os.path.exists(locfile):
    logging.info('No location found at %s.  Running trigger location first...'%locfile)
    do_locations_trigger_setup_and_run(opdict)

  # directories
  grid_dir=os.path.join(base_path,'out',opdict['outdir'],'grid')
  output_dir=os.path.join(base_path,'out',opdict['outdir'])

  # data files
  data_dir=os.path.join(base_path,'data',opdict['datadir'])
  data_glob=opdict['dataglob']
  kurt_glob=opdict['kurtglob']
  grad_glob=opdict['gradglob']
  data_files=glob.glob(os.path.join(data_dir,data_glob))
  kurt_files=glob.glob(os.path.join(data_dir,kurt_glob))
  grad_files=glob.glob(os.path.join(data_dir,grad_glob))
  data_files.sort()
  kurt_files.sort()
  grad_files.sort()

  # stations
  stations_filename=os.path.join(base_path,'lib',opdict['stations'])
  stations=read_stations_file(stations_filename)

  # grids
  grid_filename_base=os.path.join(base_path,'lib',opdict['time_grid'])
  search_grid_filename=os.path.join(base_path,'lib',opdict['search_grid'])

  # read time grid information
  time_grids=get_interpolated_time_grids(opdict)

  # read locations
  locs=read_locs_from_file(locfile)

  # prepare file for output of marginals
  f_marginals = h5py.File(locfile_hdf5,'w')

  # iterate over locations
  for loc in locs:

    # create the appropriate grid on the fly

    # generate the grids
    o_time=loc['o_time']
    if space_only:
        start_time=o_time
        end_time  =o_time
    else:
        start_time=o_time-3*loc['o_err_left']
        end_time=o_time+3*loc['o_err_right']

    # make a buffer for migration
    start_time_migration = start_time - 10.0
    end_time_migration   =   end_time + 10.0

    # re-read grid info to ensure clean copy
    grid_info=read_hdr_file(search_grid_filename)
 
    # read data
    grad_dict,delta = read_data_compatible_with_time_dict(grad_files,
          time_grids, start_time_migration, end_time_migration)

    # do migration (all metadata on grid is added to grid_info)
    do_migration_loop_continuous(opdict, grad_dict, delta,
          start_time_migration, grid_info, time_grids, keep_grid=True)


    # integrate to get the marginal probability density distributions

    # get required info
    grid_starttime=grid_info['start_time']
    nx,ny,nz,nt=grid_info['grid_shape']
    dx,dy,dz,dt=grid_info['grid_spacing']
    x_orig,y_orig,z_orig=grid_info['grid_orig']

    # we are only interested in the time around the origin time of the event
    # np.int was removed from recent NumPy releases; use the builtin int
    it_left  = int(np.round((start_time - grid_starttime)/dt))
    it_right = int(np.round((end_time   - grid_starttime)/dt))
    it_true  = int(np.round((o_time     - grid_starttime)/dt))
    nt=(it_right-it_left)+1

    # set up integration axes (wrt reference)
    x=np.arange(nx)*dx
    y=np.arange(ny)*dy
    z=np.arange(nz)*dz
    if not space_only:
      t=np.arange(nt)*dt

    # open the grid file
    grid_filename=grid_info['dat_file']
    f=h5py.File(grid_filename,'r')
    stack_grid=f['stack_grid']

    # extract the portion of interest (copy data)
    if space_only:
        stack_3D=np.empty((nx,ny,nz))
        stack_3D[:] = stack_grid[:,it_true].reshape(nx,ny,nz)
    else:
        stack_4D=np.empty((nx,ny,nz,nt))
        stack_4D[:] = stack_grid[:,it_left:it_right+1].reshape(nx,ny,nz,nt)

    # close the grid file
    f.close()

    # Get expected values (normalizes grid internally)
    if space_only:
        exp_x, exp_y, exp_z, cov_matrix, prob_dict = \
            compute_expected_coordinates3D(stack_3D,x,y,z,return_2Dgrids=True)
    else:
        exp_x, exp_y, exp_z, exp_t, cov_matrix, prob_dict = \
            compute_expected_coordinates4D(stack_4D,x,y,z,t,return_2Dgrids=True)

    # put reference location back
    exp_x = exp_x + x_orig
    exp_y = exp_y + y_orig
    exp_z = exp_z + z_orig
    if space_only:
        exp_t = o_time
    else:
        exp_t = start_time + exp_t

    # extract uncertainties from covariance matrix
    if space_only:
        sig_x,sig_y,sig_z = np.sqrt(np.diagonal(cov_matrix))
        sig_t = (loc['o_err_left']+loc['o_err_right'])/2.
    else:
        sig_x,sig_y,sig_z,sig_t = np.sqrt(np.diagonal(cov_matrix))


    # save the marginals to an HDF5 file in the loc subdirectory (f_marginals)
    # each event becomes a group in this one file
    grp = f_marginals.create_group(exp_t.isoformat())
    grp.create_dataset('x',data=x+x_orig)
    grp.create_dataset('y',data=y+y_orig)
    grp.create_dataset('z',data=z+z_orig)
    grp.create_dataset('prob_x',data=prob_dict['prob_x0'])
    grp.create_dataset('prob_y',data=prob_dict['prob_x1'])
    grp.create_dataset('prob_z',data=prob_dict['prob_x2'])
    grp.create_dataset('prob_xy',data=prob_dict['prob_x0_x1'])
    grp.create_dataset('prob_xz',data=prob_dict['prob_x0_x2'])
    grp.create_dataset('prob_yz',data=prob_dict['prob_x1_x2'])
    if not space_only:
        grp.create_dataset('t',data=t-(o_time - start_time))
        grp.create_dataset('prob_t',data=prob_dict['prob_x3'])
        grp.create_dataset('prob_xt',data=prob_dict['prob_x0_x3'])
        grp.create_dataset('prob_yt',data=prob_dict['prob_x1_x3'])
        grp.create_dataset('prob_zt',data=prob_dict['prob_x2_x3'])


    # write the expected values to a plain text locations file

    f_prob.write("PROB DENSITY : T = %s s pm %.2f s, x= %.4f pm %.4f km, \
y= %.4f pm %.4f km, z= %.4f pm %.4f km\n" % (exp_t.isoformat(), sig_t, \
      exp_x, sig_x, exp_y, sig_y, exp_z, sig_z))

  # close location files
  f_prob.close()
  f_marginals.close()
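For context, a hypothetical opdict covering every key the function above
reads; the paths, glob patterns and file names are placeholders, not values
from the project:

opdict = {
    'base_path': '/path/to/waveloc_run',   # placeholder
    'outdir': 'run1',
    'datadir': 'raw',
    'dataglob': '*.mseed',
    'kurtglob': '*kurt*.mseed',
    'gradglob': '*grad*.mseed',
    'stations': 'stations.txt',            # read from <base_path>/lib
    'time_grid': 'time_grid_base',         # read from <base_path>/lib
    'search_grid': 'search_grid.hdr',      # read from <base_path>/lib
    'probloc_spaceonly': True,             # 3D (space-only) marginals
}
do_locations_prob_setup_and_run(opdict)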
Example #3
import glob
import logging
import os

import h5py
import numpy as np

# Project helper functions are assumed to be importable from the Waveloc
# package, as in the previous example.

def do_locations_prob_setup_and_run(opdict):

    # get / set info
    base_path = opdict['base_path']
    space_only = opdict['probloc_spaceonly']

    locfile = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                           'locations.dat')
    locfile_prob = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                                'locations_prob.dat')
    locfile_hdf5 = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                                'locations_prob.hdf5')
    f_prob = open(locfile_prob, 'w')

    # if locfile does not exist then make it by running trigger location
    if not os.path.exists(locfile):
        logging.info(
            'No location found at %s.  Running trigger location first...' %
            locfile)
        do_locations_trigger_setup_and_run(opdict)

    # directories
    grid_dir = os.path.join(base_path, 'out', opdict['outdir'], 'grid')
    output_dir = os.path.join(base_path, 'out', opdict['outdir'])

    # data files
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    data_glob = opdict['dataglob']
    kurt_glob = opdict['kurtglob']
    grad_glob = opdict['gradglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    grad_files = glob.glob(os.path.join(data_dir, grad_glob))
    data_files.sort()
    kurt_files.sort()
    grad_files.sort()

    # stations
    stations_filename = os.path.join(base_path, 'lib', opdict['stations'])
    stations = read_stations_file(stations_filename)

    # grids
    grid_filename_base = os.path.join(base_path, 'lib', opdict['time_grid'])
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # prepare file for output of marginals
    f_marginals = h5py.File(locfile_hdf5, 'w')

    # iterate over locations
    for loc in locs:

        # create the appropriate grid on the fly

        # generate the grids
        o_time = loc['o_time']
        if space_only:
            start_time = o_time
            end_time = o_time
        else:
            start_time = o_time - 3 * loc['o_err_left']
            end_time = o_time + 3 * loc['o_err_right']

        # make a buffer for migration
        start_time_migration = start_time - 10.0
        end_time_migration = end_time + 10.0

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        # read data
        grad_dict, delta = read_data_compatible_with_time_dict(
            grad_files, time_grids, start_time_migration, end_time_migration)

        # do migration (all metadata on grid is added to grid_info)
        do_migration_loop_continuous(opdict,
                                     grad_dict,
                                     delta,
                                     start_time_migration,
                                     grid_info,
                                     time_grids,
                                     keep_grid=True)

        # integrate to get the marginal probability density distributions

        # get required info
        grid_starttime = grid_info['start_time']
        nx, ny, nz, nt = grid_info['grid_shape']
        dx, dy, dz, dt = grid_info['grid_spacing']
        x_orig, y_orig, z_orig = grid_info['grid_orig']

        # we are only interested in the time around the origin time of the event
        # np.int was removed from recent NumPy releases; use the builtin int
        it_left = int(np.round((start_time - grid_starttime) / dt))
        it_right = int(np.round((end_time - grid_starttime) / dt))
        it_true = int(np.round((o_time - grid_starttime) / dt))
        nt = (it_right - it_left) + 1

        # set up integration axes (wrt reference)
        x = np.arange(nx) * dx
        y = np.arange(ny) * dy
        z = np.arange(nz) * dz
        if not space_only:
            t = np.arange(nt) * dt

        # open the grid file
        grid_filename = grid_info['dat_file']
        f = h5py.File(grid_filename, 'r')
        stack_grid = f['stack_grid']

        # extract the portion of interest (copy data)
        if space_only:
            stack_3D = np.empty((nx, ny, nz))
            stack_3D[:] = stack_grid[:, it_true].reshape(nx, ny, nz)
        else:
            stack_4D = np.empty((nx, ny, nz, nt))
            stack_4D[:] = stack_grid[:, it_left:it_right + 1].reshape(
                nx, ny, nz, nt)

        # close the grid file
        f.close()

        # Get expected values (normalizes grid internally)
        if space_only:
            exp_x, exp_y, exp_z, cov_matrix, prob_dict = \
                compute_expected_coordinates3D(stack_3D,x,y,z,return_2Dgrids=True)
        else:
            exp_x, exp_y, exp_z, exp_t, cov_matrix, prob_dict = \
                compute_expected_coordinates4D(stack_4D,x,y,z,t,return_2Dgrids=True)

        # put reference location back
        exp_x = exp_x + x_orig
        exp_y = exp_y + y_orig
        exp_z = exp_z + z_orig
        if space_only:
            exp_t = o_time
        else:
            exp_t = start_time + exp_t

        # extract uncertainties from covariance matrix
        if space_only:
            sig_x, sig_y, sig_z = np.sqrt(np.diagonal(cov_matrix))
            sig_t = (loc['o_err_left'] + loc['o_err_right']) / 2.
        else:
            sig_x, sig_y, sig_z, sig_t = np.sqrt(np.diagonal(cov_matrix))

        # save the marginals to an HDF5 file in the loc subdirectory
        # (f_marginals); each event becomes a group in this one file
        grp = f_marginals.create_group(exp_t.isoformat())
        grp.create_dataset('x', data=x + x_orig)
        grp.create_dataset('y', data=y + y_orig)
        grp.create_dataset('z', data=z + z_orig)
        grp.create_dataset('prob_x', data=prob_dict['prob_x0'])
        grp.create_dataset('prob_y', data=prob_dict['prob_x1'])
        grp.create_dataset('prob_z', data=prob_dict['prob_x2'])
        grp.create_dataset('prob_xy', data=prob_dict['prob_x0_x1'])
        grp.create_dataset('prob_xz', data=prob_dict['prob_x0_x2'])
        grp.create_dataset('prob_yz', data=prob_dict['prob_x1_x2'])
        if not space_only:
            grp.create_dataset('t', data=t - (o_time - start_time))
            grp.create_dataset('prob_t', data=prob_dict['prob_x3'])
            grp.create_dataset('prob_xt', data=prob_dict['prob_x0_x3'])
            grp.create_dataset('prob_yt', data=prob_dict['prob_x1_x3'])
            grp.create_dataset('prob_zt', data=prob_dict['prob_x2_x3'])

        # write the expected values to a plain text locations file

        f_prob.write("PROB DENSITY : T = %s s pm %.2f s, x= %.4f pm %.4f km, \
y= %.4f pm %.4f km, z= %.4f pm %.4f km\n"                                              % (exp_t.isoformat(), sig_t, \
          exp_x, sig_x, exp_y, sig_y, exp_z, sig_z))

    # close location files
    f_prob.close()
    f_marginals.close()
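To make the time-window bookkeeping above concrete, a small worked sketch
with hypothetical numbers (dt, grid_starttime and the event times are made
up; the arithmetic mirrors the it_left/it_right/nt lines):

import numpy as np

dt = 0.05                   # grid time step in seconds (assumed)
grid_starttime = 0.0        # grid time origin (assumed)
start_time, end_time, o_time = 2.0, 3.0, 2.4

it_left = int(np.round((start_time - grid_starttime) / dt))    # 40
it_right = int(np.round((end_time - grid_starttime) / dt))     # 60
nt = (it_right - it_left) + 1                                  # 21 samples
t = np.arange(nt) * dt                 # 0.0 .. 1.0 s, relative to start_time
# the 't' dataset is stored shifted so that 0 falls on the origin time:
t_shifted = t - (o_time - start_time)  # -0.4 .. +0.6 s around o_time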
Example #4
import glob
import logging
import os

import h5py
import numpy as np

# Project helper functions are assumed to be importable from the Waveloc
# package, as in the previous examples.

def do_locations_prob_setup_and_run(opdict):
    """
    Setup and run probability-based locations on migration grids. Takes all
    parameters from WavelocOptions.opdict.

    :param opdict: Parameters and options for Waveloc.
    """

    # get / set info
    base_path = opdict["base_path"]
    space_only = opdict["probloc_spaceonly"]

    locfile = os.path.join(base_path, "out", opdict["outdir"], "loc", "locations.dat")
    locfile_prob = os.path.join(base_path, "out", opdict["outdir"], "loc", "locations_prob.dat")
    locfile_hdf5 = os.path.join(base_path, "out", opdict["outdir"], "loc", "locations_prob.hdf5")
    f_prob = open(locfile_prob, "w")

    # if locfile does not exist then make it by running trigger location
    if not os.path.exists(locfile):
        logging.info(
            "No location found at %s.  Running trigger location first..."
            % locfile
        )
        do_locations_trigger_setup_and_run(opdict)

    # data files
    data_dir = os.path.join(base_path, "data", opdict["datadir"])
    data_glob = opdict["dataglob"]
    kurt_glob = opdict["kurtglob"]
    grad_glob = opdict["gradglob"]
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    grad_files = glob.glob(os.path.join(data_dir, grad_glob))
    data_files.sort()
    kurt_files.sort()
    grad_files.sort()

    # grids
    search_grid_filename = os.path.join(base_path, "lib", opdict["search_grid"])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # prepare file for output of marginals
    f_marginals = h5py.File(locfile_hdf5, "w")

    # iterate over locations
    for loc in locs:
        # create the appropriate grid on the fly

        # generate the grids
        o_time = loc["o_time"]
        if space_only:
            start_time = o_time
            end_time = o_time
        else:
            start_time = o_time - 3 * loc["o_err_left"]
            end_time = o_time + 3 * loc["o_err_right"]

        # make a buffer for migration
        start_time_migration = start_time - 10.0
        end_time_migration = end_time + 10.0

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        # read data
        grad_dict, delta = read_data_compatible_with_time_dict(
            grad_files, time_grids, start_time_migration, end_time_migration
        )

        # do migration (all metadata on grid is added to grid_info)
        do_migration_loop_continuous(
            opdict, grad_dict, delta, start_time_migration, grid_info, time_grids, keep_grid=True
        )

        # integrate to get the marginal probability density distributions

        # get required info
        grid_starttime = grid_info["start_time"]
        nx, ny, nz, nt = grid_info["grid_shape"]
        dx, dy, dz, dt = grid_info["grid_spacing"]
        x_orig, y_orig, z_orig = grid_info["grid_orig"]

        # we are only interested in the time around the origin time of the
        # event
        # np.int was removed from recent NumPy releases; use the builtin int
        it_left = int(np.round((start_time - grid_starttime) / dt))
        it_right = int(np.round((end_time - grid_starttime) / dt))
        it_true = int(np.round((o_time - grid_starttime) / dt))
        nt = (it_right - it_left) + 1

        # set up integration axes (wrt reference)
        x = np.arange(nx) * dx
        y = np.arange(ny) * dy
        z = np.arange(nz) * dz
        if not space_only:
            t = np.arange(nt) * dt

        # open the grid file
        grid_filename = grid_info["dat_file"]
        f = h5py.File(grid_filename, "r")
        stack_grid = f["stack_grid"]

        # extract the portion of interest (copy data)
        if space_only:
            stack_3D = np.empty((nx, ny, nz))
            stack_3D[:] = stack_grid[:, it_true].reshape(nx, ny, nz)
        else:
            stack_4D = np.empty((nx, ny, nz, nt))
            stack_4D[:] = stack_grid[:, it_left : it_right + 1].reshape(nx, ny, nz, nt)

        # close the grid file
        f.close()

        # Get expected values (normalizes grid internally)
        if space_only:
            exp_x, exp_y, exp_z, cov_matrix, prob_dict = compute_expected_coordinates3D(
                stack_3D, x, y, z, return_2Dgrids=True
            )
        else:
            exp_x, exp_y, exp_z, exp_t, cov_matrix, prob_dict = compute_expected_coordinates4D(
                stack_4D, x, y, z, t, return_2Dgrids=True
            )

        # put reference location back
        exp_x = exp_x + x_orig
        exp_y = exp_y + y_orig
        exp_z = exp_z + z_orig
        if space_only:
            exp_t = o_time
        else:
            exp_t = start_time + exp_t

        # extract uncertainties from covariance matrix
        if space_only:
            sig_x, sig_y, sig_z = np.sqrt(np.diagonal(cov_matrix))
            sig_t = (loc["o_err_left"] + loc["o_err_right"]) / 2.0
        else:
            sig_x, sig_y, sig_z, sig_t = np.sqrt(np.diagonal(cov_matrix))

        # save the marginals to an HDF5 file in the loc subdirectory
        # (f_marginals); each event becomes a group in this one file
        grp = f_marginals.create_group(exp_t.isoformat())
        grp.create_dataset("x", data=x + x_orig)
        grp.create_dataset("y", data=y + y_orig)
        grp.create_dataset("z", data=z + z_orig)
        grp.create_dataset("prob_x", data=prob_dict["prob_x0"])
        grp.create_dataset("prob_y", data=prob_dict["prob_x1"])
        grp.create_dataset("prob_z", data=prob_dict["prob_x2"])
        grp.create_dataset("prob_xy", data=prob_dict["prob_x0_x1"])
        grp.create_dataset("prob_xz", data=prob_dict["prob_x0_x2"])
        grp.create_dataset("prob_yz", data=prob_dict["prob_x1_x2"])
        if not space_only:
            grp.create_dataset("t", data=t - (o_time - start_time))
            grp.create_dataset("prob_t", data=prob_dict["prob_x3"])
            grp.create_dataset("prob_xt", data=prob_dict["prob_x0_x3"])
            grp.create_dataset("prob_yt", data=prob_dict["prob_x1_x3"])
            grp.create_dataset("prob_zt", data=prob_dict["prob_x2_x3"])

        # write the expected values to a plain text locations file
        f_prob.write(
            "PROB DENSITY : T = %s s pm %.2f s, x= %.4f pm %.4f km, "
            "y= %.4f pm %.4f km, z= %.4f pm %.4f km\n"
            % (exp_t.isoformat(), sig_t, exp_x, sig_x, exp_y, sig_y,
               exp_z, sig_z)
        )

    # close location files
    f_prob.close()
    f_marginals.close()
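For completeness, a hedged sketch of reading back the marginals file this
function writes: each group is an event keyed by its ISO-format expected
time, and the dataset names match the create_dataset calls above (the file
path is illustrative):

import h5py

with h5py.File('locations_prob.hdf5', 'r') as f:
    for event_time in f:                  # one group per located event
        grp = f[event_time]
        x = grp['x'][:]                   # absolute spatial axis
        prob_x = grp['prob_x'][:]         # 1D marginal along x
        prob_xy = grp['prob_xy'][:]       # 2D marginal over (x, y)
        print(event_time, x.shape, prob_x.shape, prob_xy.shape)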