def do_locations_prob_setup_and_run(opdict):
    """
    Set up and run probability-based location on migration grids.

    Re-migrates the data around each previously triggered location,
    integrates the migration stack to obtain marginal probability
    densities, and writes expected values / uncertainties to
    ``locations_prob.dat`` and the marginals to ``locations_prob.hdf5``.

    :param opdict: Dictionary of Waveloc parameters and options.
    """
    # get / set info
    base_path = opdict['base_path']
    space_only = opdict['probloc_spaceonly']

    locfile = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                           'locations.dat')
    locfile_prob = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                                'locations_prob.dat')
    locfile_hdf5 = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                                'locations_prob.hdf5')
    f_prob = open(locfile_prob, 'w')

    # if locfile does not exist then make it by running trigger location
    if not os.path.exists(locfile):
        logging.info('No location found at %s. Running trigger location first...'
                     % locfile)
        do_locations_trigger_setup_and_run(opdict)

    # data files (only the gradient waveforms are migrated here)
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    grad_glob = opdict['gradglob']
    grad_files = glob.glob(os.path.join(data_dir, grad_glob))
    grad_files.sort()

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # prepare file for output of marginals
    f_marginals = h5py.File(locfile_hdf5, 'w')

    # iterate over locations
    for loc in locs:
        # create the appropriate grid on the fly

        # generate the grids
        o_time = loc['o_time']
        if space_only:
            start_time = o_time
            end_time = o_time
        else:
            start_time = o_time - 3 * loc['o_err_left']
            end_time = o_time + 3 * loc['o_err_right']

        # make a buffer for migration
        start_time_migration = start_time - 10.0
        end_time_migration = end_time + 10.0

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        # read data
        grad_dict, delta = read_data_compatible_with_time_dict(
            grad_files, time_grids, start_time_migration,
            end_time_migration)

        # do migration (all metadata on grid is added to grid_info)
        do_migration_loop_continuous(opdict, grad_dict, delta,
                                     start_time_migration, grid_info,
                                     time_grids, keep_grid=True)

        # integrate to get the marginal probability density distributions

        # get required info
        grid_starttime = grid_info['start_time']
        nx, ny, nz, nt = grid_info['grid_shape']
        dx, dy, dz, dt = grid_info['grid_spacing']
        x_orig, y_orig, z_orig = grid_info['grid_orig']

        # we are only interested in the time around the origin time of the
        # event (np.int was removed from numpy - use builtin int)
        it_left = int(np.round((start_time - grid_starttime) / dt))
        it_right = int(np.round((end_time - grid_starttime) / dt))
        it_true = int(np.round((o_time - grid_starttime) / dt))
        nt = (it_right - it_left) + 1

        # set up integration axes (wrt reference)
        x = np.arange(nx) * dx
        y = np.arange(ny) * dy
        z = np.arange(nz) * dz
        if not space_only:
            t = np.arange(nt) * dt

        # open the grid file
        grid_filename = grid_info['dat_file']
        f = h5py.File(grid_filename, 'r')
        stack_grid = f['stack_grid']

        # extract the portion of interest (copy data)
        if space_only:
            stack_3D = np.empty((nx, ny, nz))
            stack_3D[:] = stack_grid[:, it_true].reshape(nx, ny, nz)
        else:
            stack_4D = np.empty((nx, ny, nz, nt))
            stack_4D[:] = stack_grid[:, it_left:it_right + 1].reshape(
                nx, ny, nz, nt)

        # close the grid file
        f.close()

        # Get expected values (normalizes grid internally)
        if space_only:
            exp_x, exp_y, exp_z, cov_matrix, prob_dict = \
                compute_expected_coordinates3D(stack_3D, x, y, z,
                                               return_2Dgrids=True)
        else:
            exp_x, exp_y, exp_z, exp_t, cov_matrix, prob_dict = \
                compute_expected_coordinates4D(stack_4D, x, y, z, t,
                                               return_2Dgrids=True)

        # put reference location back
        exp_x = exp_x + x_orig
        exp_y = exp_y + y_orig
        exp_z = exp_z + z_orig
        if space_only:
            exp_t = o_time
        else:
            exp_t = start_time + exp_t

        # extract uncertainties from covariance matrix
        if space_only:
            sig_x, sig_y, sig_z = np.sqrt(np.diagonal(cov_matrix))
            sig_t = (loc['o_err_left'] + loc['o_err_right']) / 2.
        else:
            sig_x, sig_y, sig_z, sig_t = np.sqrt(np.diagonal(cov_matrix))

        # save the marginals to a hdf5 file in loc subdirectory
        # (f_marginals) - each event becomes a group in this one file
        grp = f_marginals.create_group(exp_t.isoformat())
        grp.create_dataset('x', data=x + x_orig)
        grp.create_dataset('y', data=y + y_orig)
        grp.create_dataset('z', data=z + z_orig)
        grp.create_dataset('prob_x', data=prob_dict['prob_x0'])
        grp.create_dataset('prob_y', data=prob_dict['prob_x1'])
        grp.create_dataset('prob_z', data=prob_dict['prob_x2'])
        grp.create_dataset('prob_xy', data=prob_dict['prob_x0_x1'])
        grp.create_dataset('prob_xz', data=prob_dict['prob_x0_x2'])
        grp.create_dataset('prob_yz', data=prob_dict['prob_x1_x2'])
        if not space_only:
            grp.create_dataset('t', data=t - (o_time - start_time))
            grp.create_dataset('prob_t', data=prob_dict['prob_x3'])
            grp.create_dataset('prob_xt', data=prob_dict['prob_x0_x3'])
            grp.create_dataset('prob_yt', data=prob_dict['prob_x1_x3'])
            grp.create_dataset('prob_zt', data=prob_dict['prob_x2_x3'])

        # write the expected values to a plain text locations file
        f_prob.write("PROB DENSITY : T = %s s pm %.2f s, x= %.4f pm %.4f km, \
y= %.4f pm %.4f km, z= %.4f pm %.4f km\n" % (exp_t.isoformat(), sig_t,
                     exp_x, sig_x, exp_y, sig_y, exp_z, sig_z))

    # close location files
    f_prob.close()
    f_marginals.close()
def do_plotting_setup_and_run(opdict, plot_wfm=True, plot_grid=True):
    """
    Plot the results of a waveloc run (migration grid and waveforms).

    :param opdict: WavelocOptions.opdict containing options / parameters.
    :param plot_wfm: If True, plot waveforms after location.
    :param plot_grid: If True, plot the migration grid.
    :type plot_wfm: boolean
    :type plot_grid: boolean
    """
    # get / set info
    base_path = opdict['base_path']

    locfile = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                           'locations.dat')
    stackfile = os.path.join(base_path, 'out', opdict['outdir'], 'stack',
                             'combined_stack_all.hdf5')
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])

    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    kurt_glob = opdict['kurtglob']
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    kurt_files.sort()
    mig_files = kurt_files

    # gradient (and optionally gaussian) waveforms take precedence over
    # plain kurtosis for migration when they were computed
    if opdict['kderiv']:
        grad_glob = opdict['gradglob']
        grad_files = glob.glob(os.path.join(data_dir, grad_glob))
        grad_files.sort()
        mig_files = grad_files

        if opdict['gauss']:
            gauss_glob = opdict['gaussglob']
            gauss_files = glob.glob(os.path.join(data_dir, gauss_glob))
            gauss_files.sort()
            mig_files = gauss_files

    figdir = os.path.join(base_path, 'out', opdict['outdir'], 'fig')

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # open stack file
    f_stack = h5py.File(stackfile, 'r')
    max_val = f_stack['max_val_smooth']
    stack_start_time = UTCDateTime(max_val.attrs['start_time'])

    for loc in locs:
        # generate the grids
        o_time = loc['o_time']
        start_time = o_time - opdict['plot_tbefore']
        end_time = o_time + opdict['plot_tafter']

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        x = loc['x_mean']
        y = loc['y_mean']
        z = loc['z_mean']

        # get the corresponding travel-times for time-shifting
        ttimes = {}
        for sta in time_grids.keys():
            ttimes[sta] = time_grids[sta].value_at_point(x, y, z)
        tshift_migration = max(ttimes.values())

        start_time_migration = start_time - tshift_migration
        end_time_migration = end_time + tshift_migration

        if plot_grid:
            logging.info('Plotting grid for location %s' %
                         o_time.isoformat())
            # TODO implement a rough estimation of the stack shift based
            # on propagation time across the whole network

            # read data
            mig_dict, delta = read_data_compatible_with_time_dict(
                mig_files, time_grids, start_time_migration,
                end_time_migration)

            # do migration
            do_migration_loop_continuous(opdict, mig_dict, delta,
                                         start_time_migration, grid_info,
                                         time_grids, keep_grid=True)

            # plot
            plotLocationGrid(loc, grid_info, figdir,
                             opdict['plot_otime_window'])

        if plot_wfm:
            logging.info('Plotting waveforms for location %s' %
                         o_time.isoformat())

            # read data
            data_dict, delta = read_data_compatible_with_time_dict(
                data_files, time_grids, start_time_migration,
                end_time_migration)
            mig_dict, delta = read_data_compatible_with_time_dict(
                mig_files, time_grids, start_time_migration,
                end_time_migration)

            # cut desired portion out of data
            for sta in data_dict.keys():
                tmp = data_dict[sta]
                # align waveform on origin time using its travel-time
                istart = int(np.round(
                    (start_time + ttimes[sta] - start_time_migration) /
                    delta))
                iend = istart + int(np.round(
                    (opdict['plot_tbefore'] + opdict['plot_tafter']) /
                    delta))

                # sanity check in case event is close to start or end of
                # data
                if istart < 0:
                    istart = 0
                if iend > len(tmp):
                    iend = len(tmp)
                data_dict[sta] = tmp[istart:iend]
                # do slice
                tmp = mig_dict[sta]
                mig_dict[sta] = tmp[istart:iend]

            # retrieve relevant portion of stack max
            istart = int(np.round(
                (o_time - opdict['plot_tbefore'] - stack_start_time) /
                delta))
            iend = istart + int(np.round(
                (opdict['plot_tbefore'] + opdict['plot_tafter']) / delta))

            # sanity check in case event is close to start or end of data
            if istart < 0:
                # shift plot start time by the missing samples
                # (bugfix: was undefined name 'dt'; sample spacing is
                # 'delta')
                start_time = start_time + np.abs(istart) * delta
                istart = 0
            if iend > len(max_val):
                iend = len(max_val)

            # do slice
            stack_wfm = max_val[istart:iend]

            # plot
            plotLocationWaveforms(loc, start_time, delta, data_dict,
                                  mig_dict, stack_wfm, figdir)

    f_stack.close()
def do_migration_setup_and_run(opdict):
    """
    Do setup and launch migration.

    Loops over overlapping time slices between ``starttime`` and
    ``endtime`` and migrates each slice onto the search grid.

    :param opdict: WavelocOptions.opdict
    """
    base_path = opdict['base_path']
    runtime = opdict['time']
    reloc = opdict['reloc']

    # data
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    if opdict['kderiv']:
        data_glob = opdict['gradglob']
        if opdict['gauss']:
            data_glob = opdict['gaussglob']
    else:
        data_glob = opdict['kurtglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    if len(data_files) == 0:
        logging.error('No data files found for %s and %s' %
                      (data_dir, data_glob))
        raise UserWarning

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])
    time_grids = get_interpolated_time_grids(opdict)

    # start and end times
    starttime = opdict['starttime']
    endtime = opdict['endtime']
    data_length = opdict['data_length']
    data_overlap = opdict['data_overlap']

    initial_start_time = utcdatetime.UTCDateTime(starttime)
    initial_end_time = initial_start_time + data_length

    final_end_time = utcdatetime.UTCDateTime(endtime)

    # successive slices overlap by data_overlap seconds
    time_shift_secs = data_length - data_overlap

    ######### FOR EACH TIME SPAN - DO MIGRATION #############
    # start loop over time
    start_time = initial_start_time
    end_time = initial_end_time

    if runtime:
        t_ref = time()

    while (start_time < final_end_time):
        # read data
        logging.info("Reading data : %s - %s." %
                     (start_time.isoformat(), end_time.isoformat()))
        data, delta = \
            read_data_compatible_with_time_dict(data_files, time_grids,
                                                start_time, end_time)

        if reloc:
            # zero out traces whose kurtosis signal-to-noise ratio is too
            # low, so they do not contribute to the relocation migration
            tr_glob = opdict['kurtglob']
            files = glob.glob(os.path.join(data_dir, tr_glob))
            traces, delta = \
                read_data_compatible_with_time_dict(files, time_grids,
                                                    start_time, end_time)
            sta_list = sorted(traces)
            for staname in sta_list:
                snr = np.max(traces[staname]) / np.mean(np.abs(
                    traces[staname]))
                if snr < opdict['reloc_snr']:
                    data[staname] = np.zeros(len(data[staname]))

        # re-read grid_info at each iteration to make sure it is a clean
        # copy
        grid_info = read_hdr_file(search_grid_filename)

        # do migration if have enough data (3 is bare minimum)
        if len(data.keys()) >= 3:
            logging.info("Migrating data : %s - %s." %
                         (start_time.isoformat(), end_time.isoformat()))
            do_migration_loop_continuous(opdict, data, delta, start_time,
                                         grid_info, time_grids)
        elif len(data.keys()) == 0:
            # logging.warn is a deprecated alias of logging.warning
            logging.warning('No data found between %s and %s.' %
                            (start_time.isoformat(), end_time.isoformat()))
        else:
            logging.warning('Insufficient data found between %s and %s.' %
                            (start_time.isoformat(), end_time.isoformat()))

        # Reset the start and end times to loop again
        start_time = start_time + time_shift_secs
        end_time = end_time + time_shift_secs

    if runtime:
        t = time() - t_ref
        logging.info("Time for migrating all time slices : %.2f s\n" % (t))
def do_locations_prob_setup_and_run(opdict):
    """
    Set up and run probability-based location on migration grids.

    Re-migrates the data around each triggered location, integrates the
    stack to obtain marginal probability densities, and writes the
    expected values and uncertainties to ``locations_prob.dat`` and the
    marginals to ``locations_prob.hdf5``.

    :param opdict: Dictionary of Waveloc parameters and options.
    """
    # get / set info
    base_path = opdict['base_path']
    space_only = opdict['probloc_spaceonly']

    locfile = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                           'locations.dat')
    locfile_prob = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                                'locations_prob.dat')
    locfile_hdf5 = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                                'locations_prob.hdf5')
    f_prob = open(locfile_prob, 'w')

    # if locfile does not exist then make it by running trigger location
    if not os.path.exists(locfile):
        logging.info(
            'No location found at %s. Running trigger location first...'
            % locfile)
        do_locations_trigger_setup_and_run(opdict)

    # data files (only gradient waveforms are migrated here)
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    grad_glob = opdict['gradglob']
    grad_files = glob.glob(os.path.join(data_dir, grad_glob))
    grad_files.sort()

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # prepare file for output of marginals
    f_marginals = h5py.File(locfile_hdf5, 'w')

    # iterate over locations
    for loc in locs:
        # create the appropriate grid on the fly

        # generate the grids
        o_time = loc['o_time']
        if space_only:
            start_time = o_time
            end_time = o_time
        else:
            start_time = o_time - 3 * loc['o_err_left']
            end_time = o_time + 3 * loc['o_err_right']

        # make a buffer for migration
        start_time_migration = start_time - 10.0
        end_time_migration = end_time + 10.0

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        # read data
        grad_dict, delta = read_data_compatible_with_time_dict(
            grad_files, time_grids, start_time_migration,
            end_time_migration)

        # do migration (all metadata on grid is added to grid_info)
        do_migration_loop_continuous(opdict, grad_dict, delta,
                                     start_time_migration, grid_info,
                                     time_grids, keep_grid=True)

        # integrate to get the marginal probability density distributions

        # get required info
        grid_starttime = grid_info['start_time']
        nx, ny, nz, nt = grid_info['grid_shape']
        dx, dy, dz, dt = grid_info['grid_spacing']
        x_orig, y_orig, z_orig = grid_info['grid_orig']

        # we are only interested in the time around the origin time of the
        # event (np.int was removed from numpy - use builtin int)
        it_left = int(np.round((start_time - grid_starttime) / dt))
        it_right = int(np.round((end_time - grid_starttime) / dt))
        it_true = int(np.round((o_time - grid_starttime) / dt))
        nt = (it_right - it_left) + 1

        # set up integration axes (wrt reference)
        x = np.arange(nx) * dx
        y = np.arange(ny) * dy
        z = np.arange(nz) * dz
        if not space_only:
            t = np.arange(nt) * dt

        # open the grid file
        grid_filename = grid_info['dat_file']
        f = h5py.File(grid_filename, 'r')
        stack_grid = f['stack_grid']

        # extract the portion of interest (copy data)
        if space_only:
            stack_3D = np.empty((nx, ny, nz))
            stack_3D[:] = stack_grid[:, it_true].reshape(nx, ny, nz)
        else:
            stack_4D = np.empty((nx, ny, nz, nt))
            stack_4D[:] = stack_grid[:, it_left:it_right + 1].reshape(
                nx, ny, nz, nt)

        # close the grid file
        f.close()

        # Get expected values (normalizes grid internally)
        if space_only:
            exp_x, exp_y, exp_z, cov_matrix, prob_dict = \
                compute_expected_coordinates3D(stack_3D, x, y, z,
                                               return_2Dgrids=True)
        else:
            exp_x, exp_y, exp_z, exp_t, cov_matrix, prob_dict = \
                compute_expected_coordinates4D(stack_4D, x, y, z, t,
                                               return_2Dgrids=True)

        # put reference location back
        exp_x = exp_x + x_orig
        exp_y = exp_y + y_orig
        exp_z = exp_z + z_orig
        if space_only:
            exp_t = o_time
        else:
            exp_t = start_time + exp_t

        # extract uncertainties from covariance matrix
        if space_only:
            sig_x, sig_y, sig_z = np.sqrt(np.diagonal(cov_matrix))
            sig_t = (loc['o_err_left'] + loc['o_err_right']) / 2.
        else:
            sig_x, sig_y, sig_z, sig_t = np.sqrt(np.diagonal(cov_matrix))

        # save the marginals to a hdf5 file in loc subdirectory
        # (f_marginals) - each event becomes a group in this one file
        grp = f_marginals.create_group(exp_t.isoformat())
        grp.create_dataset('x', data=x + x_orig)
        grp.create_dataset('y', data=y + y_orig)
        grp.create_dataset('z', data=z + z_orig)
        grp.create_dataset('prob_x', data=prob_dict['prob_x0'])
        grp.create_dataset('prob_y', data=prob_dict['prob_x1'])
        grp.create_dataset('prob_z', data=prob_dict['prob_x2'])
        grp.create_dataset('prob_xy', data=prob_dict['prob_x0_x1'])
        grp.create_dataset('prob_xz', data=prob_dict['prob_x0_x2'])
        grp.create_dataset('prob_yz', data=prob_dict['prob_x1_x2'])
        if not space_only:
            grp.create_dataset('t', data=t - (o_time - start_time))
            grp.create_dataset('prob_t', data=prob_dict['prob_x3'])
            grp.create_dataset('prob_xt', data=prob_dict['prob_x0_x3'])
            grp.create_dataset('prob_yt', data=prob_dict['prob_x1_x3'])
            grp.create_dataset('prob_zt', data=prob_dict['prob_x2_x3'])

        # write the expected values to a plain text locations file
        f_prob.write("PROB DENSITY : T = %s s pm %.2f s, x= %.4f pm %.4f km, \
y= %.4f pm %.4f km, z= %.4f pm %.4f km\n" % (exp_t.isoformat(), sig_t,
                     exp_x, sig_x, exp_y, sig_y, exp_z, sig_z))

    # close location files
    f_prob.close()
    f_marginals.close()
def do_locations_prob_setup_and_run(opdict):
    """
    Setup and run probability-based locations on migration grids.

    Takes all parameters from WavelocOptions.opdict.

    :param opdict: Parameters and options for Waveloc.
    """
    # get / set info
    base_path = opdict["base_path"]
    space_only = opdict["probloc_spaceonly"]

    locfile = os.path.join(base_path, "out", opdict["outdir"], "loc",
                           "locations.dat")
    locfile_prob = os.path.join(base_path, "out", opdict["outdir"], "loc",
                                "locations_prob.dat")
    locfile_hdf5 = os.path.join(base_path, "out", opdict["outdir"], "loc",
                                "locations_prob.hdf5")
    f_prob = open(locfile_prob, "w")

    # if locfile does not exist then make it by running trigger location
    if not os.path.exists(locfile):
        logging.info(
            "No location found at %s. Running trigger location first..."
            % locfile
        )
        do_locations_trigger_setup_and_run(opdict)

    # data files (only the gradient waveforms are migrated here)
    data_dir = os.path.join(base_path, "data", opdict["datadir"])
    grad_glob = opdict["gradglob"]
    grad_files = glob.glob(os.path.join(data_dir, grad_glob))
    grad_files.sort()

    # grids
    search_grid_filename = os.path.join(base_path, "lib",
                                        opdict["search_grid"])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # prepare file for output of marginals
    f_marginals = h5py.File(locfile_hdf5, "w")

    # iterate over locations
    for loc in locs:
        # create the appropriate grid on the fly

        # generate the grids
        o_time = loc["o_time"]
        if space_only:
            start_time = o_time
            end_time = o_time
        else:
            start_time = o_time - 3 * loc["o_err_left"]
            end_time = o_time + 3 * loc["o_err_right"]

        # make a buffer for migration
        start_time_migration = start_time - 10.0
        end_time_migration = end_time + 10.0

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        # read data
        grad_dict, delta = read_data_compatible_with_time_dict(
            grad_files, time_grids, start_time_migration, end_time_migration
        )

        # do migration (all metadata on grid is added to grid_info)
        do_migration_loop_continuous(
            opdict, grad_dict, delta, start_time_migration, grid_info,
            time_grids, keep_grid=True
        )

        # integrate to get the marginal probability density distributions

        # get required info
        grid_starttime = grid_info["start_time"]
        nx, ny, nz, nt = grid_info["grid_shape"]
        dx, dy, dz, dt = grid_info["grid_spacing"]
        x_orig, y_orig, z_orig = grid_info["grid_orig"]

        # we are only interested in the time around the origin time of the
        # event (np.int was removed from numpy - use builtin int)
        it_left = int(np.round((start_time - grid_starttime) / dt))
        it_right = int(np.round((end_time - grid_starttime) / dt))
        it_true = int(np.round((o_time - grid_starttime) / dt))
        nt = (it_right - it_left) + 1

        # set up integration axes (wrt reference)
        x = np.arange(nx) * dx
        y = np.arange(ny) * dy
        z = np.arange(nz) * dz
        if not space_only:
            t = np.arange(nt) * dt

        # open the grid file
        grid_filename = grid_info["dat_file"]
        f = h5py.File(grid_filename, "r")
        stack_grid = f["stack_grid"]

        # extract the portion of interest (copy data)
        if space_only:
            stack_3D = np.empty((nx, ny, nz))
            stack_3D[:] = stack_grid[:, it_true].reshape(nx, ny, nz)
        else:
            stack_4D = np.empty((nx, ny, nz, nt))
            stack_4D[:] = stack_grid[:, it_left : it_right + 1].reshape(
                nx, ny, nz, nt)

        # close the grid file
        f.close()

        # Get expected values (normalizes grid internally)
        if space_only:
            exp_x, exp_y, exp_z, cov_matrix, prob_dict = \
                compute_expected_coordinates3D(
                    stack_3D, x, y, z, return_2Dgrids=True)
        else:
            exp_x, exp_y, exp_z, exp_t, cov_matrix, prob_dict = \
                compute_expected_coordinates4D(
                    stack_4D, x, y, z, t, return_2Dgrids=True)

        # put reference location back
        exp_x = exp_x + x_orig
        exp_y = exp_y + y_orig
        exp_z = exp_z + z_orig
        if space_only:
            exp_t = o_time
        else:
            exp_t = start_time + exp_t

        # extract uncertainties from covariance matrix
        if space_only:
            sig_x, sig_y, sig_z = np.sqrt(np.diagonal(cov_matrix))
            sig_t = (loc["o_err_left"] + loc["o_err_right"]) / 2.0
        else:
            sig_x, sig_y, sig_z, sig_t = np.sqrt(np.diagonal(cov_matrix))

        # save the marginals to a hdf5 file in loc subdirectory
        # (f_marginals) - each event becomes a group in this one file
        grp = f_marginals.create_group(exp_t.isoformat())
        grp.create_dataset("x", data=x + x_orig)
        grp.create_dataset("y", data=y + y_orig)
        grp.create_dataset("z", data=z + z_orig)
        grp.create_dataset("prob_x", data=prob_dict["prob_x0"])
        grp.create_dataset("prob_y", data=prob_dict["prob_x1"])
        grp.create_dataset("prob_z", data=prob_dict["prob_x2"])
        grp.create_dataset("prob_xy", data=prob_dict["prob_x0_x1"])
        grp.create_dataset("prob_xz", data=prob_dict["prob_x0_x2"])
        grp.create_dataset("prob_yz", data=prob_dict["prob_x1_x2"])
        if not space_only:
            grp.create_dataset("t", data=t - (o_time - start_time))
            grp.create_dataset("prob_t", data=prob_dict["prob_x3"])
            grp.create_dataset("prob_xt", data=prob_dict["prob_x0_x3"])
            grp.create_dataset("prob_yt", data=prob_dict["prob_x1_x3"])
            grp.create_dataset("prob_zt", data=prob_dict["prob_x2_x3"])

        # write the expected values to a plain text locations file
        f_prob.write(
            "PROB DENSITY : T = %s s pm %.2f s, x= %.4f pm %.4f km, \
y= %.4f pm %.4f km, z= %.4f pm %.4f km\n"
            % (exp_t.isoformat(), sig_t, exp_x, sig_x, exp_y, sig_y,
               exp_z, sig_z)
        )

    # close location files
    f_prob.close()
    f_marginals.close()
def do_migration_setup_and_run(opdict):
    """
    Do setup and launch migration.

    Loops over overlapping time slices between ``starttime`` and
    ``endtime`` and migrates each slice onto the search grid.

    :param opdict: WavelocOptions.opdict
    """
    base_path = opdict['base_path']
    runtime = opdict['time']
    reloc = opdict['reloc']

    # data
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])
    if opdict['kderiv']:
        data_glob = opdict['gradglob']
        if opdict['gauss']:
            data_glob = opdict['gaussglob']
    else:
        data_glob = opdict['kurtglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    if len(data_files) == 0:
        logging.error('No data files found for %s and %s' %
                      (data_dir, data_glob))
        raise UserWarning

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])
    time_grids = get_interpolated_time_grids(opdict)

    # start and end times
    starttime = opdict['starttime']
    endtime = opdict['endtime']
    data_length = opdict['data_length']
    data_overlap = opdict['data_overlap']

    initial_start_time = utcdatetime.UTCDateTime(starttime)
    initial_end_time = initial_start_time + data_length

    final_end_time = utcdatetime.UTCDateTime(endtime)

    # successive slices overlap by data_overlap seconds
    time_shift_secs = data_length - data_overlap

    ######### FOR EACH TIME SPAN - DO MIGRATION #############
    # start loop over time
    start_time = initial_start_time
    end_time = initial_end_time

    if runtime:
        t_ref = time()

    while (start_time < final_end_time):
        # read data
        # (removed stray Python-2 debug statement 'print len(data_files)'
        # which is a syntax error under Python 3)
        logging.info("Reading data : %s - %s." %
                     (start_time.isoformat(), end_time.isoformat()))
        data, delta = \
            read_data_compatible_with_time_dict(data_files, time_grids,
                                                start_time, end_time)

        if reloc:
            # zero out traces whose kurtosis signal-to-noise ratio is too
            # low, so they do not contribute to the relocation migration
            tr_glob = opdict['kurtglob']
            files = glob.glob(os.path.join(data_dir, tr_glob))
            traces, delta = \
                read_data_compatible_with_time_dict(files, time_grids,
                                                    start_time, end_time)
            sta_list = sorted(traces)
            for staname in sta_list:
                snr = np.max(traces[staname]) / np.mean(np.abs(
                    traces[staname]))
                if snr < opdict['reloc_snr']:
                    data[staname] = np.zeros(len(data[staname]))

        # re-read grid_info at each iteration to make sure it is a clean
        # copy
        grid_info = read_hdr_file(search_grid_filename)

        # do migration if have enough data (3 is bare minimum)
        if len(data.keys()) >= 3:
            logging.info("Migrating data : %s - %s." %
                         (start_time.isoformat(), end_time.isoformat()))
            do_migration_loop_continuous(opdict, data, delta, start_time,
                                         grid_info, time_grids)
        elif len(data.keys()) == 0:
            # logging.warn is a deprecated alias of logging.warning
            logging.warning('No data found between %s and %s.' %
                            (start_time.isoformat(), end_time.isoformat()))
        else:
            logging.warning('Insufficient data found between %s and %s.' %
                            (start_time.isoformat(), end_time.isoformat()))

        # Reset the start and end times to loop again
        start_time = start_time + time_shift_secs
        end_time = end_time + time_shift_secs

    if runtime:
        t = time() - t_ref
        logging.info("Time for migrating all time slices : %.2f s\n" % (t))
def do_plotting_setup_and_run(opdict, plot_wfm=True, plot_grid=True):
    """
    Plot the results of a wavloc run (migration and location). All options
    and parameters are taken from an opdict.

    :param opdict: WavlocOptions.opdict that contains the options /
        parameters.
    :param plot_wfm: If ``True`` plots waveforms after location (filtered
        data and kurtosis).
    :param plot_grid: If ``True`` plots the migration grid.

    :type plot_wfm: boolean
    :type plot_grid: boolean
    """
    # get / set info
    base_path = opdict['base_path']

    locfile = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                           'locations.dat')
    stackfile = os.path.join(base_path, 'out', opdict['outdir'], 'stack',
                             'combined_stack_all.hdf5')

    data_dir = os.path.join(base_path, 'data', opdict['datadir'])

    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    kurt_glob = opdict['kurtglob']
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    kurt_files.sort()
    mig_files = kurt_files

    # gradient (and optionally gaussian) waveforms take precedence over
    # plain kurtosis for migration when they were computed
    if opdict['kderiv']:
        grad_glob = opdict['gradglob']
        grad_files = glob.glob(os.path.join(data_dir, grad_glob))
        grad_files.sort()
        mig_files = grad_files

        if opdict['gauss']:
            gauss_glob = opdict['gaussglob']
            gauss_files = glob.glob(os.path.join(data_dir, gauss_glob))
            gauss_files.sort()
            mig_files = gauss_files

    figdir = os.path.join(base_path, 'out', opdict['outdir'], 'fig')

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # open stack file
    f_stack = h5py.File(stackfile, 'r')
    max_val = f_stack['max_val_smooth']
    stack_start_time = UTCDateTime(max_val.attrs['start_time'])

    for loc in locs:
        # generate the grids
        o_time = loc['o_time']
        start_time = o_time-opdict['plot_tbefore']
        end_time = o_time+opdict['plot_tafter']

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        x = loc['x_mean']
        y = loc['y_mean']
        z = loc['z_mean']

        # get the corresponding travel-times for time-shifting
        ttimes = {}
        for sta in time_grids.keys():
            ttimes[sta] = time_grids[sta].value_at_point(x, y, z)
        tshift_migration = max(ttimes.values())

        start_time_migration = start_time-tshift_migration
        end_time_migration = end_time+tshift_migration

        if plot_grid:
            logging.info('Plotting grid for location %s' %
                         o_time.isoformat())

            # read data
            mig_dict, delta = \
                read_data_compatible_with_time_dict(mig_files, time_grids,
                                                    start_time_migration,
                                                    end_time_migration)

            # do migration
            do_migration_loop_continuous(opdict, mig_dict, delta,
                                         start_time_migration, grid_info,
                                         time_grids, keep_grid=True)

            # plot
            plotLocationGrid(loc, grid_info, figdir,
                             opdict['plot_otime_window'])

        if plot_wfm:
            logging.info('Plotting waveforms for location %s' %
                         o_time.isoformat())

            # read data
            data_dict, delta = \
                read_data_compatible_with_time_dict(data_files, time_grids,
                                                    start_time_migration,
                                                    end_time_migration)
            mig_dict, delta = \
                read_data_compatible_with_time_dict(mig_files, time_grids,
                                                    start_time_migration,
                                                    end_time_migration)

            # cut desired portion out of data
            for sta in data_dict.keys():
                tmp = data_dict[sta]

                # alignment on origin time
                # (np.int was removed from numpy - use builtin int)
                istart = int(np.round((start_time + ttimes[sta] -
                                       start_time_migration) / delta))
                iend = istart + int(np.round((opdict['plot_tbefore'] +
                                              opdict['plot_tafter']) /
                                             delta))

                # sanity check in case event is close to start or end of
                # data
                if istart < 0:
                    istart = 0
                if iend > len(tmp):
                    iend = len(tmp)
                data_dict[sta] = tmp[istart:iend]
                # do slice
                tmp = mig_dict[sta]
                mig_dict[sta] = tmp[istart:iend]

            # retrieve relevant portion of stack max
            istart = int(np.round((o_time - opdict['plot_tbefore'] -
                                   stack_start_time) / delta))
            iend = istart + int(np.round((opdict['plot_tbefore'] +
                                          opdict['plot_tafter']) / delta))

            # sanity check in case event is close to start or end of data
            if istart < 0:
                start_time = start_time + np.abs(istart)*delta
                istart = 0
            if iend > len(max_val):
                iend = len(max_val)

            # do slice
            stack_wfm = max_val[istart:iend]

            # plot
            plotLocationWaveforms(loc, start_time, delta, data_dict,
                                  mig_dict, stack_wfm, figdir)

    f_stack.close()
def do_plotting_setup_and_run(opdict, plot_wfm=True, plot_grid=True):
    """
    Plot the results of a waveloc run (migration grid and waveforms).

    :param opdict: Dictionary of Waveloc options / parameters.
    :param plot_wfm: If True, plot waveforms after location.
    :param plot_grid: If True, plot the migration grid.
    :type plot_wfm: boolean
    :type plot_grid: boolean
    """
    # get / set info
    base_path = opdict['base_path']

    locfile = os.path.join(base_path, 'out', opdict['outdir'], 'loc',
                           'locations.dat')
    stackfile = os.path.join(base_path, 'out', opdict['outdir'], 'stack',
                             'combined_stack_all.hdf5')
    data_dir = os.path.join(base_path, 'data', opdict['datadir'])

    data_glob = opdict['dataglob']
    data_files = glob.glob(os.path.join(data_dir, data_glob))
    data_files.sort()

    kurt_glob = opdict['kurtglob']
    kurt_files = glob.glob(os.path.join(data_dir, kurt_glob))
    kurt_files.sort()
    mig_files = kurt_files

    # gradient (and optionally gaussian) waveforms take precedence over
    # plain kurtosis for migration when they were computed
    if opdict['kderiv']:
        grad_glob = opdict['gradglob']
        grad_files = glob.glob(os.path.join(data_dir, grad_glob))
        grad_files.sort()
        mig_files = grad_files

        if opdict['gauss']:
            gauss_glob = opdict['gaussglob']
            gauss_files = glob.glob(os.path.join(data_dir, gauss_glob))
            gauss_files.sort()
            mig_files = gauss_files

    figdir = os.path.join(base_path, 'out', opdict['outdir'], 'fig')

    # grids
    search_grid_filename = os.path.join(base_path, 'lib',
                                        opdict['search_grid'])

    # read time grid information
    time_grids = get_interpolated_time_grids(opdict)

    # read locations
    locs = read_locs_from_file(locfile)

    # open stack file
    f_stack = h5py.File(stackfile, 'r')
    max_val = f_stack['max_val_smooth']
    stack_start_time = UTCDateTime(max_val.attrs['start_time'])

    for loc in locs:
        # generate the grids
        o_time = loc['o_time']
        start_time = o_time - opdict['plot_tbefore']
        end_time = o_time + opdict['plot_tafter']

        # re-read grid info to ensure clean copy
        grid_info = read_hdr_file(search_grid_filename)

        x = loc['x_mean']
        y = loc['y_mean']
        z = loc['z_mean']

        # get the corresponding travel-times for time-shifting
        ttimes = {}
        for sta in time_grids.keys():
            ttimes[sta] = time_grids[sta].value_at_point(x, y, z)
        tshift_migration = max(ttimes.values())

        start_time_migration = start_time - tshift_migration
        end_time_migration = end_time + tshift_migration

        if plot_grid:
            logging.info('Plotting grid for location %s' %
                         o_time.isoformat())
            # TODO implement a rough estimation of the stack shift based
            # on propagation time across the whole network

            # read data
            mig_dict, delta = read_data_compatible_with_time_dict(
                mig_files, time_grids, start_time_migration,
                end_time_migration)

            # do migration
            do_migration_loop_continuous(opdict, mig_dict, delta,
                                         start_time_migration, grid_info,
                                         time_grids, keep_grid=True)

            # plot
            plotLocationGrid(loc, grid_info, figdir,
                             opdict['plot_otime_window'])

        if plot_wfm:
            logging.info('Plotting waveforms for location %s' %
                         o_time.isoformat())

            # read data
            data_dict, delta = read_data_compatible_with_time_dict(
                data_files, time_grids, start_time_migration,
                end_time_migration)
            mig_dict, delta = read_data_compatible_with_time_dict(
                mig_files, time_grids, start_time_migration,
                end_time_migration)

            # cut desired portion out of data
            for sta in data_dict.keys():
                tmp = data_dict[sta]
                # align waveform on origin time using its travel-time
                # (np.int was removed from numpy - use builtin int)
                istart = int(np.round(
                    (start_time + ttimes[sta] - start_time_migration) /
                    delta))
                iend = istart + int(np.round(
                    (opdict['plot_tbefore'] + opdict['plot_tafter']) /
                    delta))

                # sanity check in case event is close to start or end of
                # data
                if istart < 0:
                    istart = 0
                if iend > len(tmp):
                    iend = len(tmp)
                data_dict[sta] = tmp[istart:iend]
                # do slice
                tmp = mig_dict[sta]
                mig_dict[sta] = tmp[istart:iend]

            # retrieve relevant portion of stack max
            istart = int(np.round(
                (o_time - opdict['plot_tbefore'] - stack_start_time) /
                delta))
            iend = istart + int(np.round(
                (opdict['plot_tbefore'] + opdict['plot_tafter']) / delta))

            # sanity check in case event is close to start or end of data
            if istart < 0:
                # shift plot start time by the missing samples
                # (bugfix: was undefined name 'dt'; sample spacing is
                # 'delta')
                start_time = start_time + np.abs(istart) * delta
                istart = 0
            if iend > len(max_val):
                iend = len(max_val)

            # do slice
            stack_wfm = max_val[istart:iend]

            # plot
            plotLocationWaveforms(loc, start_time, delta, data_dict,
                                  mig_dict, stack_wfm, figdir)

    f_stack.close()