def __init__(self, sac_files, lat_min=-90, lat_max=90,
             lon_min=-180, lon_max=180, dep_min=0, dep_max=800,
             Mw_min=0, Mw_max=10, dist_min=0, dist_max=180,
             n_within_dist=10, start_date=datetime(1970, 1, 1),
             end_date=datetime(2050, 1, 1)):
    self.sac_files = sac_files
    self.lat_min = lat_min
    self.lat_max = lat_max
    self.lon_min = lon_min
    self.lon_max = lon_max
    self.dep_min = dep_min
    self.dep_max = dep_max
    self.Mw_min = Mw_min
    self.Mw_max = Mw_max
    self.dist_min = dist_min
    self.dist_max = dist_max
    self.n_within_dist = n_within_dist
    self.start_date = start_date
    self.end_date = end_date
    self.dataset = Dataset.dataset_from_sac(sac_files, headonly=True)
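# Usage sketch (hedged): this __init__ belongs to a SAC-selection helper whose
# class name is not shown in this excerpt; 'EventSelector' below is a
# hypothetical placeholder for it, and the glob pattern and filter values are
# illustrative only.
#
#   import glob
#   from datetime import datetime
#
#   sac_files = glob.glob('DATA/*/*T')
#   selector = EventSelector(
#       sac_files,
#       dep_min=100, dep_max=700,          # intermediate-to-deep events
#       Mw_min=5.5, Mw_max=7.0,
#       dist_min=10, dist_max=90,          # epicentral distance in degrees
#       start_date=datetime(2000, 1, 1))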
def get_dataset(model, tlen=1638.4, nspc=64, sampling_hz=20, mode=0,
                add_noise=False, noise_normalized_std=1.):
    # TODO: fix outputs.us = NaN when event.latitude == station.latitude
    event = get_ref_event()
    events = [event]
    stations = [
        Station('{:03d}'.format(i), 'DSM',
                event.latitude + 5 + 0.5 * i, event.longitude + 0.1)
        for i in range(61)
    ]
    dataset = Dataset.dataset_from_arrays(
        events, [stations], sampling_hz=sampling_hz)
    pydsm_input = PyDSMInput.input_from_arrays(
        event, stations, model, tlen, nspc, sampling_hz)
    pydsm_output = compute(pydsm_input, mode=mode)
    pydsm_output.to_time_domain()
    dataset.data = np.zeros((1,) + pydsm_output.us.shape, dtype=np.float64)
    dataset.data[0] = pydsm_output.us

    if add_noise:
        noise_arr = white_noise(noise_normalized_std, dataset.data.shape)
        # scale the noise by the maximum absolute amplitude of the first
        # 90% of the time samples (slice the time axis, not the station axis)
        npts_cut = int(dataset.data.shape[3] * 0.9)
        norm = np.abs(dataset.data[:, :, :, :npts_cut]).max(
            axis=3, keepdims=True)
        noise_arr *= norm
        dataset.data += noise_arr

    return dataset, pydsm_output
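# Minimal usage sketch for get_dataset() (not part of the original module).
# It assumes SeismicModel is importable from dsmpy.seismicmodel and uses the
# model_from_name() factory that appears elsewhere in these scripts; the
# nspc and noise values are illustrative only.
if __name__ == '__main__':
    from dsmpy.seismicmodel import SeismicModel

    model = SeismicModel.model_from_name('prem')
    dataset, pydsm_output = get_dataset(
        model, nspc=64, mode=0,
        add_noise=True, noise_normalized_std=1.)
    # expected shape: (1 event, 3 components, n_stations, n_points)
    print(dataset.data.shape)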
def main(path):
    """Compute synthetics in parallel.

    Args:
        path (str): path to an input file.

    """
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()

    # open log file
    log = open('log', 'w', buffering=1)

    if rank == 0:
        input_file = PyDSMInputFile(path)
        params = input_file.read()
        seismic_model = SeismicModel.model_from_name(params['seismic_model'])
        tlen = params['tlen']
        nspc = params['nspc']
        sampling_hz = params['sampling_hz']
        mode = params['mode']
        verbose = params['verbose']

        start_time = time.time()
        dataset = Dataset.dataset_from_sac(
            params['sac_files'], verbose=verbose)
        end_time = time.time()
        if verbose >= 1:
            log.write('Initializing dataset finished in {} s\n'.format(
                end_time - start_time))
    else:
        params = None
        dataset = None
        seismic_model = None
        tlen = None
        nspc = None
        sampling_hz = None
        mode = None
        verbose = None

    # run pydsm
    start_time = time.time()
    outputs = dsm.compute_dataset_parallel(
        dataset, seismic_model, tlen, nspc, sampling_hz,
        mode=mode, verbose=verbose, log=log)
    end_time = time.time()
    log.write('rank {}: DSM finished in {} s\n'.format(
        rank, end_time - start_time))

    if rank == 0:
        start_time = time.time()
        for output in outputs:
            output.set_source_time_function(None)
            output.to_time_domain()
            output.write(params['output_folder'], format='sac')
        end_time = time.time()
        log.write('rank {}: finished FFT and writing in {} s\n'.format(
            rank, end_time - start_time))

    log.close()
    return "Done!"
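# Entry-point sketch (an assumption, not shown in the original file): main()
# is meant to be launched under MPI, e.g.
#   mpiexec -n 8 python <this_script> <input_file>
# with the path to the PyDSM input file as the only command-line argument.
if __name__ == '__main__':
    import sys
    main(sys.argv[1])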
import glob

import dsmpy.utils.scardec as scardec
from dsmpy.utils.cmtcatalog import read_catalog
from dsmpy.dataset import Dataset

if __name__ == '__main__':
    sac_files = glob.glob(
        '/mnt/doremi/anpan/inversion/MTZ_JAPAN/DATA/2*/*Z')
    dataset = Dataset.dataset_from_sac(sac_files)
    for event in dataset.events:
        print(event)
        duration_scardec = scardec.get_duration(event)
        if duration_scardec is None:
            print('Scardec STF not found for {}'.format(event))
            continue
        print('scardec_duration, gcmt_duration: {} {}'.format(
            duration_scardec,
            2 * event.source_time_function.half_duration))
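    # Hedged extension (not part of the original script): summarise how well
    # the two catalogs agree by averaging the SCARDEC/GCMT duration ratio
    # over the events for which a SCARDEC STF was found.
    ratios = []
    for event in dataset.events:
        duration_scardec = scardec.get_duration(event)
        if duration_scardec is not None:
            gcmt_duration = 2 * event.source_time_function.half_duration
            ratios.append(duration_scardec / gcmt_duration)
    if ratios:
        print('mean scardec/gcmt duration ratio: {:.2f}'.format(
            sum(ratios) / len(ratios)))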
distance_min = 10.
distance_max = 90.
dir_syn = '.'
t_before = 10.
t_after = 20.
buffer = 10.
catalog_path = 'stf_catalog.txt'
n_distinct_comp_phase = 1

for sac_files in sac_files_iterator(
        '/home/anselme/Dropbox/Kenji/MTZ_JAPAN/DATA/20*/*T'):
    logging.info('{} num sacs = {}\n'.format(rank, len(sac_files)))
    if rank == 0:
        logging.info('{} reading dataset\n'.format(rank))
        dataset = Dataset.dataset_from_sac(sac_files, headonly=False)

        logging.info('{} computing time windows\n'.format(rank))
        windows_S = WindowMaker.windows_from_dataset(
            dataset, 'prem', ['S'], [Component.T],
            t_before=t_before, t_after=t_after)
        windows = windows_S
        windows = [
            window for window in windows
            if (distance_min <= window.get_epicentral_distance()
                <= distance_max)
        ]
    else:
        dataset = None
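    # Hedged continuation (not in the original fragment): rank comes from
    # mpi4py (comm = MPI.COMM_WORLD; rank = comm.Get_rank(), as in the other
    # scripts here); the time windows built on rank 0 would typically be
    # broadcast to all ranks before any parallel work on this batch of files.
    windows = comm.bcast(windows if rank == 0 else None, root=0)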
from dsmpy.dataset import Dataset
from dsmpy import rootdsm_psv, rootdsm_sh

if __name__ == '__main__':
    parameter_files = [rootdsm_psv + 'test2.inf', rootdsm_psv + 'test3.inf']
    dataset = Dataset.dataset_from_files(parameter_files)
    counts, displacements = dataset.get_chunks_station(2)
    counts_eq, displacements_eq = dataset.get_chunks_eq(2)

    parameter_files_sh = [rootdsm_sh + 'AK135_SH.inf']
    dataset_sh = Dataset.dataset_from_files(parameter_files_sh, mode=1)
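    # Hedged usage note (not part of the original script): the counts and
    # displacements returned by get_chunks_station()/get_chunks_eq() are the
    # per-process element counts and offsets that MPI Scatterv-style calls
    # expect when splitting the dataset over (here) 2 processes.
    print('station chunks: counts={}, displacements={}'.format(
        counts, displacements))
    print('event chunks: counts={}, displacements={}'.format(
        counts_eq, displacements_eq))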