def execute_plot(tensor_info, data_type, segments_data, default_dirs,
                 velmodel=None, plot_input=False):
    """Plot modelling results: slip distribution, data misfit, and maps.

    :param tensor_info: dictionary with moment tensor properties
    :param data_type: set with data types to be used in modelling
    :param segments_data: dictionary with fault segments and rise time data
    :param default_dirs: dictionary with default directories to be used
    :param velmodel: dictionary with velocity model; if falsy, a model is
        selected automatically from the tensor information
    :param plot_input: choose whether to plot initial kinematic model as well
    :type tensor_info: dict
    :type data_type: set
    :type segments_data: dict
    :type default_dirs: dict
    :type velmodel: dict, optional
    :type plot_input: bool, optional
    """
    print('Plot results')
    segments = segments_data['segments']
    rise_time = segments_data['rise_time']
    solution = get_outputs.read_solution_static_format(segments)
    if not velmodel:
        velmodel = mv.select_velmodel(tensor_info, default_dirs)
    point_sources = pf.point_sources_param(segments, tensor_info, rise_time)
    shear = pf.shear_modulous(point_sources, velmodel=velmodel)
    plot.plot_ffm_sol(tensor_info, segments_data, point_sources, shear,
                      solution, velmodel, default_dirs)
    plot.plot_misfit(data_type)
    # plot.plot_beachballs(tensor_info, segments, data_type)
    traces_info = None
    stations_gps = None
    if 'strong_motion' in data_type:
        # fix: close the JSON file instead of leaking the handle
        with open('strong_motion_waves.json') as inptfile:
            traces_info = json.load(inptfile)
    if 'gps' in data_type:
        names, lats, lons, observed, synthetic, error\
            = get_outputs.retrieve_gps()
        stations_gps = zip(names, lats, lons, observed, synthetic, error)
    if 'strong_motion' in data_type or 'gps' in data_type:
        plot._PlotMap(tensor_info, segments, point_sources, solution,
                      default_dirs, files_str=traces_info,
                      stations_gps=stations_gps)
    if plot_input:
        # Compare the inverted solution against the initial kinematic model.
        input_model = load_ffm_model(segments_data, option='Fault.time')
        plot._PlotSlipDist_Compare(segments, point_sources, input_model,
                                   solution)
        plot._PlotComparisonMap(tensor_info, segments, point_sources,
                                input_model, solution)
def _automatic2(tensor_info, plane_data, data_type, data_prop, default_dirs,
                logger, velmodel=None):
    """Run automatic FFM modelling for a single nodal plane.

    Creates the JSON and text input files, runs the inversion, plots the
    results, writes the solution in FSP format, and copies plot images into
    the shared ``plots`` folder.

    :param tensor_info: dictionary with moment tensor properties
    :param plane_data: dictionary with fault plane mechanism
    :param data_type: list with data types to be used in modelling
    :param data_prop: dictionary with properties for different waveform types
    :param default_dirs: dictionary with default directories to be used
    :param logger: logger used for progress messages
    :param velmodel: dictionary with velocity model
    :type tensor_info: dict
    :type plane_data: dict
    :type data_type: list
    :type data_prop: dict
    :type default_dirs: dict
    :type velmodel: dict, optional
    """
    #
    # Create JSON files
    #
    logger.info('Create input files for Fortran scripts')
    logger.info('Create automatic JSON')
    tensor.write_tensor(tensor_info)
    # fix: single if/else instead of two complementary condition checks
    if velmodel:
        mv.velmodel2json(velmodel)
    else:
        velmodel = mv.select_velmodel(tensor_info, default_dirs)
    np_plane_info = plane_data['plane_info']
    data_folder = os.path.join('..', 'data')
    dm.filling_data_dicts(tensor_info, data_type, data_prop, data_folder)
    segments_data = pf.create_finite_fault(
        tensor_info, np_plane_info, data_type)
    segments = segments_data['segments']
    mp.modelling_prop(tensor_info, segments_data, data_type=data_type)
    #
    # write text files from JSONs
    #
    # Allowed rupture velocities span [0.4, 1.25] times the nominal rupture
    # velocity of the first segment.
    rupt_vel = segments[0]['rupture_vel']
    min_vel = 0.4 * rupt_vel
    max_vel = 1.25 * rupt_vel
    logger.info('Write input files')
    writing_inputs(tensor_info, data_type, segments_data, min_vel, max_vel)
    #
    # Modelling and plotting results
    #
    inversion(tensor_info, data_type, default_dirs, logger)
    logger.info('Plot data in folder {}'.format(os.getcwd()))
    execute_plot(tensor_info, data_type, segments_data, default_dirs,
                 velmodel=velmodel)
    base = os.path.basename(os.getcwd())
    dirname = os.path.abspath(os.getcwd())
    #
    # write solution in FSP format
    #
    # NOTE(review): leftover from a commented-out unpacking; the call's
    # return value is discarded — confirm whether it can be removed entirely.
    pl_mng.__read_planes_info()
    solution = get_outputs.read_solution_static_format(segments)
    static_to_fsp(tensor_info, segments_data, data_type, velmodel, solution)
    # Copy plot images into the shared 'plots' folder, when present.
    for file in glob.glob('*png'):
        if os.path.isfile(os.path.join(dirname, base, file)):
            copy2(os.path.join(dirname, base, file),
                  os.path.join(dirname, 'plots'))
def automatic_usgs(tensor_info, data_type, default_dirs, velmodel=None,
                   dt_cgps=1.0):
    """Run automatic FFM modelling for both nodal planes.

    Processes the waveform data, computes Green's-function banks, distributes
    the shared input files to the ``NP1``/``NP2`` folders, and launches
    ``_automatic2`` for each nodal plane in a separate process.

    :param tensor_info: dictionary with moment tensor properties
    :param data_type: list with data types to be used in modelling
    :param default_dirs: dictionary with default directories to be used
    :param velmodel: dictionary with velocity model
    :param dt_cgps: sampling interval for cgps data
    :type tensor_info: dict
    :type data_type: list
    :type default_dirs: dict
    :type velmodel: dict, optional
    :type dt_cgps: float, optional
    """
    logger = ml.create_log('automatic_ffm',
                           os.path.join('logs', 'automatic_ffm.log'))
    logger = ml.add_console_handler(logger)
    logger.info('Starting fff program')
    # fix: single assignment instead of assign-then-reassign
    sol_folder = os.path.abspath(os.getcwd())
    time0 = time.time()
    data_prop = tp.properties_json(tensor_info, dt_cgps=dt_cgps)
    os.chdir(os.path.join(sol_folder, 'data'))
    time2 = time.time()
    logger.info('Process data')
    processing(tensor_info, data_type, data_prop)
    time2 = time.time() - time2
    logger.info('Time spent processing traces: {}'.format(time2))
    os.chdir(sol_folder)
    logger.info('Compute GF bank')
    if not velmodel:
        velmodel = mv.select_velmodel(tensor_info, default_dirs)
    input_files.write_velmodel(velmodel)
    gf_bank_str = os.path.join(sol_folder, 'GF_strong')
    gf_bank_cgps = os.path.join(sol_folder, 'GF_cgps')
    get_gf_bank = default_dirs['strong_motion_gf_bank2']
    if 'cgps' in data_type:
        logger.info('Compute cGPS GF bank')
        green_dict = gf.fk_green_fun1(data_prop, tensor_info, gf_bank_cgps,
                                      cgps=True)
        input_files.write_green_file(green_dict, cgps=True)
        with open(os.path.join('logs', 'GF_cgps_log'), "w") as out_gf_cgps:
            p1 = subprocess.Popen([get_gf_bank, 'cgps'], stdout=out_gf_cgps)
            p1.wait()
    if 'strong_motion' in data_type:
        logger.info('Compute strong motion GF bank')
        green_dict = gf.fk_green_fun1(data_prop, tensor_info, gf_bank_str)
        input_files.write_green_file(green_dict)
        with open(os.path.join('logs', 'GF_strong_log'), "w") as out_gf_strong:
            p2 = subprocess.Popen([get_gf_bank, ], stdout=out_gf_strong)
            p2.wait()
    # Shared input files that each nodal-plane folder needs a copy of.
    files = [
        'Green.in',
        'Green_cgps.in',
        'modelling_stats.json',
        os.path.join('data', 'gps_data'),
        'strong_motion_gf.json',
        'cgps_gf.json',
        'sampling_filter.json',
    ]
    for folder in ['NP1', 'NP2']:
        for file in files:
            if os.path.isfile(file):
                copy2(file, folder)
    info_np1, info_np2 = tensor.planes_from_tensor(tensor_info)
    keywords = {'velmodel': velmodel}
    os.chdir(os.path.join(sol_folder, 'NP1'))
    p1 = Process(target=_automatic2,
                 args=(tensor_info, info_np1, data_type, data_prop,
                       default_dirs, logger),
                 kwargs=keywords)
    p1.start()
    os.chdir(os.path.join(sol_folder, 'NP2'))
    p2 = Process(target=_automatic2,
                 args=(tensor_info, info_np2, data_type, data_prop,
                       default_dirs, logger),
                 kwargs=keywords)
    p2.start()
    # fix: plain loop instead of a side-effect list comprehension
    for proc in (p1, p2):
        proc.join()
    logger.info('Time spent: {}'.format(time.time() - time0))
    ml.close_log(logger)
# Assemble the list of data types requested on the command line
# (fix: conditional appends instead of the `x = x + [...] if cond else x`
# rebuild chain).
used_data = []
if args.strong:
    used_data.append('strong_motion')
if args.cgps:
    used_data.append('cgps')
if args.tele:
    used_data.append('tele_body')
if args.surface:
    used_data.append('surf_tele')
default_dirs = mng.default_dirs()
if args.gcmt_tensor:
    cmt_file = args.gcmt_tensor
    tensor_info = tensor.get_tensor(cmt_file=cmt_file)
else:
    tensor_info = tensor.get_tensor()
segments, rise_time, point_sources = pl_mng.__read_planes_info()
if args.ffm_solution:
    solution = get_outputs.read_solution_static_format(segments)
    if not os.path.isfile('velmodel_data.json'):
        vel_model = mv.select_velmodel(tensor_info, default_dirs)
    else:
        # fix: close the JSON file instead of leaking the handle
        with open('velmodel_data.json') as inptfile:
            vel_model = json.load(inptfile)
    shear = pf.shear_modulous(point_sources, velmodel=vel_model)
    plot_ffm_sol(tensor_info, segments, point_sources, shear, solution,
                 vel_model, default_dirs)
traces_info = None
stations_gps = None
if args.gps:
    names, lats, lons, observed, synthetic, error\
        = get_outputs.retrieve_gps()
    stations_gps = zip(names, lats, lons, observed, synthetic, error)
if args.strong:
    # fix: close the JSON file instead of leaking the handle
    with open('strong_motion_waves.json') as inptfile:
        traces_info = json.load(inptfile)
if args.strong or args.gps:
    solution = get_outputs.read_solution_static_format(segments)