def run(r_srffile, sim_id=0):
    """
    Creates a SRF plot from an SRF file

    Runs the GP srf2xyz tool twice (once for slip, once for rupture
    initiation time) inside the simulation's tmp directory, then calls
    plot() to produce the figure in the output directory.

    r_srffile - SRF filename (relative; must end in ".srf")
    sim_id - simulation id used to locate the tmp/out directories
    """
    install = InstallCfg.getInstance()
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    # Save current directory so we can restore it at the end
    old_cwd = os.getcwd()
    os.chdir(a_tmpdir)
    # Write slip file (nseg=-1 means all segments at once)
    srfbase = r_srffile[0:r_srffile.find(".srf")]
    slipfile = "%s.slip" % (srfbase)
    cmd = ("%s/srf2xyz calc_xy=0 type=slip nseg=-1 < %s > %s" %
           (install.A_GP_BIN_DIR, r_srffile, slipfile))
    bband_utils.runprog(cmd)
    # Write tinit file (rupture initiation times)
    tinitfile = "%s.tinit" % (srfbase)
    cmd = ("%s/srf2xyz calc_xy=0 type=tinit nseg=-1 < %s > %s" %
           (install.A_GP_BIN_DIR, r_srffile, tinitfile))
    bband_utils.runprog(cmd)
    plottitle = 'Rupture Model for %s' % (r_srffile)
    plot(plottitle, r_srffile, a_outdir)
    os.chdir(old_cwd)
def __init__(self, station_file, sim_id=0, hypo=None):
    """
    Stores the inputs, sets up plotting defaults and empty geometry
    containers, then computes the map dimensions via init_dims()
    """
    self.station_file = station_file
    self.sim_id = sim_id
    self.hypo = hypo
    self.install = InstallCfg.getInstance()
    # Use the California coastline file if present, otherwise no coastline
    coast_file = os.path.join(self.install.A_PLOT_DATA_DIR,
                              "cali_coastline.mapgen")
    self.coast_file = coast_file if os.path.isfile(coast_file) else ""
    self.value = "GOF"
    self.stats = []
    # Grid resolution, in meters
    self.dx = 500.0
    self.spacing = [self.dx, self.dx]
    # Geometry placeholders, populated later by init_dims()
    self.dim = []
    self.rbounds = []
    self.nw = []
    self.sw = []
    self.se = []
    self.ne = []
    self.origin = []
    self.offset = []
    # Axes location of the map within the figure
    self.PLOT_MAP_LOC = [0.10, 0.15, 0.8, 0.8]
    self.x_invert = False
    self.y_invert = False
    self.init_dims()
def run(self):
    """
    Corrects the amplitudes from all stations found in the station
    list according to the correction coefficients provided by the user
    """
    print("Correct PSA".center(80, '-'))
    # Basic setup
    install = InstallCfg.getInstance()
    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.obs_seis.log" % (sim_id))
    # Resolve the station file inside this simulation's input directory
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_statfile = os.path.join(a_indir, self.r_stations)
    station_list = StationList(a_statfile).getStationList()
    # Apply the correction to every station in the list
    for station in station_list:
        print("==> Correcting amplitudes for station: %s" %
              (station.scode))
        self.correct_station(station.scode, self.extension)
    print("Correct PSA Completed".center(80, '-'))
def run(r_srffile, sim_id=0):
    """
    Creates a SRF plot from an SRF file

    Multi-segment version: writes one slip file and one tinit file per
    SRF segment (via srf2xyz with nseg=<segment index>), then calls
    plot() to produce the figure in the output directory.

    r_srffile - SRF filename (relative; must end in ".srf")
    sim_id - simulation id used to locate the tmp/out directories
    """
    install = InstallCfg.getInstance()
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    srf2xyz_bin = os.path.join(install.A_GP_BIN_DIR, "srf2xyz")
    # Save current directory so we can restore it at the end
    old_cwd = os.getcwd()
    os.chdir(a_tmpdir)
    # Get number of segments
    num_segments = get_srf_num_segments(r_srffile)
    srfbase = r_srffile[0:r_srffile.find(".srf")]
    # Write slip and tinit files for each segment
    for seg in range(num_segments):
        slipfile = "%s_seg%d.slip" % (srfbase, seg)
        cmd = ("%s calc_xy=0 type=slip nseg=%d < %s > %s" %
               (srf2xyz_bin, seg, r_srffile, slipfile))
        bband_utils.runprog(cmd)
        tinitfile = "%s_seg%d.tinit" % (srfbase, seg)
        cmd = ("%s calc_xy=0 type=tinit nseg=%d < %s > %s" %
               (srf2xyz_bin, seg, r_srffile, tinitfile))
        bband_utils.runprog(cmd)
    plottitle = 'Rupture Model for %s' % (r_srffile)
    plot(plottitle, r_srffile, a_outdir)
    os.chdir(old_cwd)
def __init__(self, a_srcname=None):
    """
    Sets up paths to the UCSB executables and the station file names
    used by the stitching stage; optionally parses a SRC file
    """
    install = InstallCfg.getInstance()
    # Parse the SRC file, when one is provided
    if a_srcname:
        self.CFGDICT = bband_utils.parse_src_file(a_srcname)
    # Names of the UCSB executables
    self.R_UC_DECON_EXE = "deconvBBP"
    self.R_SLL2XY = "statLL2XY"
    self.R_STITCH = "stitchBBP"
    # Absolute paths to the same executables
    self.A_UC_DECON_EXE = os.path.join(install.A_UCSB_BIN_DIR,
                                       self.R_UC_DECON_EXE)
    self.A_SLL2XY = os.path.join(install.A_UCSB_BIN_DIR,
                                 self.R_SLL2XY)
    self.A_STITCH = os.path.join(install.A_UCSB_BIN_DIR,
                                 self.R_STITCH)
    # Name used when the input station file is converted into a
    # UC lat/lon version of the station file, plus the Vs30 file
    self.R_UC_STATION_FILE = "uc_stations.ll"
    self.R_UC_VS30_FILE = "stations.vs30"
    self.COMPS = ['000', '090', 'ver']
def __init__(self, snum=None):
    """
    Stores the simulation id (allocating a new one from the sequence
    when none is given) and builds the path to this simulation's
    output log file.

    snum - optional simulation id; converted to int when provided
    """
    # Use "is None" for the sentinel test (PEP 8); "== None" relies on
    # the argument's __eq__ implementation
    if snum is None:
        self.sim_id = seqnum.get_seq_num()
    else:
        self.sim_id = int(snum)
    install = InstallCfg.getInstance()
    logdir = install.A_OUT_LOG_DIR
    # Build <logdir>/<sim_id>/<sim_id>.txt portably instead of
    # concatenating with "/"
    self.outlogfile = os.path.join(logdir, str(self.sim_id),
                                   "%d.txt" % (self.sim_id))
def __init__(self, sim_id=0, options=None):
    """
    Initialization for BBPStatus class
    """
    install = InstallCfg.getInstance()
    self.install = install
    self.sim_id = sim_id
    self.__options = options
    # Status output lives in this simulation's data directory
    self.outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
def run(self):
    """
    Generates a map showing the fault with stations

    Requires a SRC or SRF input file for the fault geometry; writes a
    fault trace file, a simplified station file, a KML file, and the
    final station map (via PlotMap) into the output directory.
    """
    print("Plot MAP".center(80, '-'))
    if (self.input_file is None or self.input_file == "" or
        (not self.input_file.endswith(".srf") and
         not self.input_file.endswith(".src"))):
        # We need a SRC or SRF file to get the fault geometry
        return
    install = InstallCfg.getInstance()
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(self.sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(self.sim_id))
    a_input_file = os.path.join(a_indir, self.input_file)
    a_station_file = os.path.join(a_indir, self.station_file)
    # Define boundaries to plot using the stations in the station file
    (self.north,
     self.south,
     self.east,
     self.west) = set_boundaries_from_stations(a_station_file)
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(self.sim_id),
                            "%d.plot_map.log" % (self.sim_id))
    trace_file = "%s.trace" % (a_input_file)
    simple_station_file = "%s.simple" % (a_station_file)
    # SRF files carry the full fault plane; SRC files only a simple trace
    if self.input_file.endswith(".srf"):
        self.trace = write_fault_trace(a_input_file, trace_file)
    else:
        self.trace = write_simple_trace(a_input_file, trace_file)
    write_simple_stations(a_station_file, simple_station_file)
    map_prefix = os.path.join(a_outdir, "station_map")
    kml_file = os.path.join(a_outdir, "station_map.kml")
    # Get hypo_lon, hypo_lat from src/srf file
    hypo_lon, hypo_lat = fault_utils.calculate_epicenter(a_input_file)
    # Write the kml file
    self.create_kml_output(a_station_file, kml_file,
                           hypo_lat=hypo_lat, hypo_lon=hypo_lon)
    # Matplotlib
    plottitle = 'Fault Trace with Stations'
    plotregion = [self.west, self.east, self.south, self.north]
    topo = os.path.join(install.A_PLOT_DATA_DIR, 'calTopo18.bf')
    coastal = os.path.join(install.A_PLOT_DATA_DIR, 'gshhs_h.txt')
    border = os.path.join(install.A_PLOT_DATA_DIR, 'wdb_borders_h.txt')
    plotter = PlotMap.PlotMap()
    plotter.plot(plottitle, plotregion, topo, coastal, border,
                 trace_file, simple_station_file, map_prefix,
                 hypo_lat=hypo_lat, hypo_lon=hypo_lon)
    print("Plot MAP Completed".center(80, '-'))
def plot_dist_gof(resid_file, comp_label, a_outdir_gmpe, sim_id):
    """
    Reads data from resid_file and creates one linear-scale and one
    log-scale goodness-of-fit versus distance plot, covering all
    periods in DIST_PERIODS
    """
    install = InstallCfg.getInstance()
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    # Residuals and distances, one entry per period
    all_data = []
    all_distances = []
    # GMPE residuals are currently not collected; the plots receive
    # empty lists for these two series
    all_gmpe_data = []
    all_gmpe_distances = []
    for period in DIST_PERIODS:
        summary_output = os.path.join(a_outdir,
                                      "%s-%d-resid-%.3f-%s.txt" %
                                      (comp_label, sim_id,
                                       period, COMP_EXT_RD50))
        data, distance = read_resid(resid_file, period, summary_output)
        all_data.append(data)
        all_distances.append(distance)
    # Produce both plots: linear scale first, then log scale
    for scale_name, use_log in (("linear", False), ("log", True)):
        outfile = os.path.join(a_outdir,
                               "gof-dist-%s-%s-%d-rotd50.png" %
                               (scale_name, comp_label, sim_id))
        create_dist_gof(all_data, all_distances,
                        all_gmpe_data, all_gmpe_distances,
                        comp_label, sim_id, outfile,
                        log_scale=use_log)
def run(self):
    """
    Generates a map showing the fault with stations

    Requires a SRC or SRF input file for the fault geometry; writes a
    fault trace file and a simplified station file, then produces the
    station map via PlotMap.plot_station_map.
    """
    print("Plot MAP".center(80, '-'))
    if (self.input_file is None or self.input_file == "" or
        (not self.input_file.endswith(".srf") and
         not self.input_file.endswith(".src"))):
        # We need a SRC or SRF file to get the fault geometry
        return
    install = InstallCfg.getInstance()
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(self.sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(self.sim_id))
    a_input_file = os.path.join(a_indir, self.input_file)
    a_station_file = os.path.join(a_indir, self.station_file)
    # Define boundaries to plot using the stations in the station file,
    # and making sure we include the entire fault plane
    (self.north,
     self.south,
     self.east,
     self.west) = plot_utils.set_boundaries_from_stations(
         a_station_file, a_input_file)
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(self.sim_id),
                            "%d.plot_map.log" % (self.sim_id))
    trace_file = "%s.trace" % (a_input_file)
    simple_station_file = "%s.simple" % (a_station_file)
    # SRF files carry the full fault plane; SRC files only a simple trace
    if self.input_file.endswith(".srf"):
        self.trace = plot_utils.write_fault_trace(a_input_file,
                                                  trace_file)
    else:
        self.trace = plot_utils.write_simple_trace(a_input_file,
                                                   trace_file)
    plot_utils.write_simple_stations(a_station_file,
                                     simple_station_file)
    map_prefix = os.path.join(a_outdir, "station_map")
    # Get hypo_lon, hypo_lat from src/srf file
    hypo_coord = {}
    hypo_lon, hypo_lat = fault_utils.calculate_epicenter(a_input_file)
    hypo_coord['lat'] = hypo_lat
    hypo_coord['lon'] = hypo_lon
    # Matplotlib
    plottitle = 'Fault Trace with Stations'
    plotregion = [self.west, self.east, self.south, self.north]
    topo = os.path.join(install.A_PLOT_DATA_DIR, 'calTopo18.bf')
    coastal = os.path.join(install.A_PLOT_DATA_DIR, 'gshhs_h.txt')
    border = os.path.join(install.A_PLOT_DATA_DIR, 'wdb_borders_h.txt')
    PlotMap.plot_station_map(plottitle, plotregion, topo, coastal,
                             border, trace_file, simple_station_file,
                             map_prefix, [hypo_coord])
    print("Plot MAP Completed".center(80, '-'))
def plot_station_map_main(): """ Main function for plotting the station map """ # Parse command-line options args = parse_arguments() # Copy inputs output_file = args.outfile station_file = args.station_list plot_title = args.plot_title src_files = args.src_files first_src_file = src_files[0] # Set paths install = InstallCfg.getInstance() topo = os.path.join(install.A_PLOT_DATA_DIR, 'calTopo18.bf') coastal = os.path.join(install.A_PLOT_DATA_DIR, 'gshhs_h.txt') border = os.path.join(install.A_PLOT_DATA_DIR, 'wdb_borders_h.txt') # Define boundaries to plot using the stations in the station file, # and making sure we include the entire fault plane (north, south, east, west) = plot_utils.set_boundaries_from_stations(station_file, first_src_file) trace_file = os.path.join("/tmp", "%s.trace" % (os.path.basename(first_src_file))) simple_station_file = os.path.join( "/tmp", "%s.simple" % (os.path.basename(station_file))) trace = plot_utils.write_simple_trace(first_src_file, trace_file) plot_utils.write_simple_stations(station_file, simple_station_file) # Build a hypocenter list hypocenters = [] for src_file in src_files: # Get hypo_lon, hypo_lat from src files hypo_lon, hypo_lat = fault_utils.calculate_epicenter(src_file) hypo_coord = {} hypo_coord['lat'] = hypo_lat hypo_coord['lon'] = hypo_lon hypocenters.append(hypo_coord) # Set plot title if plot_title is None: plot_title = 'Fault Trace with Stations' plot_region = [west, east, south, north] # Matplotlib PlotMap.plot_station_map(plot_title, plot_region, topo, coastal, border, trace_file, simple_station_file, os.path.splitext(output_file)[0], hypocenters) # Delete intermediate files os.remove(trace_file) os.remove(simple_station_file)
def __init__(self, vmodel_name, a_srcname=None): install = InstallCfg.getInstance() # # Name and Path to executable # self.R_UC_FFSP_EXE = "ffsp_v2" self.A_UC_FFSP_EXE = os.path.join(install.A_UCSB_BIN_DIR, self.R_UC_FFSP_EXE) self.FFSP_OUTPUT_PREFIX = "FFSP_OUTPUT" self.FMAX = 50.0 # Nyquist -- use 50 for 100Hz vmodel_obj = velocity_models.get_velocity_model_by_name(vmodel_name) if vmodel_obj is None: raise IndexError("Cannot find velocity model: %s" % (vmodel_name)) vmodel_params = vmodel_obj.get_codebase_params('ucsb') # Configure DT based on information from velocity model if 'GF_DT' in vmodel_params: self.DT = float(vmodel_params['GF_DT']) else: raise KeyError("%s parameter missing in velocity model %s" % ("GF_DT", vmodel_name)) # Other region-specific parameters if 'RV_AVG' in vmodel_params: self.RV_AVG = float(vmodel_params['RV_AVG']) else: self.RV_AVG = 2.5 if 'TP_TR' in vmodel_params: self.TP_TR = float(vmodel_params['TP_TR']) else: self.TP_TR = 0.1 if 'LF_VELMODEL' in vmodel_params: self.A_UC_LF_VELMODEL = os.path.join(vmodel_obj.base_dir, vmodel_params['LF_VELMODEL']) else: raise KeyError("%s parameter missing in velocity model %s" % ("LF_VELMODEL", vmodel_name)) if a_srcname: self.CFGDICT = bband_utils.parse_src_file(a_srcname) # RV_AVG is optional! # If SRC file has it, it overrides the region and the default values if "rv_avg" in self.CFGDICT: self.RV_AVG = self.CFGDICT["rv_avg"]
def run(self):
    """
    Run the GP WccSiteamp 2014 module

    Applies site amplification to each station in the station list,
    dispatching to the method-specific processing routine (separate
    LF/HF seismograms for GP, hybrid seismograms otherwise).
    """
    print("GP Site Response".center(80, '-'))
    self.install = InstallCfg.getInstance()
    install = self.install
    self.config = WccSiteampCfg()
    config = self.config
    sim_id = self.sim_id
    # Station list basename makes log names unique per station list
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.wcc_siteamp_%s.log" % (sim_id, sta_base))
    a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                              self.r_stations)
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    progstring = "mkdir -p %s" % (a_tmpdir)
    bband_utils.runprog(progstring, abort_on_error=True, print_cmd=False)
    #
    # Read and parse the station list with this call
    #
    slo = StationList(a_statfile)
    site_list = slo.getStationList()
    for sites in site_list:
        site = sites.scode
        vs30 = sites.vs30
        # Cap Vs30 at the configured maximum reference velocity
        if vs30 > config.VREF_MAX:
            vs30 = config.VREF_MAX
        print("*** WccSiteamp Processing station %s..." % (site))
        if self.method == "GP":
            self.process_separate_seismograms(site, sta_base, vs30,
                                              a_indir, a_tmpdir)
        elif self.method == "SDSU" or self.method == "EXSIM" or self.method == "UCSB":
            self.process_hybrid_seismogram(site, sta_base, vs30,
                                           a_tmpdir, a_outdir)
    print("GP Site Response Completed".center(80, '-'))
def plot_dist_gof(resid_file, comp_label, a_outdir_gmpe, sim_id): """ Reads data from resid_file and plots a gof distance plot all periods """ # Get directory names install = InstallCfg.getInstance() a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id)) # Collect all the data all_data = [] all_distances = [] all_gmpe_data = [] all_gmpe_distances = [] # Read the residuals data for period in DIST_PERIODS: summary_output = os.path.join(a_outdir, "%s-%d-resid-%.3f-%s.txt" % (comp_label, sim_id, period, COMP_EXT_RD50)) data, distance = read_resid(resid_file, period, summary_output) all_data.append(data) all_distances.append(distance) # Now do the same for the GMPE data # for period in DIST_PERIODS: # if os.path.isdir(a_outdir_gmpe): # summary_output = os.path.join(a_outdir, # "%s-%d-resid-gmpe-%.3f-%s.txt" % # (comp_label, sim_id, # period, COMP_EXT_RD50)) # data, distance = read_gmpe_resid(a_outdir_gmpe, sim_id, # period, summary_output) # all_gmpe_data.append(data) # all_gmpe_distances.append(distance) # else: # all_gmpe_data.append([]) # all_gmpe_distances.append([]) # Now create the 2 plots, 1 linear and 1 log outfile = os.path.join(a_outdir, "gof-dist-linear-%s-%d-rotd50.png" % (comp_label, sim_id)) create_dist_gof(all_data, all_distances, all_gmpe_data, all_gmpe_distances, comp_label, sim_id, outfile) outfile = os.path.join(a_outdir, "gof-dist-log-%s-%d-rotd50.png" % (comp_label, sim_id)) create_dist_gof(all_data, all_distances, all_gmpe_data, all_gmpe_distances, comp_label, sim_id, outfile, log_scale=True)
def run(self):
    """
    Calculate GMPEs, create bias plot comparisons

    Parses the SRC file, then computes GMPE values for every station
    in the station list, writing one .ri50 file per station into the
    gmpe_data_<station list> output directory.
    """
    print("Calculate GMPE".center(80, '-'))
    # Initialize basic variables
    install = InstallCfg.getInstance()
    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    # Input, tmp, and output directories
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_outdir_gmpe = os.path.join(a_outdir, "gmpe_data_%s" % (sta_base))
    a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
    self.log = os.path.join(a_logdir, "%d.gmpe_compare.log" % (sim_id))
    #
    # Make sure the output and tmp directories exist
    #
    dirs = [a_outdir_gmpe, a_outdir, a_logdir]
    bband_utils.mkdirs(dirs, print_cmd=False)
    # Source file, parse it!
    a_srcfile = os.path.join(install.A_IN_DATA_DIR,
                             str(sim_id),
                             self.r_src_file)
    self.src_keys = bband_utils.parse_src_file(a_srcfile)
    # Station file
    a_statfile = os.path.join(install.A_IN_DATA_DIR,
                              str(sim_id),
                              self.r_stations)
    slo = StationList(a_statfile)
    site_list = slo.getStationList()
    # Go through each station, and print comparison headers for
    # the first station we process
    for site in site_list:
        stat = site.scode
        print("==> Calculating GMPE for station: %s" % (stat))
        output_file = os.path.join(a_outdir_gmpe, "%s-gmpe.ri50" % (stat))
        self.calculate_gmpe(site, output_file)
    # All done
    print("Calculate GMPE Completed".center(80, '-'))
def __init__(self):
    """
    Initialize class structures
    """
    self.install = InstallCfg.getInstance()
    # Simulation id and directories are resolved later
    self.output_sim_id = None
    self.a_indir = None
    self.a_outdir = None
    self.a_logdir = None
    self.a_tmpdir = None
    self.station_list = None
    # Inputs to merge: simulations plus their SRC/SRF files
    self.input_sims = []
    self.src_files = []
    self.srf_files = []
    # Validation event, when one applies
    self.val_event = None
def run(self):
    """
    Goes through the station list and copies each low-frequency
    seismogram from the seis_dir to the simulation's tmpdir

    While copying, timestamps below -0.0001 are dropped and
    timestamps within +/-0.0001 of zero are clamped to 0.0, so the
    copied seismograms never start at negative times.
    """
    install = InstallCfg.getInstance()
    sim_id = self.sim_id
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_stations = os.path.join(a_indir, self.r_stations)
    print(self.seis_dir)
    slo = StationList(a_stations)
    stat_list = slo.getStationList()
    for stat in stat_list:
        # Look for bbp seismogram, copy in
        print("%s/%s-lf.bbp" % (self.seis_dir, stat.scode))
        if os.path.exists("%s/%s-lf.bbp" % (self.seis_dir, stat.scode)):
            print("Copying for site %s" % (stat.scode))
            # Need to eliminate negative times; use context managers so
            # both files are closed even if a line fails to parse
            with open("%s/%s-lf.bbp" %
                      (self.seis_dir, stat.scode), 'r') as fp_in, \
                 open("%s/%d.%s-lf.bbp" %
                      (a_tmpdir, sim_id, stat.scode), 'w') as fp_out:
                for line in fp_in:
                    pieces = line.split()
                    try:
                        if pieces[0] == '#' or pieces[0] == '%':
                            # Header/comment lines pass through unchanged
                            fp_out.write(line)
                        elif float(pieces[0]) < -0.0001:
                            # Drop samples at negative times
                            continue
                        elif float(pieces[0]) < 0.0001:
                            # Clamp near-zero timestamps to exactly 0.0
                            fp_out.write("0.0\t%s\t%s\t%s\n" %
                                         (pieces[1], pieces[2],
                                          pieces[3]))
                        else:
                            fp_out.write(line)
                    except (ValueError, IndexError):
                        # Unparseable or short/blank lines pass through
                        fp_out.write(line)
        else:
            print("Could not find LF seismogram for station %s!" %
                  (stat.scode))
def setUp(self):
    """
    Configures the environment for the tests
    """
    self.install = InstallCfg.getInstance()
    self.sim_id = int(seqnum.get_seq_num())
    sim_dir = str(self.sim_id)
    # Per-simulation input/tmp/output/log directories
    self.indir = os.path.join(self.install.A_IN_DATA_DIR, sim_dir)
    self.tmpdir = os.path.join(self.install.A_TMP_DATA_DIR, sim_dir)
    self.outdir = os.path.join(self.install.A_OUT_DATA_DIR, sim_dir)
    self.logdir = os.path.join(self.install.A_OUT_LOG_DIR, sim_dir)
    # Make sure all directories exist
    bband_utils.mkdirs([self.indir, self.tmpdir,
                        self.outdir, self.logdir],
                       print_cmd=False)
def __init__(self, resume=True):
    """
    Sets up the acceptance-test helper: loads the list of already
    completed tests from the checkpoint file (when resuming) and
    resolves the input/reference directories.

    resume - when True, skip tests recorded in gen_resume.txt;
             automatically disabled if the checkpoint file is missing
    """
    install = InstallCfg.getInstance()
    self.resume = resume
    self.resume_list = []
    # Read checkpoint file; "if self.resume:" instead of "== True"
    if self.resume:
        resume_file = os.path.join(install.A_OUT_LOG_DIR,
                                   "gen_resume.txt")
        if os.path.exists(resume_file):
            # Context manager guarantees the file is closed
            with open(resume_file, 'r') as resume_fp:
                self.resume_list = resume_fp.read().splitlines()
        else:
            # Nothing to resume from
            self.resume = False
    # Setup paths
    self.input_dir = os.path.join(install.A_TEST_REF_DIR,
                                  "accept_inputs")
    self.ref_dir = os.path.join(install.A_TEST_REF_DIR,
                                "accept_refs")
def __init__(self, i_r_stations, i_r_srcfile, plot_vel,
             plot_acc, sim_id=0):
    """
    Initialize basic class parameters

    Parses the SRC file when one is supplied; otherwise src_keys
    stays None.
    """
    self.r_stations = i_r_stations
    self.plot_vel = plot_vel
    self.plot_acc = plot_acc
    self.sim_id = sim_id
    install = InstallCfg.getInstance()
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(self.sim_id))
    # Only parse the SRC file when a non-empty name was given
    self.src_keys = None
    if i_r_srcfile is not None and i_r_srcfile != "":
        i_a_srcfile = os.path.join(a_indir, i_r_srcfile)
        self.src_keys = bband_utils.parse_src_file(i_a_srcfile)
def generate_xml(self, optfiles): install = InstallCfg.getInstance() # Generate xml workflows tests = [] for key in optfiles.keys(): sim_id = int(seqnum.get_seq_num()) test = key.split('.')[0] xmlfile = os.path.join(self.input_dir, "%s.xml" % (test)) if os.path.basename(xmlfile) in self.resume_list: # Skip this test print("Skipping %s" % (key)) continue print("Generating %s" % (key)) optfile = os.path.join(self.input_dir, key) # Save the option file op = open(optfile, 'w') for line in optfiles[key]: op.write("%s\n" % (line)) op.close() # Generate xml print("Generating xml for %s" % (key)) print("\t %s" % (str(optfiles[key]))) cmd = ("%s --expert -s %d -g -o %s" % (os.path.join( install.A_COMP_DIR, "run_bbp.py"), sim_id, optfile)) print("Running: %s" % (cmd)) rc = bband_utils.runprog(cmd, False) if rc != 0: print("Failed to run bbp, aborting.") return [] oldxmlfile = os.path.join(install.A_XML_DIR, "%d.xml" % (sim_id)) shutil.copy2(oldxmlfile, xmlfile) if not os.path.exists(xmlfile): print("Workflow %s not found, aborting." % (xmlfile)) return [] tests.append([sim_id, xmlfile]) time.sleep(1) return tests
def run_tests(self, tests): install = InstallCfg.getInstance() # Run the tests and save results as reference data for test in tests: if os.path.basename(test[1]) in self.resume_list: # Skip this test print("Skipping %s" % (os.path.basename(test[1]))) continue # Execute each test cmd = ("%s -s %d -x %s" % (os.path.join( install.A_COMP_DIR, "run_bbp.py"), test[0], test[1])) rc = bband_utils.runprog(cmd, False) if rc != 0: print("Failed to run acceptance test %d-%s, aborting." % (test[0], test[1])) return 1 # Save the bbp and rd50 files test_name = os.path.basename(test[1]).split('.')[0] cmd = "mkdir -p %s" % (os.path.join(self.ref_dir, test_name)) bband_utils.runprog(cmd) rd50files = glob.glob("%s/%d/%d.*.rd50" % (install.A_OUT_DATA_DIR, test[0], test[0])) if len(rd50files) < 1: print("Did not find expected RotD50 files") return 1 for rd50_file in rd50files: filecomps = os.path.basename(rd50_file).split('.') shutil.copy2( rd50_file, os.path.join(self.ref_dir, test_name, "%s.rd50" % (filecomps[1]))) # Write progress to checkpoint file resume_fp = open( os.path.join(install.A_OUT_LOG_DIR, "gen_resume.txt"), 'a') resume_fp.write("%s\n" % os.path.basename(test[1])) resume_fp.flush() resume_fp.close() return 0
def generate_xml(self, optfiles): install = InstallCfg.getInstance() # Generate xml workflows tests = [] for key in optfiles.keys(): sim_id = int(seqnum.get_seq_num()) test = key.split('.')[0] xmlfile = os.path.join(self.input_dir, "%s.xml" % (test)) if os.path.basename(xmlfile) in self.resume_list: # Skip this test print("Skipping %s" % (key)) continue print("Generating %s" % (key)) optfile = os.path.join(self.input_dir, key) # Save the option file op = open(optfile, 'w') for line in optfiles[key]: op.write("%s\n" % (line)) op.close() # Generate xml print("Generating xml for %s" % (key)) print("\t %s" % (str(optfiles[key]))) cmd = ("%s/run_bbp.py --expert -s %d -g -o %s" % (install.A_COMP_DIR, sim_id, optfile)) print("Running: %s" % (cmd)) rc = bband_utils.runprog(cmd, False) if rc != 0: print("Failed to run bbp, aborting.") return [] oldxmlfile = os.path.join(install.A_XML_DIR, "%d.xml" % (sim_id)) shutil.copy2(oldxmlfile, xmlfile) if not os.path.exists(xmlfile): print("Workflow %s not found, aborting." % (xmlfile)) return [] tests.append([sim_id, xmlfile]) time.sleep(1) return tests
def get_magnitude(velfile, srffile, suffix="tmp"):
    """
    Scans the srffile and returns the magnitude of the event

    Runs the GP srf2moment tool, redirecting its stderr output into a
    temporary file, parses the magnitude from that output, and removes
    the temporary file.

    velfile - velocity model file passed to srf2moment
    srffile - SRF file to scan
    suffix - tag used to make the temporary file name unique
    """
    magfile = "srf2moment_%s.out" % (suffix)
    install = InstallCfg.getInstance()
    cmd = ("%s velfile=%s < %s 2> %s" %
           (os.path.join(install.A_GP_BIN_DIR, "srf2moment"),
            velfile, srffile, magfile))
    bband_utils.runprog(cmd, False)
    # Context manager ensures the file is closed even on parse errors
    with open(magfile, 'r') as srf2moment_fp:
        srf2moment_data = srf2moment_fp.readlines()
    # Magnitude appears 4 lines from the end of the srf2moment output
    mag_line = srf2moment_data[-4]
    pieces = mag_line.split()
    magnitude = float(pieces[5].split(")")[0])
    # Remove the temporary file directly instead of shelling out to rm
    os.remove(magfile)
    return magnitude
def run_tests(self, tests): install = InstallCfg.getInstance() # Run the tests and save results as reference data for test in tests: if os.path.basename(test[1]) in self.resume_list: # Skip this test print("Skipping %s" % (os.path.basename(test[1]))) continue # Execute each test cmd = ("%s/run_bbp.py -s %d -x %s" % (install.A_COMP_DIR, test[0], test[1])) rc = bband_utils.runprog(cmd, False) if rc != 0: print("Failed to run acceptance test %d-%s, aborting." % (test[0], test[1])) return 1 # Save the bbp and rsp files test_name = os.path.basename(test[1]).split('.')[0] cmd = "mkdir -p %s" % (os.path.join(self.ref_dir, test_name)) bband_utils.runprog(cmd) rd50files = glob.glob("%s/%d/%d.*.rd50" % (install.A_OUT_DATA_DIR, test[0], test[0])) if len(rd50files) < 1: print("Did not find expected RotD50 files") return 1 for rd50_file in rd50files: filecomps = os.path.basename(rd50_file).split('.') shutil.copy2(rd50_file, os.path.join(self.ref_dir, test_name, "%s.rd50" % (filecomps[1]))) # Write progress to checkpoint file resume_fp = open(os.path.join(install.A_OUT_LOG_DIR, "gen_resume.txt"), 'a') resume_fp.write("%s\n" % os.path.basename(test[1])) resume_fp.flush() resume_fp.close() return 0
def get_magnitude(velfile, srffile, suffix="tmp"):
    """
    Scans the srffile and returns the magnitude of the event

    Runs the GP srf2moment tool, parses the magnitude from its
    output, then deletes the temporary output file.
    """
    magfile = "srf2moment_%s.out" % (suffix)
    install = InstallCfg.getInstance()
    srf2moment_bin = os.path.join(install.A_GP_BIN_DIR, "srf2moment")
    # srf2moment prints its report on stderr, capture it in magfile
    cmd = ("%s velfile=%s < %s 2> %s" %
           (srf2moment_bin, velfile, srffile, magfile))
    bband_utils.runprog(cmd, False)
    srf2moment_fp = open(magfile, 'r')
    srf2moment_data = srf2moment_fp.readlines()
    srf2moment_fp.close()
    # Magnitude appears 4 lines from the end of the output
    mag_line = srf2moment_data[len(srf2moment_data) - 4]
    pieces = mag_line.split()
    magnitude = float(pieces[5].split(")")[0])
    # Clean up the temporary file
    cmd = "rm %s" % (magfile)
    bband_utils.runprog(cmd, False)
    return magnitude
def get_hypocenter(srffile, suffix="tmp"):
    """
    Looks up the hypocenter of an event in a srffile

    Runs the GP srf_gethypo tool and parses its single-line output.
    Returns the first three fields as floats (presumably
    [lon, lat, depth] -- confirm against srf_gethypo output format).
    """
    hypfile = "srf_hypo_%s" % (suffix)
    install = InstallCfg.getInstance()
    cmd = ("%s < %s > %s" %
           (os.path.join(install.A_GP_BIN_DIR, "srf_gethypo"),
            srffile, hypfile))
    bband_utils.runprog(cmd)
    # Context manager ensures the file is closed even on parse errors
    with open(hypfile, 'r') as srf_hypo_fp:
        srf_hypo_data = srf_hypo_fp.readline()
    srf_hypo = srf_hypo_data.split()
    # First three whitespace-separated fields are the hypocenter
    hypo = [float(srf_hypo[i]) for i in range(3)]
    # Remove the temporary file directly instead of shelling out to rm
    os.remove(hypfile)
    return hypo
def get_hypocenter(srffile, suffix="tmp"):
    """
    Looks up the hypocenter of an event in a srffile

    Runs the GP srf_gethypo tool and parses the first three fields
    of its single-line output as floats.
    """
    hypfile = "srf_hypo_%s" % (suffix)
    install = InstallCfg.getInstance()
    srf_gethypo_bin = os.path.join(install.A_GP_BIN_DIR, "srf_gethypo")
    cmd = ("%s < %s > %s" % (srf_gethypo_bin, srffile, hypfile))
    bband_utils.runprog(cmd)
    srf_hypo_fp = open(hypfile, 'r')
    srf_hypo_data = srf_hypo_fp.readline()
    srf_hypo_fp.close()
    srf_hypo = srf_hypo_data.split()
    # Collect the first three whitespace-separated values
    hypo = []
    for index in range(0, 3):
        hypo.append(float(srf_hypo[index]))
    # Clean up the temporary file
    cmd = "rm %s" % (hypfile)
    bband_utils.runprog(cmd)
    return hypo
def uc_create_fault_global(a_faultfile, sim_id, r_srcfile,
                           i_vmodel_name, r_velmodel, r_srffile):
    """
    This fuction creates the faultfile with the parameters specified
    in the src_file and velocity model

    a_faultfile - absolute path of the fault file to write
    sim_id - simulation id used to locate the input SRC file
    r_srcfile - SRC filename, relative to the sim's input directory
    i_vmodel_name - velocity model name passed to UCrmgCfg
    r_velmodel - velocity model filename written into the fault file
    r_srffile - SRF filename written into the fault file (".srf"
                suffix stripped, since KinModel appends it)
    """
    install = InstallCfg.getInstance()
    a_srcfile = os.path.join(install.A_IN_DATA_DIR,
                             str(sim_id),
                             r_srcfile)
    # since KinModel appends .srf automatically, make sure it's not
    # already there
    if r_srffile.endswith(".srf"):
        r_srffile = r_srffile[0:len(r_srffile) - 4]
    # Read source file
    cfg = UCrmgCfg(i_vmodel_name, a_srcfile)
    # Write configuration file, one fixed-format line per parameter
    # group, in the order the UCSB tool expects
    print("Creating %s" % (a_faultfile))
    fault_file = open(a_faultfile, "w")
    fault_file.write("%.3f %.3f %.1f\n" %
                     (cfg.CFGDICT["lon_top_center"],
                      cfg.CFGDICT["lat_top_center"],
                      cfg.CFGDICT["depth_to_top"]))
    fault_file.write("%.2f %.2f\n" %
                     (cfg.CFGDICT["fault_length"],
                      cfg.CFGDICT["fault_width"]))
    # NOTE(review): "%.f." yields values with a trailing period
    # (e.g. "90."); this appears to be the expected input format --
    # confirm before changing
    fault_file.write("%.f. %.f. %.f.\n" %
                     (cfg.CFGDICT["strike"],
                      cfg.CFGDICT["dip"],
                      cfg.CFGDICT["rake"]))
    fault_file.write("%.1f %.1f\n" %
                     (cfg.CFGDICT["hypo_along_stk"],
                      cfg.CFGDICT["hypo_down_dip"]))
    fault_file.write("%4.2f\n" % (cfg.CFGDICT['magnitude']))
    fault_file.write("%.3f %.3f\n" %
                     (cfg.CFGDICT["dlen"], cfg.CFGDICT["dwid"]))
    fault_file.write("%d\n" % (cfg.CFGDICT['seed']))
    fault_file.write("%.2f\n" % (cfg.DT))
    fault_file.write("%.2f\n" % (cfg.CFGDICT['corner_freq']))
    fault_file.write("%s\n" % (r_velmodel))
    fault_file.write("%s\n" % (r_srffile))
    fault_file.close()
def uc_create_fault_global(a_faultfile, sim_id, r_srcfile,
                           i_vmodel_name, r_velmodel, r_srffile):
    """
    This fuction creates the faultfile with the parameters specified
    in the src_file and velocity model

    a_faultfile - absolute path of the fault file to write
    sim_id - simulation id used to locate the input SRC file
    r_srcfile - SRC filename, relative to the sim's input directory
    i_vmodel_name - velocity model name passed to UCrmgCfg
    r_velmodel - velocity model filename written into the fault file
    r_srffile - SRF filename written into the fault file (".srf"
                suffix stripped, since KinModel appends it)
    """
    install = InstallCfg.getInstance()
    a_srcfile = os.path.join(install.A_IN_DATA_DIR,
                             str(sim_id),
                             r_srcfile)
    # since KinModel appends .srf automatically, make sure it's not
    # already there
    if r_srffile.endswith(".srf"):
        r_srffile = r_srffile[0:len(r_srffile) - 4]
    # Read source file
    cfg = UCrmgCfg(i_vmodel_name, a_srcfile)
    # Write configuration file, one fixed-format line per parameter
    # group, in the order the UCSB tool expects
    print("Creating %s" % (a_faultfile))
    fault_file = open(a_faultfile, "w")
    fault_file.write("%.3f %.3f %.1f\n" %
                     (cfg.CFGDICT["lon_top_center"],
                      cfg.CFGDICT["lat_top_center"],
                      cfg.CFGDICT["depth_to_top"]))
    fault_file.write("%.2f %.2f\n" %
                     (cfg.CFGDICT["fault_length"],
                      cfg.CFGDICT["fault_width"]))
    # NOTE(review): "%.f." yields values with a trailing period
    # (e.g. "90."); this appears to be the expected input format --
    # confirm before changing
    fault_file.write("%.f. %.f. %.f.\n" %
                     (cfg.CFGDICT["strike"],
                      cfg.CFGDICT["dip"],
                      cfg.CFGDICT["rake"]))
    fault_file.write("%.1f %.1f\n" %
                     (cfg.CFGDICT["hypo_along_stk"],
                      cfg.CFGDICT["hypo_down_dip"]))
    fault_file.write("%4.2f\n" % (cfg.CFGDICT['magnitude']))
    fault_file.write("%.3f %.3f\n" %
                     (cfg.CFGDICT["dlen"], cfg.CFGDICT["dwid"]))
    fault_file.write("%d\n" % (cfg.CFGDICT['seed']))
    fault_file.write("%.2f\n" % (cfg.DT))
    fault_file.write("%.2f\n" % (cfg.CFGDICT['corner_freq']))
    fault_file.write("%s\n" % (r_velmodel))
    fault_file.write("%s\n" % (r_srffile))
    fault_file.close()
def test_xy2ll(self):
    """
    ll2xy mlon=-118 mlat=34 xazim=0 < stats.ll > stats_out.xy
    This will return a file with a different suffix to identify
    the contents
    """
    self.install = InstallCfg.getInstance()
    self.sim_id = int(seqnum.get_seq_num())
    # Make sure all directories exist
    self.indir = os.path.join(self.install.A_IN_DATA_DIR,
                              str(self.sim_id))
    self.tmpdir = os.path.join(self.install.A_TMP_DATA_DIR,
                               str(self.sim_id))
    self.outdir = os.path.join(self.install.A_OUT_DATA_DIR,
                               str(self.sim_id))
    self.logdir = os.path.join(self.install.A_OUT_LOG_DIR,
                               str(self.sim_id))
    bband_utils.mkdirs([self.indir, self.tmpdir,
                        self.outdir, self.logdir])
    # Reference point and azimuth for the projection
    ilon = -118.0
    ilat = 34.0
    iaz = 0.0
    infile = os.path.join(self.indir, "100.bob.ll")
    ofile = os.path.join(self.outdir, "100.bob.xy")
    reffile = os.path.join(self.install.A_TEST_REF_DIR,
                           "sdsu", "100.bob.xy")
    # Write input file with a single lon/lat pair
    in_file = open(infile, "w")
    data = "%f %f\n" % (ilon, ilat)
    in_file.write(data)
    in_file.close()
    # Run the test
    cc.ll2xy(infile, ofile, ilon, ilat, iaz)
    # Check output; use assertTrue instead of the deprecated failIf
    # alias (removed in Python 3.12)
    self.assertTrue(filecmp.cmp(reffile, ofile),
                    "LL to XY did not work")
def run(self):
    """
    Runs the match module to merge low and high frequency seismograms.

    For every station in the station list this: (optionally) converts
    velocity seismograms to acceleration, splits the .bbp files into
    individual components, pre-filters and resamples the LF and HF
    components to a common DT, adds them, integrates back to velocity,
    and writes combined velocity and acceleration .bbp files to the
    output directory.
    """
    print("Match".center(80, '-'))

    install = InstallCfg.getInstance()
    config = MatchCfg()

    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.match_%s.log" % (sim_id, sta_base))

    a_statfile = os.path.join(install.A_IN_DATA_DIR,
                              str(sim_id),
                              self.r_stations)
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))

    # Make sure tmpdir exists
    dirs = [a_tmpdir]
    bband_utils.mkdirs(dirs, print_cmd=False)

    pow2_param = 0
    if self.pow2:
        pow2_param = 1

    # Start with defaults
    self.phase = config.PHASE
    self.hf_fhi = config.HF_FHI
    self.lf_flo = config.LF_FLO

    # Set match method
    if config.MATCH_METHOD == 1:
        self.phase = 1
    elif config.MATCH_METHOD == 2:
        # Shift the filter corners so the -3dB points of the HF and LF
        # Butterworth filters line up at the crossover frequency
        val = 1.0 / (2.0 * config.HF_ORD)
        self.hf_fhi = (self.hf_fhi *
                       math.exp(val * math.log(math.sqrt(2.0) - 1.0)))
        val = -1.0 / (2.0 * config.LF_ORD)
        self.lf_flo = (self.lf_flo *
                       math.exp(val * math.log(math.sqrt(2.0) - 1.0)))

    #
    # Read and parse the station list with this call
    #
    slo = StationList(a_statfile)
    site_list = slo.getStationList()

    # Get pointer to the velocity model object
    vel_obj = velocity_models.get_velocity_model_by_name(self.vmodel_name)
    if vel_obj is None:
        raise bband_utils.ParameterError("Cannot find velocity model: %s" %
                                         (self.vmodel_name))
    # Check for velocity model-specific parameters
    vmodel_params = vel_obj.get_codebase_params('gp')

    # Figure out what DT we should use when resampling

    # Figure out the LF DT value
    if self.acc:
        seis_ext = '.acc.bbp'
    else:
        seis_ext = '.bbp'
    lf_seis = None

    # Find one LF seismogram
    for sites in site_list:
        site = sites.scode
        if os.path.exists(os.path.join(a_tmpdir,
                                       "%d.%s-lf%s" %
                                       (sim_id, site, seis_ext))):
            lf_seis = os.path.join(a_tmpdir,
                                   "%d.%s-lf%s" %
                                   (sim_id, site, seis_ext))
            break

    # Need one file
    if lf_seis is None:
        raise bband_utils.ParameterError("Cannot find a LF seismogram")

    # Pick DT from this file
    lf_dt = None
    lf_file = open(lf_seis)
    for line in lf_file:
        line = line.strip()
        if line.startswith("#") or line.startswith("%"):
            continue
        # Got to first timestamp. Now, pick two consecutive
        # timestamps values
        lf_t1 = float(line.strip().split()[0])
        # BUG FIX: file objects have no .next() method in Python 3;
        # use the builtin next() (works on Python 2.6+ as well)
        lf_t2 = float(next(lf_file).strip().split()[0])
        # Subtract the two times
        lf_dt = lf_t2 - lf_t1
        # All done!
        break
    lf_file.close()

    if lf_dt is None:
        raise bband_utils.ParameterError("Cannot find LF_DT!")

    # lf_dt *should* match the gf_dt used by jbsim
    #if not 'GF_DT' in vmodel_params:
    #    raise bband_utils.ParameterError("Cannot find GF_DT parameter in "
    #                                     "velocity model: %s" %
    #                                     (self.vmodel_name))

    # In the GP method, we can potentially have two independent DT
    # values, one used by the rupture generator and the
    # low-frequency jbsim seismogram simulator, and another value
    # used by the high-frequency hfsims program. We have to use
    # the smaller of these two values in order to properly combine
    # the low-, and high-frequency seismograms.
    #gf_dt = float(vmodel_params['GF_DT'])
    if 'HF_DT' in vmodel_params:
        hf_dt = float(vmodel_params['HF_DT'])
    else:
        hf_dt = config.NEW_HFDT

    new_dt = min(lf_dt, hf_dt)

    # Go through the stations
    for sites in site_list:
        # Pick station name
        site = sites.scode

        #
        # We have a verbose of silent invocation. This is a very
        # verbose program so our default writes to dev/null
        #

        #
        # There are multiple possibilities; either we have
        # separate HF and LF files, we have HF and .bbp, LF and
        # .bbp, or just .bbp. In all cases, we need to separate
        # them to get components.
        #
        hf_exists = False
        lf_exists = False

        if not self.acc:
            print("==> Processing velocity seismograms for station: %s" %
                  (site))
            # Need to convert to acc first
            if os.path.exists(os.path.join(a_tmpdir, "%d.%s-hf.bbp" %
                                           (sim_id, site))):
                hf_exists = True
            if os.path.exists(os.path.join(a_tmpdir, "%d.%s-lf.bbp" %
                                           (sim_id, site))):
                lf_exists = True
            # If no files exist for this station, make a note and continue
            if not hf_exists and not lf_exists:
                print("===> No velocity seismograms found!")
                print("===> Skipping station...")
                continue

            # First process HF files to convert velocity to acceleration

            # Create path names and check if their sizes are
            # within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s-hf.bbp" % (sim_id, site))
            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            # Run wcc2bbp (wcc2bbp=0 splits a .bbp into components)
            cmd = ("%s " %
                   (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "wcc2bbp=0 < %s >> %s 2>&1" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            for comp in config.COMPS:
                # Create path names and check if their sizes
                # are within bounds
                filein = os.path.join(a_tmpdir,
                                      "%d.%s-hf.%s" %
                                      (sim_id, site, comp))
                fileout = os.path.join(a_tmpdir,
                                       "%d.%s-hf.acc.%s" %
                                       (sim_id, site, comp))
                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)

                # Differentiate velocity to acceleration
                cmd = ("%s diff=1 " %
                       (os.path.join(install.A_GP_BIN_DIR, "integ_diff")) +
                       "filein=%s fileout=%s" % (filein, fileout))
                bband_utils.runprog(cmd, abort_on_error=True,
                                    print_cmd=False)

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s-hf.acc.bbp" % (sim_id, site))
            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            # Re-assemble the acceleration components into a .bbp file
            cmd = ("%s " %
                   (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Then process LF files to convert velocity to acceleration

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s-lf.bbp" % (sim_id, site))
            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " %
                   (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "wcc2bbp=0 < %s >> %s 2>&1" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            for comp in config.COMPS:
                # Create path names and check if their sizes
                # are within bounds
                filein = os.path.join(a_tmpdir,
                                      "%d.%s-lf.%s" %
                                      (sim_id, site, comp))
                fileout = os.path.join(a_tmpdir,
                                       "%d.%s-lf.acc.%s" %
                                       (sim_id, site, comp))
                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s " %
                       (os.path.join(install.A_GP_BIN_DIR, "integ_diff")) +
                       "diff=1 filein=%s fileout=%s" % (filein, fileout))
                bband_utils.runprog(cmd, abort_on_error=True,
                                    print_cmd=False)

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s-lf.acc.bbp" % (sim_id, site))
            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " %
                   (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # We should have acceleration files at this point
        hf_exists = False
        lf_exists = False

        if os.path.exists(os.path.join(a_tmpdir, "%d.%s-hf.acc.bbp" %
                                       (sim_id, site))):
            hf_exists = True
        if os.path.exists(os.path.join(a_tmpdir, "%d.%s-lf.acc.bbp" %
                                       (sim_id, site))):
            lf_exists = True

        print("==> Processing acceleration seismograms for station: %s" %
              (site))
        # If no files exist for this station, make a note and continue
        if not hf_exists and not lf_exists:
            print("===> No acceleration seismograms found!")
            print("===> Skipping station...")
            continue

        #
        # Convert HF file to wcc components
        #
        # Create path names and check if their sizes are within bounds
        nsfile = os.path.join(a_tmpdir,
                              "%d.%s-hf.acc.000" % (sim_id, site))
        ewfile = os.path.join(a_tmpdir,
                              "%d.%s-hf.acc.090" % (sim_id, site))
        udfile = os.path.join(a_tmpdir,
                              "%d.%s-hf.acc.ver" % (sim_id, site))
        bbpfile = os.path.join(a_tmpdir,
                               "%d.%s-hf.acc.bbp" % (sim_id, site))
        bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                       bband_utils.GP_MAX_FILENAME)

        progstring = ("%s " %
                      (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                      "nsfile=%s ewfile=%s udfile=%s " %
                      (nsfile, ewfile, udfile) +
                      "wcc2bbp=0 < %s >> %s 2>&1" %
                      (bbpfile, self.log))
        bband_utils.runprog(progstring, abort_on_error=True,
                            print_cmd=False)

        #
        # Convert LF file to wcc components
        #
        # Create path names and check if their sizes are within bounds
        nsfile = os.path.join(a_tmpdir,
                              "%d.%s-lf.acc.000" % (sim_id, site))
        ewfile = os.path.join(a_tmpdir,
                              "%d.%s-lf.acc.090" % (sim_id, site))
        udfile = os.path.join(a_tmpdir,
                              "%d.%s-lf.acc.ver" % (sim_id, site))
        bbpfile = os.path.join(a_tmpdir,
                               "%d.%s-lf.acc.bbp" % (sim_id, site))
        bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                       bband_utils.GP_MAX_FILENAME)

        progstring = ("%s " %
                      (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                      "nsfile=%s ewfile=%s udfile=%s " %
                      (nsfile, ewfile, udfile) +
                      "wcc2bbp=0 < %s >> %s 2>&1" %
                      (bbpfile, self.log))
        bband_utils.runprog(progstring, abort_on_error=True,
                            print_cmd=False)

        #
        # Process each component
        #
        for entries in config.COMPS:
            compo = entries

            #
            # HF First
            #
            listfile = os.path.join(a_tmpdir, "%s.%s.hf.%s" %
                                    (config.FILTLIST, sta_base, compo))
            bband_utils.check_path_lengths([listfile],
                                           bband_utils.GP_MAX_FILENAME)

            # Create wcc_tfilter input file
            out = open(listfile, 'w')
            # Contains HF input file
            infile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.%s" %
                                  (sim_id, site, compo))
            out.write("%s\n" % infile)
            out.flush()
            out.close()
            # Also check infile
            bband_utils.check_path_lengths([infile],
                                           bband_utils.GP_MAX_FILENAME)

            #
            # Pre-filter and resample HF file
            #
            # Keep a copy of the unfiltered data (wcc_tfilter overwrites)
            shutil.copy2(infile, "%s.prefilter" % infile)
            progstring = ("%s " %
                          (os.path.join(install.A_GP_BIN_DIR,
                                        "wcc_tfilter")) +
                          "filelist=%s order=%d fhi=%f flo=%s " %
                          (listfile, config.HF_ORD, self.hf_fhi,
                           config.HF_FLO) +
                          "inbin=0 outbin=0 phase=%d " %
                          (self.phase) +
                          "outpath=%s >> %s 2>&1" %
                          (a_tmpdir, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            outfile = os.path.join(a_tmpdir,
                                   "%d.%s-hf-resamp.%s" %
                                   (sim_id, site, compo))
            bband_utils.check_path_lengths([outfile],
                                           bband_utils.GP_MAX_FILENAME)

            progstring = ("%s newdt=%f " %
                          (os.path.join(install.A_GP_BIN_DIR,
                                        "wcc_resamp_arbdt"), new_dt) +
                          "pow2=%d infile=%s outfile=%s >> %s 2>&1" %
                          (pow2_param, infile, outfile, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            #
            # LF Next
            #
            listfile = os.path.join(a_tmpdir, "%s.%s.lf.%s" %
                                    (config.FILTLIST, sta_base, compo))
            bband_utils.check_path_lengths([listfile],
                                           bband_utils.GP_MAX_FILENAME)

            # Create wcc_tfilter input file
            out = open(listfile, 'w')
            # Contains LF input file
            infile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.%s" %
                                  (sim_id, site, compo))
            out.write("%s\n" % infile)
            out.flush()
            out.close()
            # Also check infile
            bband_utils.check_path_lengths([infile],
                                           bband_utils.GP_MAX_FILENAME)

            #
            # Pre-filter and resample LF file
            #
            shutil.copy2(infile, "%s.prefilter" % infile)
            if not self.using_3d:
                # NOTE(review): this is the only runprog call without
                # abort_on_error=True -- presumably intentional; verify
                progstring = ("%s " %
                              (os.path.join(install.A_GP_BIN_DIR,
                                            "wcc_tfilter")) +
                              "filelist=%s order=%d fhi=%f flo=%s " %
                              (listfile, config.LF_ORD, config.LF_FHI,
                               self.lf_flo) +
                              "inbin=0 outbin=0 phase=%d " %
                              (self.phase) +
                              "outpath=%s >> %s 2>&1 " %
                              (a_tmpdir, self.log))
                bband_utils.runprog(progstring, print_cmd=False)

            outfile = os.path.join(a_tmpdir,
                                   "%d.%s-lf-resamp.%s" %
                                   (sim_id, site, compo))
            bband_utils.check_path_lengths([outfile],
                                           bband_utils.GP_MAX_FILENAME)

            progstring = ("%s " %
                          (os.path.join(install.A_GP_BIN_DIR,
                                        "wcc_resamp_arbdt")) +
                          "newdt=%f pow2=%d " %
                          (new_dt, pow2_param) +
                          "infile=%s outfile=%s >> %s 2>&1" %
                          (infile, outfile, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            #
            # Add LF and HF resampled acc seismograms
            #
            # Check all path lengths
            infile1 = os.path.join(a_tmpdir,
                                   "%d.%s-lf-resamp.%s" %
                                   (sim_id, site, compo))
            infile2 = os.path.join(a_tmpdir,
                                   "%d.%s-hf-resamp.%s" %
                                   (sim_id, site, compo))
            outfile = os.path.join(a_tmpdir,
                                   "%d.%s.acc.add.%s" %
                                   (sim_id, site, compo))
            bband_utils.check_path_lengths([infile1, infile2, outfile],
                                           bband_utils.GP_MAX_FILENAME)

            progstring = ("%s " %
                          (os.path.join(install.A_GP_BIN_DIR,
                                        "wcc_add")) +
                          "f1=1.00 t1=%f inbin1=0 infile1=%s " %
                          (config.LF_TSTART, infile1) +
                          "f2=1.00 t2=%f inbin2=0 infile2=%s " %
                          (config.HF_TSTART, infile2) +
                          "outbin=0 outfile=%s >> %s 2>&1" %
                          (outfile, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            #
            # Create combined velocity files
            #
            # Check path lengths
            filein = os.path.join(a_tmpdir,
                                  "%d.%s.acc.add.%s" %
                                  (sim_id, site, compo))
            fileout = os.path.join(a_tmpdir,
                                   "%d.%s.%s" %
                                   (sim_id, site, compo))
            bband_utils.check_path_lengths([filein, fileout],
                                           bband_utils.GP_MAX_FILENAME)

            # Integrate acceleration back to velocity
            cmd = ("%s integ=1 filein=%s fileout=%s" %
                   (os.path.join(install.A_GP_BIN_DIR, "integ_diff"),
                    filein, fileout))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # We have all the component files, create velocity seismogram

        # Create path names and check if their sizes are within bounds
        nsfile = os.path.join(a_tmpdir,
                              "%d.%s.000" % (sim_id, site))
        ewfile = os.path.join(a_tmpdir,
                              "%d.%s.090" % (sim_id, site))
        udfile = os.path.join(a_tmpdir,
                              "%d.%s.ver" % (sim_id, site))
        bbpfile = os.path.join(a_tmpdir,
                               "%d.%s.bbp" % (sim_id, site))
        bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                       bband_utils.GP_MAX_FILENAME)

        progstring = ("%s wcc2bbp=1 " %
                      (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                      'title="Sim NGAH, stat=%s" ' % site +
                      'nsfile=%s ewfile=%s udfile=%s > %s 2>> %s' %
                      (nsfile, ewfile, udfile, bbpfile, self.log))
        bband_utils.runprog(progstring, abort_on_error=True,
                            print_cmd=False)

        # Copy velocity bbp file to outdir
        shutil.copy2(os.path.join(a_tmpdir,
                                  "%d.%s.bbp" % (sim_id, site)),
                     os.path.join(a_outdir,
                                  "%d.%s.vel.bbp" % (sim_id, site)))

        # Also create acceleration bbp file in outdir

        # Create path names and check if their sizes are within bounds
        nsfile = os.path.join(a_tmpdir,
                              "%d.%s.000" % (sim_id, site))
        ewfile = os.path.join(a_tmpdir,
                              "%d.%s.090" % (sim_id, site))
        udfile = os.path.join(a_tmpdir,
                              "%d.%s.ver" % (sim_id, site))
        bbpfile = os.path.join(a_tmpdir,
                               "%d.%s.bbp" % (sim_id, site))
        bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                       bband_utils.GP_MAX_FILENAME)

        cmd = ("%s " %
               (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
               "nsfile=%s ewfile=%s udfile=%s " %
               (nsfile, ewfile, udfile) +
               "wcc2bbp=0 < %s >> %s 2>&1" %
               (bbpfile, self.log))
        bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        for comp in config.COMPS:
            # Create path names and check if their sizes are within bounds
            filein = os.path.join(a_tmpdir,
                                  "%d.%s.%s" % (sim_id, site, comp))
            fileout = os.path.join(a_tmpdir,
                                   "%d.%s.acc.%s" % (sim_id, site, comp))
            bband_utils.check_path_lengths([filein, fileout],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s diff=1 filein=%s fileout=%s" %
                   (os.path.join(install.A_GP_BIN_DIR, "integ_diff"),
                    filein, fileout))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # Create path names and check if their sizes are within bounds
        nsfile = os.path.join(a_tmpdir,
                              "%d.%s.acc.000" % (sim_id, site))
        ewfile = os.path.join(a_tmpdir,
                              "%d.%s.acc.090" % (sim_id, site))
        udfile = os.path.join(a_tmpdir,
                              "%d.%s.acc.ver" % (sim_id, site))
        bbpfile = os.path.join(a_tmpdir,
                               "%d.%s.acc.bbp" % (sim_id, site))
        bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                       bband_utils.GP_MAX_FILENAME)

        cmd = ("%s " %
               (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
               "nsfile=%s ewfile=%s udfile=%s " %
               (nsfile, ewfile, udfile) +
               "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
               (bbpfile, self.log))
        bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # Copy acceleration bbp file to outdir
        shutil.copy2(os.path.join(a_tmpdir,
                                  "%d.%s.acc.bbp" % (sim_id, site)),
                     os.path.join(a_outdir,
                                  "%d.%s.acc.bbp" % (sim_id, site)))

    print("Match Completed".center(80, '-'))
sys.exit(1) # Create temp dir TMPDIR = tempfile.mkdtemp(prefix="bbp-") resid_file = os.path.join(TMPDIR, "bbp-rd50-resid.txt") log_file = os.path.join(TMPDIR, "bbp-rd50-resid.log") # Get input parameters station_list = sys.argv[1] src_file = sys.argv[2] sim_id_1 = int(sys.argv[3]) sim_id_2 = int(sys.argv[4]) output_dir = sys.argv[5] # Create directory paths install = InstallCfg.getInstance() config = GPGofCfg() a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id_1)) a_outdir1 = os.path.join(install.A_OUT_DATA_DIR, str(sim_id_1)) a_outdir2 = os.path.join(install.A_OUT_DATA_DIR, str(sim_id_2)) # Src file a_srcfile = os.path.join(a_indir, src_file) src_keys = bband_utils.parse_src_file(a_srcfile) # Station file a_statfile = os.path.join(a_indir, station_list) slo = StationList(a_statfile) site_list = slo.getStationList() # Capture event_label
def run(self):
    """
    Runs the GMPEs for the six parameters in Rezaeian (2015)

    For each station in the station list, computes the mean values
    of the six model parameters via self.calculate_mean_values(),
    then draws self.number_of_samples correlated realizations of the
    total error term and maps each realization into physical space.
    The per-station means and sample standard deviations are written
    to <sim_id>.rzz2015gmpe.txt and plots are created via self.plot().
    """
    print("RZZ2015 GMPE".center(80, '-'))

    # Load configuration, set sim_id
    install = InstallCfg.getInstance()
    sim_id = self.sim_id

    # Build directory paths
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
    a_validation_outdir = os.path.join(a_outdir,
                                       "validations",
                                       "rzz2015_gmpe")

    # Make sure the output and tmp directories exist
    bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir,
                        a_validation_outdir],
                       print_cmd=False)

    # Source file, parse it!
    a_srcfile = os.path.join(a_indir, self.srcfile)
    self.src_keys = bband_utils.parse_src_file(a_srcfile)

    # Now the file paths
    self.log = os.path.join(a_logdir, "%d.rzz2015gmpe.log" % (sim_id))
    sta_file = os.path.join(a_indir, self.stations)

    # Get station list
    slo = StationList(sta_file)
    site_list = slo.getStationList()

    # Initialize random seed from the SRC file so results are reproducible
    np.random.seed(int(self.src_keys['seed']))

    # Create output file, add header
    out_file = open(os.path.join(a_validation_outdir,
                                 '%d.rzz2015gmpe.txt' %
                                 (self.sim_id)), 'w')
    out_file.write("#station, r_rup, vs_30,"
                   " ai_mean, d595_mean, tmid_mean,"
                   " wmid_mean, wslp_mean, zeta_mean,"
                   " ai_stddev, d595_stddev, tmid_stddev,"
                   " wmid_stddev, wslp_stddev, zeta_stddev\n")

    # Go through each station
    for site in site_list:
        stat = site.scode
        print("==> Processing station: %s" % (stat))

        # Calculate Rrup (closest distance to the rupture surface)
        origin = (self.src_keys['lon_top_center'],
                  self.src_keys['lat_top_center'])
        dims = (self.src_keys['fault_length'], self.src_keys['dlen'],
                self.src_keys['fault_width'], self.src_keys['dwid'],
                self.src_keys['depth_to_top'])
        mech = (self.src_keys['strike'], self.src_keys['dip'],
                self.src_keys['rake'])

        site_geom = [float(site.lon), float(site.lat), 0.0]
        (fault_trace1, up_seis_depth,
         low_seis_depth, ave_dip,
         dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)
        _, rrup, _ = putils.DistanceToSimpleFaultSurface(site_geom,
                                                         fault_trace1,
                                                         up_seis_depth,
                                                         low_seis_depth,
                                                         ave_dip)

        vs30 = site.vs30
        mag = self.src_keys['magnitude']

        # Fault type is 1 (Reverse) unless condition below is met
        # Then it is 0 (Strike-pp)
        fault_type = 1
        rake = self.src_keys['rake']
        # Rake near 0/180 (or -180) indicates strike-slip motion
        if ((rake >= -180 and rake < -150) or
            (rake >= -30 and rake <= 30) or
            (rake > 150 and rake <= 180)):
            fault_type = 0

        #rrup = 13.94
        #fault_type = 1
        #vs30 = 659.6
        #mag = 7.35

        [ai_mean, d595_mean, tmid_mean,
         wmid_mean, wslp_mean,
         zeta_mean] = self.calculate_mean_values(rrup, vs30,
                                                 mag, fault_type)

        # Randomize parameters using standard deviations and correlations
        sta_ai = []
        sta_d595 = []
        sta_tmid = []
        sta_wmid = []
        sta_wslp = []
        sta_zeta = []

        # Simulate number_of_samples realizations of the error
        # term for each parameter
        for _ in range(0, self.number_of_samples):
            # Simulate zero-mean normal correlated parameters with
            # stdv = sqrt(sigmai^2+taui^2)
            # totalerror = eps+etha=[eps1+etha1 eps2+etha2 eps3+etha3 eps4+etha4
            #                        eps5+etha5 eps6+etha6]

            # mean error vector
            # m_totalerror = [0, 0, 0, 0, 0, 0]

            # Covariance matrix
            # NOTE(review): std1..std6 and s_total_error are
            # loop-invariant; they could be hoisted above this loop
            # without changing the random stream
            std1 = np.sqrt(self.sigma1**2 + self.tau1**2)
            std2 = np.sqrt(self.sigma2**2 + self.tau2**2)
            std3 = np.sqrt(self.sigma3**2 + self.tau3**2)
            std4 = np.sqrt(self.sigma4**2 + self.tau4**2)
            std5 = np.sqrt(self.sigma5**2 + self.tau5**2)
            std6 = np.sqrt(self.sigma6**2 + self.tau6**2)
            s_total_error = [[std1**2,
                              std1 * std2 * self.rho_totalerror[0][1],
                              std1 * std3 * self.rho_totalerror[0][2],
                              std1 * std4 * self.rho_totalerror[0][3],
                              std1 * std5 * self.rho_totalerror[0][4],
                              std1 * std6 * self.rho_totalerror[0][5]],
                             [std2 * std1 * self.rho_totalerror[1][0],
                              std2**2,
                              std2 * std3 * self.rho_totalerror[1][2],
                              std2 * std4 * self.rho_totalerror[1][3],
                              std2 * std5 * self.rho_totalerror[1][4],
                              std2 * std6 * self.rho_totalerror[1][5]],
                             [std3 * std1 * self.rho_totalerror[2][0],
                              std3 * std2 * self.rho_totalerror[2][1],
                              std3**2,
                              std3 * std4 * self.rho_totalerror[2][3],
                              std3 * std5 * self.rho_totalerror[2][4],
                              std3 * std6 * self.rho_totalerror[2][5]],
                             [std4 * std1 * self.rho_totalerror[3][0],
                              std4 * std2 * self.rho_totalerror[3][1],
                              std4 * std3 * self.rho_totalerror[3][2],
                              std4**2,
                              std4 * std5 * self.rho_totalerror[3][4],
                              std4 * std6 * self.rho_totalerror[3][5]],
                             [std5 * std1 * self.rho_totalerror[4][0],
                              std5 * std2 * self.rho_totalerror[4][1],
                              std5 * std3 * self.rho_totalerror[4][2],
                              std5 * std4 * self.rho_totalerror[4][3],
                              std5**2,
                              std5 * std6 * self.rho_totalerror[4][5]],
                             [std6 * std1 * self.rho_totalerror[5][0],
                              std6 * std2 * self.rho_totalerror[5][1],
                              std6 * std3 * self.rho_totalerror[5][2],
                              std6 * std4 * self.rho_totalerror[5][3],
                              std6 * std5 * self.rho_totalerror[5][4],
                              std6**2]]

            # Matlab returns upper-triangular while Python returns
            # lower-triangular by default -- no need to transpose later!
            r_total_error = np.linalg.cholesky(s_total_error)
            y_total_error = np.random.normal(0, 1, 6)
            total_error = np.dot(r_total_error, y_total_error)

            # Generate randomize parameters in the standardnormal space: ui
            u1 = (self.beta1[0] +
                  self.beta1[1] * (mag / 7.0) +
                  self.beta1[2] * fault_type +
                  self.beta1[3] * math.log(rrup / 25.0) +
                  self.beta1[4] * math.log(vs30 / 750.0)) + total_error[0]
            u2 = (self.beta2[0] +
                  self.beta2[1] * mag +
                  self.beta2[2] * fault_type +
                  self.beta2[3] * rrup +
                  self.beta2[4] * vs30) + total_error[1]
            u3 = (self.beta3[0] +
                  self.beta3[1] * mag +
                  self.beta3[2] * fault_type +
                  self.beta3[3] * rrup +
                  self.beta3[4] * vs30) + total_error[2]
            u4 = (self.beta4[0] +
                  self.beta4[1] * mag +
                  self.beta4[2] * fault_type +
                  self.beta4[3] * rrup +
                  self.beta4[4] * vs30) + total_error[3]
            u5 = (self.beta5[0] +
                  self.beta5[1] * mag +
                  self.beta5[2] * fault_type +
                  self.beta5[3] * rrup +
                  self.beta5[4] * vs30) + total_error[4]
            u6 = (self.beta6[0] +
                  self.beta6[1] * mag +
                  self.beta6[2] * fault_type +
                  self.beta6[3] * rrup +
                  self.beta6[4] * vs30) + total_error[5]

            # Transform parameters ui from standardnormal to the
            # physical space:
            # thetai (constraint: tmid < d_5_95, removed)
            # The marginal distribution constants below are hard-coded
            # model values -- presumably from Rezaeian (2015); verify
            # against the publication before changing
            theta1 = norm.ppf(norm.cdf(u1), -4.8255, 1.4318)
            theta2 = 5.0 + (45 - 5) * beta.ppf(norm.cdf(u2),
                                               1.1314, 2.4474)
            theta3 = 0.5 + (40 - 0.5) * beta.ppf(norm.cdf(u3),
                                                 1.5792, 3.6405)
            theta4 = gamma.ppf(norm.cdf(u4), 4.0982, scale=1.4330)
            theta5 = self.slpinv(norm.cdf(u5), 17.095, 6.7729,
                                 4.8512, -2, 0.5)
            theta6 = 0.02 + (1 - 0.02) * beta.ppf(norm.cdf(u6),
                                                  1.4250, 5.7208)

            sta_ai.append(math.exp(theta1))
            sta_d595.append(theta2)
            sta_tmid.append(theta3)
            sta_wmid.append(theta4)
            sta_wslp.append(theta5)
            sta_zeta.append(theta6)

        # Write output to gmpe file
        out_file.write("%s, %7.4f, %7.2f, " %
                       (stat, rrup, vs30) +
                       "%7.4f, %7.4f, %7.4f, %7.4f, %7.4f, %7.4f, " %
                       (ai_mean, d595_mean, tmid_mean,
                        wmid_mean, wslp_mean, zeta_mean) +
                       "%7.4f, %7.4f, %7.4f, %7.4f, %7.4f, %7.4f\n" %
                       (np.std(sta_ai), np.std(sta_d595),
                        np.std(sta_tmid), np.std(sta_wmid),
                        np.std(sta_wslp), np.std(sta_zeta)))

        ## Write output to file
        #sta_out_file = open(os.path.join(a_validation_outdir,
        #                                 '%d.rzz2015gmpe.%s.txt' %
        #                                 (self.sim_id, stat)), 'w')
        #sta_out_file.write("#ai(s.g^2), d595(s), tmid(s), "
        #                   "wmid(Hz), wslp(Hz/sec), zeta(ratio)\n")
        #for ai, d595, tmid, wmid, wslp, zeta in zip(sta_ai, sta_d595,
        #                                            sta_tmid, sta_wmid,
        #                                            sta_wslp, sta_zeta):
        #    sta_out_file.write("%7.4f, %7.4f, %7.4f, %7.4f, %7.4f, %7.4f\n" %
        #                       (ai, d595, tmid, wmid, wslp, zeta))
        #sta_out_file.close()

        # Generate Plots
        self.plot(stat, a_validation_outdir, rrup, fault_type, vs30, mag,
                  sta_ai, sta_d595, sta_tmid,
                  sta_wmid, sta_wslp, sta_zeta,
                  ai_mean, d595_mean, tmid_mean,
                  wmid_mean, wslp_mean, zeta_mean)

    # Close output file
    out_file.close()

    print("RZZ2015 GMPE Completed".center(80, '-'))
def main():
    """
    Parse command line options and create the needed files/directories
    for a batch of validation simulations (src/xml/pbs files).
    """
    # Detect BBP installation
    bbp_install = InstallCfg.getInstance()

    # Get GMPE group names
    gmpe_groups_available = gmpe_config.GMPES.keys()
    gmpe_groups_available_lc = [gmpe.lower() for gmpe in
                                gmpe_groups_available]

    prog_base = os.path.basename(sys.argv[0])
    usage = "usage: %s [options]" % (prog_base)
    parser = optparse.OptionParser(usage)
    parser.add_option("-c", "--codebase", type="string", action="store",
                      dest="codebase",
                      help="Codebase for the simulation: %s" %
                      (CODEBASES))
    parser.add_option("-e", "--event", type="string", action="store",
                      dest="event",
                      help="Validation event (should be configured in BBP)")
    parser.add_option("-d", "--dir", type="string", action="store",
                      dest="simdir",
                      help="Simulation directory")
    parser.add_option("--skip-rupgen", action="store_true",
                      dest="skiprupgen",
                      help="Skip the rupture generator, run only 1 simulation")
    parser.add_option("--hypo-rand", action="store_true",
                      dest="hyporand",
                      help="Enables hypocenter randomization")
    parser.add_option("--no-hypo-rand", action="store_false",
                      dest="hyporand",
                      help="Disables hypocenter randomization")
    parser.add_option("-n", "--num-simulations", type="int",
                      action="store", dest="numsim",
                      help="Number of simulations to run")
    parser.add_option("-w", "--walltime", type="int", action="store",
                      dest="walltime",
                      help="Number of hours for walltime")
    parser.add_option("--email", type="string", action="store",
                      dest="email",
                      help="Email for job notifications")
    parser.add_option("--new-nodes", action="store_true",
                      dest="newnodes",
                      help="Schedule the job in the new HPCC nodes")
    parser.add_option("--save-tmpdata", action="store_true",
                      dest="savetemp",
                      help="Save the contents of the tmpdata directory")
    parser.add_option("--only-rup", action="store_true",
                      dest="only_rup",
                      help="Only runs the rupture generator")
    parser.add_option("-g", "--gmpe-group", type="string", action="store",
                      dest="gmpe_group_name",
                      help="GMPE group: %s" % (gmpe_groups_available_lc))
    parser.add_option("-a", "--all-metrics", action="store_true",
                      dest="allmetrics",
                      help="Calculate all metrics")
    parser.add_option("--var", "--variation", type="int", action="store",
                      dest="variation",
                      help="seed variation (default 1)")
    parser.add_option("--seg", "--segment", type="int", action="store",
                      dest="segment",
                      help="Indicates simulation part of multiseg run")
    parser.add_option("--first-seg-dir", type="string", action="store",
                      dest="first_seg_dir",
                      help="required for multi-segment segments 2..n")
    parser.add_option("-s", "--site", action="store_true",
                      dest="site_response",
                      help="Use site response module")

    (options, _) = parser.parse_args()

    # Check if using new HPCC nodes
    if options.newnodes:
        newnodes = True
    else:
        newnodes = False

    # Check if multi-segment simulation
    if options.segment:
        multiseg = True
        segment = options.segment
    else:
        multiseg = False
        # BUG FIX: segment was previously left unbound here, causing a
        # NameError later when passed to generate_src_files/generate_xml
        segment = None

    # Check for first segment directory
    if options.first_seg_dir is not None:
        first_seg_dir = os.path.abspath(options.first_seg_dir)
        if not os.path.exists(first_seg_dir):
            print("First segment directory does not exist: %s" %
                  (first_seg_dir))
            sys.exit(1)
    else:
        first_seg_dir = None
        if multiseg and segment > 1:
            print("Must specify first segment directory!")
            sys.exit(1)

    # Check for variation sequence
    if options.variation:
        variation = options.variation
    else:
        if multiseg:
            # If a multisegment run, variation defaults to the segment number
            variation = segment
        else:
            # Otherwise, we use 1
            variation = 1

    # Check if user specified custom walltime
    if options.walltime:
        if options.walltime < 1:
            print("Walltime must be at least 1 hour!")
            sys.exit(1)
        walltime = options.walltime
    else:
        if newnodes:
            walltime = 24
        else:
            walltime = 300

    # Check if we need to calculate extra metrics
    if options.allmetrics:
        allmetrics = True
    else:
        allmetrics = False

    # Check if user wants to save the contents of tmpdata
    if options.savetemp:
        savetemp = True
    else:
        savetemp = False

    # Check if user wants to only run the rupture generator
    if options.only_rup:
        only_rup = True
    else:
        only_rup = False

    # Validate codebase to use
    # BUG FIX: the original mixed Python 2 print statements with the
    # Python 3 print() calls used elsewhere in this function/file
    codebase = options.codebase
    if codebase is None:
        print("Please specify a codebase!")
        sys.exit(1)
    codebase = codebase.lower()
    if codebase not in CODEBASES:
        print("Codebase needs to be one of: %s" % (CODEBASES))
        sys.exit(1)

    # Check if users wants to run site response module
    if options.site_response:
        site_response = True
        if codebase not in CODEBASES_SITE:
            print("Cannot use site response with method: %s" % (codebase))
            sys.exit(1)
    else:
        site_response = False

    # Check for event
    event = options.event
    if event is None:
        print("Please provide a validation event!")
        sys.exit(1)
    event_names = validation_cfg.VE_EVENTS.get_all_names()
    events = [v_event.lower() for v_event in event_names]
    if event.lower() not in events:
        print("Event %s does not appear to be properly configured on BBP" %
              (event))
        print("Available options are: %s" % (event_names))
        print("Please provide another event or check your BBP installation.")
        sys.exit(1)
    val_obj = validation_cfg.VE_EVENTS.get_event_by_print_name(event)

    # Check if we want to run the rupture generator
    skip_rupgen = options.skiprupgen

    # Check for hypocenter randomization
    if options.hyporand is None:
        print("Please specify --hypo-rand or --no-hypo-rand!")
        sys.exit(1)
    if options.hyporand:
        hypo_rand = True
    else:
        hypo_rand = False

    if not skip_rupgen:
        # Get source file
        try:
            source_file = val_obj.get_input(codebase, "source")
        except KeyError:
            print("Unable to get source file for event %s, codebase %s!" %
                  (event, codebase))
            sys.exit(1)
        if not source_file:
            print("Source file for event %s, codebase %s not specified!" %
                  (event, codebase))
            sys.exit(1)
        # Check for multisegment events
        if isinstance(source_file, str):
            source_file = source_file.strip()
            if multiseg:
                print("This event doesn't look like a multisegment event!")
                sys.exit(1)
        else:
            # Multisegment event
            if not multiseg:
                print("This is a multisegment event! Please specify segment!")
                sys.exit(1)
            source_file = source_file[segment - 1]
    else:
        # No need to get the source file, we start from the srf
        source_file = None
        try:
            srf_file = val_obj.get_input(codebase, "srf").strip()
        except KeyError:
            print("Event %s does not have a srf file for codebase %s!" %
                  (event, codebase))
            sys.exit(1)
        if not srf_file:
            print("Event %s does not have a srf file for codebase %s!" %
                  (event, codebase))
            sys.exit(1)
        # Force number of simulations to 1
        options.numsim = 1

    # Check for the simulation directory
    simdir = options.simdir
    if simdir is None:
        print("Please provide a simulation directory!")
        sys.exit(1)
    simdir = os.path.abspath(simdir)
    if os.path.exists(simdir):
        print("Simulation directory exists: %s" % (simdir))
        # BUG FIX: raw_input() does not exist in Python 3; input() is
        # the equivalent (reads a line, returns a string)
        opt = input("Do you want to delete its contents (y/n)? ")
        if opt.lower() != "y":
            print("Please provide another simulation directory!")
            sys.exit(1)
        opt = input("ARE YOU SURE (y/n)? ")
        if opt.lower() != "y":
            print("Please provide another simulation directory!")
            sys.exit(1)
        # Delete existing directory (we already asked the user twice!!!)
        shutil.rmtree(simdir)

    # Pick up number of simulations to run
    numsim = options.numsim
    if numsim < 1 or numsim > MAX_SIMULATIONS:
        print("Number of simulations should be between 1 and %d" %
              (MAX_SIMULATIONS))
        sys.exit(1)

    # Check for e-mail address
    email = options.email
    if email is None:
        print("Please provide an e-mail address for job notifications")
        sys.exit(1)

    # Figure out which gmpe group to use
    gmpe_group_name = options.gmpe_group_name
    if gmpe_group_name is not None:
        if not gmpe_group_name.lower() in gmpe_groups_available_lc:
            print("Invalid gmpe group name!")
            print("Options are: %s" % (gmpe_groups_available_lc))
            sys.exit(1)
        gmpe_group_index = gmpe_groups_available_lc.index(
            gmpe_group_name.lower())
        gmpe_group_name = gmpe_groups_available[gmpe_group_index]

    # Make sure user has configured the setup_bbp_env.sh script
    setup_bbp_env = os.path.join(bbp_install.A_INSTALL_ROOT,
                                 "utils/batch/setup_bbp_env.sh")
    if not os.path.exists(setup_bbp_env):
        print("Cannot find setup_bbp_env.sh script!")
        print("Expected at: %s" % (setup_bbp_env))
        sys.exit(1)

    # Create simulation directories
    prefix = "%s-%s" % (event.lower(), codebase.lower())
    # Make sure we remove spaces from prefix (e.g. for the "Loma Prieta" event)
    prefix = prefix.replace(" ", '')
    os.makedirs(simdir)
    indir = os.path.join(simdir, "Sims", "indata")
    outdir = os.path.join(simdir, "Sims", "outdata")
    tmpdir = os.path.join(simdir, "Sims", "tmpdata")
    logsdir = os.path.join(simdir, "Sims", "logs")
    xmldir = os.path.join(simdir, "Xml")
    srcdir = os.path.join(simdir, "Src")
    for mdir in [indir, outdir, tmpdir, logsdir, xmldir, srcdir]:
        os.makedirs(mdir)

    # Generate source files if needed
    if source_file is not None:
        generate_src_files(numsim, source_file, srcdir, prefix,
                           hypo_rand, variation, multiseg,
                           segment, first_seg_dir)
    # Generate xml files
    generate_xml(bbp_install, numsim, srcdir, xmldir, logsdir,
                 event, codebase, prefix, skip_rupgen, only_rup,
                 gmpe_group_name, allmetrics, site_response,
                 multiseg, segment)
    # Write pbs file
    write_pbs(bbp_install, numsim, simdir, xmldir,
              email, prefix, newnodes, walltime, savetemp)

    # Write .info file recording the exact command line used
    with open(os.path.join(simdir, "%s.info" % (prefix)), 'w') as info_file:
        info_file.write("# %s\n" % (" ".join(sys.argv)))
def main():
    """
    Parse command line options and create the needed files/directories.

    Validates the codebase, velocity model, source file, station list,
    simulation directory, and notification e-mail; then builds the
    simulation directory tree, splits the station list into per-run
    chunks, generates SRC/XML inputs, and writes the PBS job file.
    Exits with status 1 on any validation failure.
    """
    # Detect BBP installation
    bbp_install = InstallCfg.getInstance()

    prog_base = os.path.basename(sys.argv[0])
    usage = "usage: %s [options]" % (prog_base)
    parser = optparse.OptionParser(usage)
    parser.add_option("-c", "--codebase", type="string", action="store",
                      dest="codebase",
                      help="Codebase for the simulation: %s" % (CODEBASES))
    parser.add_option("-v", "--velocity-model", type="string", action="store",
                      dest="vmodel",
                      help="Velocity model (region) for this simulation")
    parser.add_option("--src", "--source", type="string", action="store",
                      dest="source",
                      help="Source description file for the simulation")
    parser.add_option("--stl", "--station-list", type="string", action="store",
                      dest="station_list",
                      help="Station list file for the simulation")
    parser.add_option("-d", "--dir", type="string", action="store",
                      dest="simdir",
                      help="Simulation directory")
    parser.add_option("-n", "--num-stations", type="int", action="store",
                      dest="numsta",
                      help="Number of stations per run")
    parser.add_option("--seed", type="int", action="store",
                      dest="new_seed",
                      help="Overrides seed in SRC file")
    parser.add_option("--email", type="string", action="store",
                      dest="email",
                      help="Email for job notifications")
    parser.add_option("--new-nodes", action="store_true", dest="newnodes",
                      help="Schedule the job in the new HPCC nodes")
    parser.add_option("--no-site-response", action="store_true", dest="nosite",
                      help="Disable the site response module for GP/SDSU/UCSB")
    (options, _) = parser.parse_args()

    # Check if using new HPCC nodes
    newnodes = bool(options.newnodes)

    # Check if not using site response
    nosite = bool(options.nosite)

    # Validate codebase to use
    codebase = options.codebase
    if codebase is None:
        print("Please specify a codebase!")
        sys.exit(1)
    codebase = codebase.lower()
    if codebase not in CODEBASES:
        print("Codebase needs to be one of: %s" % (CODEBASES))
        # Bug fix: previously fell through without exiting on an
        # invalid codebase; now abort like every other validation
        sys.exit(1)

    # Check for velocity model
    vmodel_names = velocity_models.get_all_names()
    vmodel = options.vmodel
    if vmodel is None:
        print("Please provide a velocity model (region) for this simulation!")
        print("Available options are: %s" % (vmodel_names))
        sys.exit(1)
    vmodels = [v_model.lower() for v_model in vmodel_names]
    if vmodel.lower() not in vmodels:
        print("Velocity model %s does not appear to be available on BBP" %
              (vmodel))
        print("Available options are: %s" % (vmodel_names))
        print("Please provide another velocity model or check your BBP installation.")
        sys.exit(1)
    # Now get the name with the correct case
    vmodel = vmodel_names[vmodels.index(vmodel.lower())]

    # Get the source file
    source_file = options.source
    if source_file is None:
        print("Please provide a source description (src file)!")
        sys.exit(1)
    # Make it a full path
    source_file = os.path.realpath(source_file)
    # Make sure source file is in the rcf-104 filesystem
    if not "rcf-104" in source_file:
        print("Source file should be in the rcf-104 filesystem!")
        sys.exit(1)
    # Make sure source file exists and is readable
    if not os.path.isfile(source_file) or not os.access(source_file, os.R_OK):
        print("Source file does not seem to be accessible!")
        sys.exit(1)

    # Get the station list
    station_list = options.station_list
    if station_list is None:
        print("Please provide a station list (stl file)!")
        sys.exit(1)
    # Make it a full path
    station_list = os.path.realpath(station_list)
    # Make sure station list is in the rcf-104 filesystem
    if not "rcf-104" in station_list:
        print("Station list should be in the rcf-104 filesystem!")
        sys.exit(1)
    # Make sure station list exists and is readable
    # (typo fix: "foes" -> "does" in the error message)
    if not os.path.isfile(station_list) or not os.access(station_list, os.R_OK):
        print("Station list does not seem to be accessible!")
        sys.exit(1)

    # Check for the simulation directory
    simdir = options.simdir
    if simdir is None:
        print("Please provide a simulation directory!")
        sys.exit(1)
    simdir = os.path.abspath(simdir)
    if os.path.exists(simdir):
        print("Simulation directory exists: %s" % (simdir))
        opt = raw_input("Do you want to delete its contents (y/n)? ")
        if opt.lower() != "y":
            print("Please provide another simulation directory!")
            sys.exit(1)
        opt = raw_input("ARE YOU SURE (y/n)? ")
        if opt.lower() != "y":
            print("Please provide another simulation directory!")
            sys.exit(1)
        # Delete existing directory (we already asked the user twice!!!)
        shutil.rmtree(simdir)

    # Pick up number of stations per simulation
    numsta = options.numsta
    # Robustness fix: also reject a missing -n option (numsta is None)
    if numsta is None or numsta < 1:
        print("Number of stations should be greater than 0")
        sys.exit(1)

    # Check for user-provided seed for this simulation
    new_seed = options.new_seed

    # Check for e-mail address
    email = options.email
    if email is None:
        print("Please provide an e-mail address for job notifications")
        sys.exit(1)

    # Make sure user has configured the setup_bbp_env.sh script
    setup_bbp_env = os.path.join(bbp_install.A_INSTALL_ROOT,
                                 "utils/batch/setup_bbp_env.sh")
    if not os.path.exists(setup_bbp_env):
        print("Cannot find setup_bbp_env.sh script!")
        print("Expected at: %s" % (setup_bbp_env))
        sys.exit(1)

    # Create simulation directories
    prefix = "%s-%s" % (os.path.splitext(os.path.basename(source_file))[0],
                        codebase.lower())
    # Make sure we remove spaces from prefix
    prefix = prefix.replace(" ", '')
    os.makedirs(simdir)
    indir = os.path.join(simdir, "Sims", "indata")
    outdir = os.path.join(simdir, "Sims", "outdata")
    tmpdir = os.path.join(simdir, "Sims", "tmpdata")
    logsdir = os.path.join(simdir, "Sims", "logs")
    xmldir = os.path.join(simdir, "Xml")
    srcdir = os.path.join(simdir, "Src")
    stldir = os.path.join(simdir, "Stl")
    for mdir in [indir, outdir, tmpdir, logsdir, xmldir, srcdir, stldir]:
        os.makedirs(mdir)

    # Generate station lists (one chunk of numsta stations per simulation)
    numsim, stlbase = generate_stl_files(station_list, numsta, stldir)
    if numsim > MAX_SIMULATIONS:
        print("Too many simulations requested!")
        print("Maximum number allowed is %d!" % (MAX_SIMULATIONS))
        print("Try requesting more stations per simulation...")
        sys.exit(1)

    # Generate source files
    generate_src_files(numsim, source_file, srcdir, prefix, new_seed)
    # Generate xml files
    generate_xml(bbp_install, numsim, srcdir, xmldir, logsdir, vmodel,
                 codebase, prefix, stlbase, nosite)
    # Write pbs file
    write_pbs(bbp_install, numsim, simdir, xmldir, email, prefix, newnodes)
def run(self):
    """
    Generate an index file in the outdata directory.

    Collects the simulation metadata (versions, times, method), the
    top-level GoF/bias plots, the SRF rupture plot, and the per-station
    seismogram/spectra products found in outdata, and writes a single
    index-<sim_id>.html page linking to all of them.
    """
    print("GenHTML".center(80, '-'))
    install = InstallCfg.getInstance()
    sim_id = self.sim_id
    # Standard per-simulation directories
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.genhtml.log" % (sim_id))
    a_statfile = os.path.join(a_indir, self.r_stations)
    # Copies of the input files published alongside the HTML page
    a_param_outdir = os.path.join(a_outdir, "param_files")
    a_param_statfile = os.path.join(a_param_outdir, self.r_stations)
    if self.r_src_file is not None and self.r_src_file != "":
        a_src_file = os.path.join(a_indir, self.r_src_file)
        a_param_srcfile = os.path.join(a_param_outdir, self.r_src_file)
        # Pull the random seed out of the SRC file so it can be shown
        src_props = bband_utils.parse_properties(a_src_file)
        if "seed" in src_props:
            seed = src_props["seed"]
        else:
            seed = "not available"
    else:
        # No source description available for this run
        a_src_file = None
        a_param_srcfile = None
    # Make sure tmpdir, outdir exist
    dirs = [a_tmpdir, a_outdir, a_param_outdir]
    bband_utils.mkdirs(dirs, print_cmd=False)
    # Copy station list, srf_file to outdir's param_files directory
    shutil.copy2(a_statfile, a_param_statfile)
    if a_param_srcfile is not None:
        shutil.copy2(a_src_file, a_param_srcfile)
    # Get pointer to the velocity model object
    vel_obj = velocity_models.get_velocity_model_by_name(self.vmodel_name)
    if vel_obj is None:
        raise bband_utils.ParameterError("Cannot find velocity model: %s" %
                                         (self.vmodel_name))
    vel_version = ("%s - %s" % (vel_obj.get_name(), vel_obj.get_version()))
    # Get pointer to validation object, if any
    val_version = None
    if self.val_name:
        val_obj = validation_cfg.VE_EVENTS.get_event_by_name(self.val_name)
        if val_obj is not None:
            val_version = ("%s - %s" % (val_obj.get_print_name(),
                                        val_obj.get_version()))
    #
    # Read and parse the station list with this call
    #
    slo = StationList(a_statfile)
    site_list = slo.getStationList()
    index_file = os.path.join(a_outdir, "index-%d.html" % (sim_id))
    idxout = open(index_file, 'w')
    # --- Simulation metadata table ---
    idxout.write("<html>\n")
    idxout.write("<title>Results for simulation %d</title>\n" % (sim_id))
    idxout.write("<body>\n")
    idxout.write("<h2>Simulation Results</h2>\n")
    idxout.write("<table>\n")
    idxout.write("<tr>\n")
    idxout.write("<td>Broadband Version</td>\n")
    idxout.write("<td>%s</td>\n" % (install.VERSION))
    idxout.write("</tr>\n")
    idxout.write("<tr>\n")
    idxout.write("<td>Velocity model version</td>\n")
    idxout.write("<td>%s</td>\n" % (vel_version))
    idxout.write("</tr>\n")
    if val_version:
        idxout.write("<tr>\n")
        idxout.write("<td>Validation package version</td>\n")
        idxout.write("<td>%s</td>\n" % (val_version))
        idxout.write("</tr>\n")
    if install.start_time is not None:
        idxout.write("<tr>\n")
        idxout.write("<td>Simulation Start Time</td>\n")
        idxout.write("<td>%s</td>\n" %
                     (time.strftime("%a %d %b %Y %X %Z",
                                    install.start_time)))
        idxout.write("</tr>\n")
    # End time is "now" -- this module runs at the end of the workflow
    idxout.write("<tr>\n")
    idxout.write("<td>Simulation End Time</td>\n")
    idxout.write("<td>%s</td>\n" %
                 (time.strftime("%a %d %b %Y %X %Z",
                                time.localtime())))
    idxout.write("</tr>\n")
    idxout.write("<tr>\n")
    idxout.write("<td>Simulation ID</td>\n")
    idxout.write("<td>%d</td>\n" % (sim_id))
    idxout.write("</tr>\n")
    idxout.write("<tr>\n")
    idxout.write("<td>Simulation Method</td>\n")
    idxout.write("<td>%s</td>\n" % (self.method))
    idxout.write("</tr>\n")
    # Add xml file
    if os.path.exists(os.path.join(a_outdir, "%d.xml" % (sim_id))):
        idxout.write("<tr>\n")
        idxout.write("<td>Sim Spec</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%d.xml" % (sim_id)),
                      "%d.xml" % (sim_id)))
        idxout.write("</tr>\n")
    # Add station list and src_file
    if os.path.exists(os.path.join(a_param_outdir, self.r_stations)):
        idxout.write("<tr>\n")
        idxout.write("<td>Station List</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "param_files", self.r_stations),
                      self.r_stations))
        idxout.write("</tr>\n")
    if a_param_srcfile is not None:
        # 'seed' is always bound here: a_param_srcfile is only non-None
        # when the r_src_file branch above ran
        if os.path.exists(os.path.join(a_param_outdir, self.r_src_file)):
            idxout.write("<tr>\n")
            idxout.write("<td>Source Description</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "param_files", self.r_src_file),
                          self.r_src_file))
            idxout.write("</tr>\n")
            idxout.write("<tr>\n")
            idxout.write("<td>Random Seed</td>\n")
            idxout.write('<td>%s</td>\n' % (seed))
            idxout.write("</tr>\n")
    # --- Get bias plots ---
    # The gof*.png glob matches ALL of the more specific patterns below,
    # so the specific matches are identified first and then removed from
    # the generic lists to disambiguate
    dist_lin_plot = glob.glob(os.path.join(a_outdir, "gof-dist-lin*.png"))
    dist_log_plot = glob.glob(os.path.join(a_outdir, "gof-dist-log*.png"))
    plots = glob.glob(os.path.join(a_outdir, "gof*.png"))
    rd50plot = glob.glob(os.path.join(a_outdir, "gof*-rd50.png"))
    gmpegofplot = glob.glob(os.path.join(a_outdir, "gof*-GMPE-*.png"))
    mapgofplot = glob.glob(os.path.join(a_outdir, "gof-map-*.png"))
    # Each variable collapses to a single filename, or "" if absent
    if len(gmpegofplot) == 1:
        gmpegofplot = gmpegofplot[0]
    else:
        gmpegofplot = ""
    if len(mapgofplot) == 1:
        mapgofplot = mapgofplot[0]
    else:
        mapgofplot = ""
    if len(dist_lin_plot) == 1:
        dist_lin_plot = dist_lin_plot[0]
    else:
        dist_lin_plot = ""
    if len(dist_log_plot) == 1:
        dist_log_plot = dist_log_plot[0]
    else:
        dist_log_plot = ""
    if len(rd50plot) == 1:
        rd50plot = rd50plot[0]
    else:
        # Multiple candidates: filter out the plots already claimed by
        # the more specific categories above
        if gmpegofplot:
            rd50plot = [plot for plot in rd50plot if plot != gmpegofplot]
        if mapgofplot:
            rd50plot = [plot for plot in rd50plot if plot != mapgofplot]
        if dist_lin_plot:
            rd50plot = [plot for plot in rd50plot if plot != dist_lin_plot]
        if dist_log_plot:
            rd50plot = [plot for plot in rd50plot if plot != dist_log_plot]
        if len(rd50plot) == 1:
            rd50plot = rd50plot[0]
        else:
            rd50plot = ""
    if len(plots) > 1:
        # Whatever gof*.png remains after removing all known categories
        # is taken to be the response-spectra (Respect) bias plot
        rspplot = [plot for plot in plots if (plot != rd50plot and
                                              plot != gmpegofplot and
                                              plot != mapgofplot and
                                              plot != dist_lin_plot and
                                              plot != dist_log_plot)]
        if len(rspplot) == 1:
            rspplot = rspplot[0]
        else:
            rspplot = ""
    else:
        rspplot = ""
    # Links in the HTML page are relative to a_outdir
    gmpegofplot = os.path.basename(gmpegofplot)
    mapgofplot = os.path.basename(mapgofplot)
    rd50plot = os.path.basename(rd50plot)
    rspplot = os.path.basename(rspplot)
    dist_lin_plot = os.path.basename(dist_lin_plot)
    dist_log_plot = os.path.basename(dist_log_plot)
    # Add RotD50 bias plot
    if rd50plot:
        idxout.write("<tr>\n")
        idxout.write("<td>RotD50 Bias Plot</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%s" % (rd50plot)), "PNG"))
        idxout.write("</tr>\n")
    if mapgofplot:
        idxout.write("<tr>\n")
        idxout.write("<td>RotD50 Map GOF Plot</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%s" % (mapgofplot)), "PNG"))
        idxout.write("</tr>\n")
    # Add RSP bias plot
    if rspplot:
        idxout.write("<tr>\n")
        idxout.write("<td>Respect Bias Plot</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%s" % (rspplot)), "PNG"))
        idxout.write("</tr>\n")
    # Add the GMPE bias plot
    if gmpegofplot:
        idxout.write("<tr>\n")
        idxout.write("<td>GMPE Comparison Bias Plot</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%s" % (gmpegofplot)), "PNG"))
        idxout.write("</tr>\n")
    # Add distance plots
    if dist_lin_plot:
        idxout.write("<tr>\n")
        idxout.write("<td>RotD50 Dist Bias Linear</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%s" % (dist_lin_plot)), "PNG"))
        idxout.write("</tr>\n")
    if dist_log_plot:
        idxout.write("<tr>\n")
        idxout.write("<td>RotD50 Dist Bias Log</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "%s" % (dist_log_plot)), "PNG"))
        idxout.write("</tr>\n")
    # Add station map and kml file
    if os.path.exists(os.path.join(a_outdir, "station_map.png")):
        idxout.write("<tr>\n")
        idxout.write("<td>Station Map</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", "station_map.png"), "PNG"))
        if os.path.exists(os.path.join(a_outdir, "station_map.kml")):
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "station_map.kml"), "KML"))
        idxout.write("</tr>\n")
    # Now get SRF file and plot; only linked when exactly one SRF exists
    srfs = glob.glob(os.path.join(a_outdir, "*.srf"))
    if len(srfs) == 1:
        srffile = os.path.basename(srfs[0])
        srfplot = ("%s.png" %
                   (os.path.basename(os.path.splitext(srffile)[0])))
        if not os.path.exists(os.path.join(a_outdir, srfplot)):
            srfplot = ""
    else:
        srffile = ""
        srfplot = ""
    if srffile:
        idxout.write("<tr>\n")
        idxout.write("<td>Rupture file</td>\n")
        idxout.write('<td><a href="%s">%s</a></td>\n' %
                     (os.path.join(".", srffile), "data"))
        if srfplot:
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", srfplot), "PNG"))
        idxout.write("</tr>\n")
    idxout.write("</table>\n")
    idxout.write("<p><p>\n")
    # --- Per-station product tables ---
    for sits in site_list:
        site = sits.scode
        idxout.write("<p>\n")
        idxout.write("<h2>%s</h2>\n" % (site))
        idxout.write("<table>\n")
        # Find all files
        velfile = "%d.%s.vel.bbp" % (sim_id, site)
        velplot = "%d.%s_velocity_seis.png" % (sim_id, site)
        accfile = "%d.%s.acc.bbp" % (sim_id, site)
        accplot = "%d.%s_acceleration_seis.png" % (sim_id, site)
        rd50file = "%d.%s.rd50" % (sim_id, site)
        rspfile = "%d.%s.rsp" % (sim_id, site)
        # Plots have variable prefixes, so they are found by glob;
        # anything other than a unique match is treated as missing
        rd50plot = glob.glob(os.path.join(a_outdir,
                                          "*_%d_%s_rotd50.png" %
                                          (sim_id, site)))
        if len(rd50plot) == 1:
            rd50plot = os.path.basename(rd50plot[0])
        else:
            rd50plot = ""
        rspplot = glob.glob(os.path.join(a_outdir,
                                         "*_%d_%s_rsp.png" %
                                         (sim_id, site)))
        if len(rspplot) == 1:
            rspplot = os.path.basename(rspplot[0])
        else:
            rspplot = ""
        overlayfile = glob.glob(os.path.join(a_outdir,
                                             "*_%d_%s_overlay.png" %
                                             (sim_id, site)))
        if len(overlayfile) == 1:
            overlayfile = os.path.basename(overlayfile[0])
        else:
            overlayfile = ""
        gmpeplot = glob.glob(os.path.join(a_outdir,
                                          "*_%d_%s_gmpe.png" %
                                          (sim_id, site)))
        if len(gmpeplot) == 1:
            gmpeplot = os.path.basename(gmpeplot[0])
        else:
            gmpeplot = ""
        if os.path.exists(os.path.join(a_outdir, velfile)):
            idxout.write("<tr>\n")
            idxout.write("<td>Velocity</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", velfile), "BBP"))
            if os.path.exists(os.path.join(a_outdir, velplot)):
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", velplot), "PNG"))
            idxout.write("</tr>\n")
        if os.path.exists(os.path.join(a_outdir, accfile)):
            idxout.write("<tr>\n")
            idxout.write("<td>Acceleration</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", accfile), "BBP"))
            if os.path.exists(os.path.join(a_outdir, accplot)):
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", accplot), "PNG"))
            idxout.write("</tr>\n")
        if os.path.exists(os.path.join(a_outdir, rd50file)):
            idxout.write("<tr>\n")
            idxout.write("<td>RotD50</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", rd50file), "data"))
            if rd50plot:
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", rd50plot), "PNG"))
            idxout.write("</tr>\n")
        if os.path.exists(os.path.join(a_outdir, rspfile)):
            idxout.write("<tr>\n")
            idxout.write("<td>Respect</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", rspfile), "data"))
            if rspplot:
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", rspplot), "PNG"))
            idxout.write("</tr>\n")
        if overlayfile:
            idxout.write("<tr>\n")
            idxout.write("<td>Overlay</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", overlayfile), "PNG"))
            idxout.write("</tr>\n")
        if gmpeplot:
            idxout.write("<tr>\n")
            idxout.write("<td>GMPE Plot</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", gmpeplot), "PNG"))
            idxout.write("</tr>\n")
        idxout.write("</table>\n")
    idxout.write("</body>\n")
    idxout.write("</html>\n")
    idxout.close()
    print("==> Wrote file: %s" % (index_file))
    print("GenHTML Completed".center(80, '-'))
def __init__(self):
    """
    Configure paths to the SDSU GoF binaries, the working/summary file
    names, and default values for the GoF (cfggof) and NGA (cfgnga)
    configuration dictionaries.
    """
    install = InstallCfg.getInstance()
    sdsu_bin_dir = install.A_SDSU_BIN_DIR

    # Seismogram components
    self.COMPS = ["000", "090", "ver"]
    self.INPUT_FORMAT_GOF = "GOF"

    # Paths to the individual GoF binaries
    self.GOF_BIN = os.path.join(sdsu_bin_dir, "GOF_MO")
    self.GOF_SpFit_BIN = os.path.join(sdsu_bin_dir, "GOF_SpFit")
    self.GOF_PGX_BIN = os.path.join(sdsu_bin_dir, "GOF_PGX")
    self.GOF_CCFit_BIN = os.path.join(sdsu_bin_dir, "GOF_CCFit")
    self.GOF_DCumEn_BIN = os.path.join(sdsu_bin_dir, "GOF_DCumEn")
    self.GOF_FSComp_BIN = os.path.join(sdsu_bin_dir, "GOF_FSComp")
    self.GOF_InElFit_BIN = os.path.join(sdsu_bin_dir, "GOF_InElFit")
    self.GOF_SAFit16_BIN = os.path.join(sdsu_bin_dir, "GOF_SAFit16")
    self.GOF_SpecDurFit_BIN = os.path.join(sdsu_bin_dir, "GOF_SpecDurFit")
    self.GOF_NGA_BIN = os.path.join(sdsu_bin_dir, "GOF_MO_NGA")

    # Working file names
    self.PARAM_DAT_FILE = "PARAM.dat"
    self.INPUT_SEISMO_1 = "CONCAT_1"
    self.INPUT_SEISMO_2 = "CONCAT_2"
    # Summary file
    self.SUMMARY_FILE = "gof_summary.txt"

    # Per-metric weights, all defaulting to 1.0
    # NOTE: "sepctral_acc" and "spectral_Fit" spellings are kept exactly
    # as-is -- these strings are the lookup keys used at runtime
    weights = {
        "pga": 1.0,
        "pgv": 1.0,
        "pgd": 1.0,
        "psa": 1.0,
        "spectral_Fit": 1.0,
        "cumulative_energy_fit": 1.0,
        "inelastic_elastic_fit": 1.0,
        "sepctral_acc": 1.0,
        "spec_duration": 1.0,
        "data_energy_release_duration": 1.0,
        "cross_correlation": 1.0,
        "fourier_spectrum": 1.0,
    }

    # Output list file associated with each metric
    metric_files = {
        "pga": "GOF_PGA.list",
        "pgv": "GOF_PGV.list",
        "pgd": "GOF_PGD.list",
        "psa": "GOF_PSA.list",
        "spectral_Fit": "GOF_SPECFIT.list",
        "cumulative_energy_fit": "GOF_ENERGYFIT.list",
        "inelastic_elastic_fit": "GOF_InElEl.list",
        "sepctral_acc": "GOF_SAFIT.list",
        "spec_duration": "GOF_SPECDUR.list",
        "data_energy_release_duration": "GOF_DUR.list",
        "cross_correlation": "GOF_CROSSCOR.list",
        "fourier_spectrum": "GOF_FS.list",
    }

    # Main GoF configuration dictionary
    self.cfggof = {
        "input_format": "GOF",
        "input_set_1": "./sample1_1.input",
        "input_set_2": "./sample1_2.input",
        "output_dir": "./output",
        "work_dir": "./work",
        "use_nga": False,
        "num_headers": 0,      # number of headers preceding each seismogram
        "num_station": 1,      # number of stations
        "timesteps_set_1": 0,  # nt, number of timesteps in set 1
        "timesteps_set_2": 0,  # nt, number of timesteps in set 2
        "input_param": "A",    # input format (A,V,D), (meter, sec)
        "seismo_length": 0,    # length of seismograms (seconds)
        "low_cut": 0.1,        # low cut (period)
        "high_cut": 10.0,      # high cut (period)
        "weights": weights,
        "file": metric_files,
    }

    # NGA comparison parameters, zeroed by default
    self.cfgnga = {
        "source_mag": 0.0,
        "dip": 0.0,
        "rake": 0.0,
        "depth_coseismic": 0.0,
        "site_file": "",
    }
def run(self):
    """
    Prepare the parameters for the UW site response code and run it.

    For every station in the station list, invokes the external
    'siteresponse' binary on the station's velocity seismogram, then
    converts the resulting surface acceleration/velocity files to BBP
    format in the output directory and plots both.
    """
    print("Nonlinear Site Response Analysis".center(80, '-'))

    install = InstallCfg.getInstance()
    sim_id = self.sim_id

    # Standard per-simulation directories
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))

    # Read and parse the station list
    station_file = os.path.join(a_indir, self.r_stations)
    stations = StationList(station_file).getStationList()

    site_response_bin = os.path.join(install.A_UW_BIN_DIR, "siteresponse")

    for idx, stat in enumerate(stations):
        station = stat.scode
        print("==> Running nonlinear site response for station: %s" %
              (station))

        # Input velocity seismogram and per-station log file
        vel_file = os.path.join(a_outdir,
                                "%d.%s.vel.bbp" % (sim_id, station))
        log_file = os.path.join(a_logdir,
                                "%d.%s.siteresponse.log" % (sim_id, station))

        # Run the external site response tool; r_locfile holds one
        # location file per station, matched by list position
        cmd = ("%s %s -bbp %s %s %s " %
               (site_response_bin, self.r_locfile[idx],
                vel_file, a_tmpdir, log_file))
        bband_utils.runprog(cmd)

        # Convert the surface results from the tmp directory to BBP
        # format in the output directory (acc first, then vel)
        bbp_outputs = {}
        for kind in ("acc", "vel"):
            tmp_file = os.path.join(a_tmpdir, "surface.%s" % (kind))
            out_file = os.path.join(a_outdir,
                                    "%d.%s.surf.%s.bbp" %
                                    (sim_id, station, kind))
            png_file = os.path.join(a_outdir,
                                    "%d.%s.surf.%s.png" %
                                    (sim_id, station, kind))
            self.convert_srt_to_bbp(tmp_file, out_file, kind)
            bbp_outputs[kind] = (out_file, png_file)
        # NOTE: displacement ("disp") conversion is intentionally
        # disabled in this version

        # Plot the surface seismograms
        for kind in ("acc", "vel"):
            out_file, png_file = bbp_outputs[kind]
            plot_seismograms.plot_seis(station, out_file, sim_id,
                                       kind, png_file)
""" print("Usage: %s <command> <options>" % os.path.basename(sys.argv[0])) print() print("Available commands:") print(" plot <bbp_in> <png_out> - plots bbp_in, generating png_out") print(" comp <bbp1> <bbp2> <png_out> - plots bbp1 and bbp2 into png_out") print(" integrate <acc_bbp> <vel_bbp> - acc_bbp -> integration -> vel_bbp") print(" diff <vel_bbp> <acc_bbp> - vel_bbp -> diff -> acc_bbp") print() sys.exit(0) # ------------------------------------------------------------------------------ # Main # ------------------------------------------------------------------------------ INSTALL = InstallCfg.getInstance() # Check if at least 1 parameter if len(sys.argv) < 2: usage() # Get command CMD = sys.argv[1].lower() if CMD == "plot": if len(sys.argv) < 4: usage() plot(sys.argv[2], sys.argv[3]) elif CMD == "comp": if len(sys.argv) < 5: usage() comp(sys.argv[2], sys.argv[3], sys.argv[4]) elif CMD == "integrate" or CMD == "int":
def run(self):
    """
    This function is the main entry point for this module. It runs
    the gp_gof component.

    For each station with both a simulated and an observed RotD50
    file, computes the rupture distance and appends a residual row to
    the combined rd50-resid file via the GP gen_resid_tbl_3comp
    binary; finally summarizes and plots the residuals.
    """
    print("GP GoF".center(80, '-'))
    # Initialize basic variables
    self.install = InstallCfg.getInstance()
    self.config = GPGofCfg()
    install = self.install
    config = self.config
    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.gp_gof.log" % (sim_id))
    # Input, tmp, and output directories
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_outdir_seis = os.path.join(install.A_OUT_DATA_DIR, str(sim_id),
                                 "obs_seis_%s" % (sta_base))
    a_outdir_gmpe = os.path.join(install.A_OUT_DATA_DIR, str(sim_id),
                                 "gmpe_data_%s" % (sta_base))
    # Source file, parse it!
    a_srcfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                             self.r_srcfile)
    self.src_keys = bband_utils.parse_src_file(a_srcfile)
    # Station file
    a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                              self.r_stations)
    # List of observed seismogram files
    filelist = os.listdir(a_outdir_seis)
    slo = StationList(a_statfile)
    site_list = slo.getStationList()
    # check cutoff value; fall back to the configured default
    if self.max_cutoff is None:
        self.max_cutoff = config.MAX_CDST
    # 1 until the first residual row is written, then 0 -- forwarded to
    # the GP binary so the header is emitted only once
    print_header_rd50 = 1
    # Remove rd50 resid file so repeated runs don't append to stale data
    rd50_resid_output = os.path.join(a_outdir, "%s-%d.rd50-resid.txt" %
                                     (self.comp_label, sim_id))
    if os.path.exists(rd50_resid_output):
        os.remove(rd50_resid_output)
    for site in site_list:
        slon = float(site.lon)
        slat = float(site.lat)
        stat = site.scode
        # Now process rd50 files
        expected_rd50_file = os.path.join(a_outdir, "%d.%s.rd50" %
                                          (sim_id, stat))
        if not os.path.exists(expected_rd50_file):
            # just skip it
            print("Skipping rotd50/psa5 for station %s..." % (stat))
            continue
        # See if the rd50 file exist for comparison. If it doesn't
        # exist, skip this station
        rd50_file = None
        if ("%s.rd50" % (stat)) in filelist:
            rd50_file = "%s.rd50" % (stat)
        else:
            # Skip this station
            continue
        # Calculate Rrup (closest distance to the rupture surface)
        # from the SRC file fault geometry
        origin = (self.src_keys['lon_top_center'],
                  self.src_keys['lat_top_center'])
        dims = (self.src_keys['fault_length'], self.src_keys['dlen'],
                self.src_keys['fault_width'], self.src_keys['dwid'],
                self.src_keys['depth_to_top'])
        mech = (self.src_keys['strike'], self.src_keys['dip'],
                self.src_keys['rake'])
        site_geom = [float(site.lon), float(site.lat), 0.0]
        (fault_trace1, up_seis_depth, low_seis_depth,
         ave_dip, dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)
        _, rrup, _ = putils.DistanceToSimpleFaultSurface(site_geom,
                                                         fault_trace1,
                                                         up_seis_depth,
                                                         low_seis_depth,
                                                         ave_dip)
        # Create path names and check if their sizes are within bounds
        # (the GP Fortran/C tools have a hard filename length limit)
        datafile1 = os.path.join(a_outdir_seis, rd50_file)
        simfile1 = os.path.join(a_outdir, "%d.%s.rd50" % (sim_id, stat))
        outfile = os.path.join(a_outdir, "%s-%d.rd50-resid.txt" %
                               (self.comp_label, self.sim_id))
        bband_utils.check_path_lengths([datafile1, simfile1, outfile],
                                       bband_utils.GP_MAX_FILENAME)
        # Append this station's residuals to the combined output file;
        # stdout/stderr of the binary go to outfile/self.log
        cmd = ("%s/gen_resid_tbl_3comp bbp_format=1 " %
               (install.A_GP_BIN_DIR) +
               "datafile1=%s simfile1=%s " % (datafile1, simfile1) +
               "comp1=psa5n comp2=psa5e comp3=rotd50 " +
               "eqname=%s mag=%s stat=%s lon=%.4f lat=%.4f " %
               (self.comp_label, self.mag, stat, slon, slat) +
               "vs30=%d cd=%.2f " % (site.vs30, rrup) +
               "flo=%f fhi=%f " %
               (site.low_freq_corner, site.high_freq_corner) +
               "print_header=%d >> %s 2>> %s" %
               (print_header_rd50, outfile, self.log))
        bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)
        # Only need to print header the first time
        if print_header_rd50 == 1:
            print_header_rd50 = 0
    # Finished per station processing, now summarize and plot the data
    if os.path.exists(rd50_resid_output):
        self.summarize_rotd50(site_list, a_outdir, a_outdir_gmpe)
    print("GP GoF Completed".center(80, '-'))
def run(self):
    """
    Runs the Anderson GoF code.

    For each station, reads observed and simulated acceleration
    seismograms (.bbp) and RotD50 (.rd50) files, aligns/pads the time
    series, and computes the ten Anderson (2004) goodness-of-fit
    criteria C1-C10 for each configured frequency band in self.B.
    Writes one score table and plot per station, then trims the score
    arrays to the processed stations and writes an event-level summary
    table and plot.

    Side effects: sets self.log and self.dt, fills/trims self.C1..C10
    and self.S1, and writes files under <outdata>/validations/anderson_gof.
    """
    print("Anderson GoF".center(80, '-'))

    # Load configuration, set sim_id
    install = InstallCfg.getInstance()
    sim_id = self.sim_id

    # Build directory paths
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
    a_validation_outdir = os.path.join(a_outdir, "validations",
                                       "anderson_gof")

    # Make sure the output and tmp directories exist
    bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir, a_validation_outdir],
                       print_cmd=False)

    # Now the file paths
    self.log = os.path.join(a_logdir, "%d.anderson.log" % (sim_id))
    sta_file = os.path.join(a_indir, self.stations)
    sta_base = os.path.basename(os.path.splitext(self.stations)[0])
    sims_dir = a_outdir
    obs_dir = os.path.join(a_tmpdir, "obs_seis_%s" % (sta_base))

    # Start with first record
    irec = 0

    # Read station list
    slo = StationList(sta_file)
    site_list = slo.getStationList()

    # Loop over stations
    for site in site_list:
        station = site.scode
        print("==> Processing station: %s" % (station))
        file_sims_acc = os.path.join(sims_dir, "%d.%s.acc.bbp" %
                                     (sim_id, station))
        file_sims_rd50 = os.path.join(sims_dir, "%d.%s.rd50" %
                                      (sim_id, station))
        lowcut = site.low_freq_corner
        highcut = site.high_freq_corner
        # Read simulated acceleration and RotD50 data
        (sims_acc_org_time, sims_acc_org_ns,
         sims_acc_org_ew, sims_acc_org_ver) = np.genfromtxt(
             file_sims_acc, skip_header=2,
             dtype='float64', unpack='TRUE')
        (sims_perd, sims_rd50_ns,
         sims_rd50_ew, sims_rd50_ver) = np.genfromtxt(
             file_sims_rd50, skip_header=2,
             dtype='float64', unpack='TRUE')
        # Read observed acceleration and RotD50 data
        file_obs_acc = os.path.join(obs_dir, "%s.bbp" % (station))
        file_obs_rd50 = os.path.join(obs_dir, "%s.rd50" % (station))
        (obs_acc_org_time, obs_acc_org_ns,
         obs_acc_org_ew, obs_acc_org_ver) = np.genfromtxt(
             file_obs_acc, skip_header=2,
             dtype='float64', unpack='TRUE')
        (obs_perd, obs_rd50_ns,
         obs_rd50_ew, obs_rd50_ver) = np.genfromtxt(
             file_obs_rd50, skip_header=2,
             dtype='float64', unpack='TRUE')

        # Initialize the rd50 arrays
        RD50PER = len(obs_perd)
        rd1 = np.zeros(RD50PER)
        rd2 = np.zeros(RD50PER)
        rd3 = np.zeros(RD50PER)
        rd4 = np.zeros(RD50PER)

        # Resample and align the time series (EW then NS components)
        (obs_acc_time, obs_acc_ew,
         sims_acc_time, sims_acc_ew) = self.align_seismograms(
             obs_acc_org_time, obs_acc_org_ew,
             sims_acc_org_time, sims_acc_org_ew)
        (obs_acc_time, obs_acc_ns,
         sims_acc_time, sims_acc_ns) = self.align_seismograms(
             obs_acc_org_time, obs_acc_org_ns,
             sims_acc_org_time, sims_acc_org_ns)
        obs_dt = obs_acc_time[1] - obs_acc_time[0]
        sims_dt = sims_acc_time[1] - sims_acc_time[0]
        # NOTE(review): if the aligned dts differ, self.dt silently
        # keeps its value from the previous station and processing
        # continues -- confirm this is intended
        if obs_dt == sims_dt:
            self.dt = obs_dt
        fs = 1. / self.dt
        fnyq = 0.5 * fs

        # Compute the number of pads for the time series to have an
        # equal number of points for the fft and for criteria 1 and 2
        (sims_acc_ns, sims_acc_ew,
         obs_acc_ns, obs_acc_ew, ndata) = self.smcpadf(
             sims_acc_ns, sims_acc_ew, obs_acc_ns, obs_acc_ew,
             self.dt, lowcut, 8, highcut, 8, 'FALSE')

        # Start the loop for the different frequency bands
        for iband in range(len(self.B)):
            f1 = self.B[iband][0]
            f2 = self.B[iband][1]
            # Do the job only if the frequency band is within the
            # filtered band and if fnyq is higher than f1
            if f1 >= lowcut and f2 <= highcut and fnyq >= f2:
                T1 = 1. / f1
                T2 = 1. / f2
                t_tmp = sims_perd[(sims_perd <= T1) & (T2 <= sims_perd)]
                # Band-pass filter all four components
                acc_1_flt = self.butter_bandpass(f1, f2, fnyq,
                                                 sims_acc_ns, 2)
                acc_2_flt = self.butter_bandpass(f1, f2, fnyq,
                                                 sims_acc_ew, 2)
                acc_3_flt = self.butter_bandpass(f1, f2, fnyq,
                                                 obs_acc_ns, 2)
                acc_4_flt = self.butter_bandpass(f1, f2, fnyq,
                                                 obs_acc_ew, 2)

                # C8: response spectra restricted to this period band
                rd1 = sims_rd50_ns[(sims_perd <= T1) & (T2 <= sims_perd)]
                rd2 = sims_rd50_ew[(sims_perd <= T1) & (T2 <= sims_perd)]
                rd3 = obs_rd50_ns[(obs_perd <= T1) & (T2 <= obs_perd)]
                rd4 = obs_rd50_ew[(obs_perd <= T1) & (T2 <= obs_perd)]
                self.C8[irec, iband] = np.nanmean(
                    [self.c8_eval(rd1, rd3, t_tmp),
                     self.c8_eval(rd2, rd4, t_tmp)])

                # C9: Fourier amplitude spectra
                # Compute the FFT frequencies
                F = np.fft.fftfreq(ndata, self.dt)
                # Compute the fft and keep the positive in-band bins
                fft_1 = np.fft.fft(sims_acc_ns)
                fft_1 = fft_1[(0. <= F) & (f1 <= F) & (F <= f2)]
                fft_2 = np.fft.fft(sims_acc_ew)
                fft_2 = fft_2[(0. <= F) & (f1 <= F) & (F <= f2)]
                fft_3 = np.fft.fft(obs_acc_ns)
                fft_3 = fft_3[(0. <= F) & (f1 <= F) & (F <= f2)]
                fft_4 = np.fft.fft(obs_acc_ew)
                fft_4 = fft_4[(0. <= F) & (f1 <= F) & (F <= f2)]
                # Slice the FFT frequencies for the working band
                F = F[(f1 <= F) & (F <= f2)]
                fs1 = abs(fft_1) / len(fft_1)
                fs2 = abs(fft_2) / len(fft_2)
                fs3 = abs(fft_3) / len(fft_3)
                fs4 = abs(fft_4) / len(fft_4)
                self.C9[irec, iband] = np.nanmean(
                    [self.c9_eval(fs1, fs3, F),
                     self.c9_eval(fs2, fs4, F)])

                # Time domain: integrate acceleration to velocity and
                # velocity to displacement
                vel_1 = self.integ(acc_1_flt, self.dt)
                vel_2 = self.integ(acc_2_flt, self.dt)
                vel_3 = self.integ(acc_3_flt, self.dt)
                vel_4 = self.integ(acc_4_flt, self.dt)
                dis_1 = self.integ(vel_1, self.dt)
                dis_2 = self.integ(vel_2, self.dt)
                dis_3 = self.integ(vel_3, self.dt)
                dis_4 = self.integ(vel_4, self.dt)
                c11, c31 = self.c13_eval(acc_1_flt, acc_3_flt)
                c12, c32 = self.c13_eval(acc_2_flt, acc_4_flt)
                c21, c41 = self.c24_eval(vel_1, vel_3)
                c22, c42 = self.c24_eval(vel_2, vel_4)
                # BUG FIX: these used to read np.array(c11, c12), which
                # passes the second score as np.array's dtype argument
                # (TypeError at runtime); the pair must be wrapped in a
                # list so nanmean averages the two components
                self.C1[irec, iband] = np.nanmean(np.array([c11, c12]))
                self.C2[irec, iband] = np.nanmean(np.array([c21, c22]))
                self.C3[irec, iband] = np.nanmean(np.array([c31, c32]))
                self.C4[irec, iband] = np.nanmean(np.array([c41, c42]))
                self.C5[irec, iband] = np.nanmean(
                    [self.c5_eval(acc_1_flt, acc_3_flt),
                     self.c5_eval(acc_2_flt, acc_4_flt)])
                self.C6[irec, iband] = np.nanmean(
                    [self.c6_eval(vel_1, vel_3),
                     self.c6_eval(vel_2, vel_4)])
                self.C7[irec, iband] = np.nanmean(
                    [self.c7_eval(dis_1, dis_3),
                     self.c7_eval(dis_2, dis_4)])
                self.C10[irec, iband] = np.nanmean(
                    [self.c10_eval(acc_1_flt, acc_3_flt),
                     self.c10_eval(acc_2_flt, acc_4_flt)])

        # Station score: mean of the per-criterion band averages
        self.S1[irec] = np.nanmean(
            np.array([np.nanmean(self.C1[irec, :]),
                      np.nanmean(self.C2[irec, :]),
                      np.nanmean(self.C3[irec, :]),
                      np.nanmean(self.C4[irec, :]),
                      np.nanmean(self.C5[irec, :]),
                      np.nanmean(self.C6[irec, :]),
                      np.nanmean(self.C7[irec, :]),
                      np.nanmean(self.C8[irec, :]),
                      np.nanmean(self.C9[irec, :]),
                      np.nanmean(self.C10[irec, :])]))

        # Write the per-station score table
        output_file = os.path.join(a_validation_outdir,
                                   "gof-%s-%d-anderson-%s.txt" %
                                   (self.eventname, self.sim_id, station))
        out_file = open(output_file, 'w')
        line = ('#%s%5s%4s%4s%4s%4s%4s%4s%4s%4s%4s\n' %
                ('band', 'C1', 'C2', 'C3', 'C4', 'C5',
                 'C6', 'C7', 'C8', 'C9', 'C10'))
        out_file.write(line)
        for i in range(self.BMAX):
            line = ('%s %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f\n' %
                    (self.BNAMES[i],
                     self.C1[irec, i], self.C2[irec, i],
                     self.C3[irec, i], self.C4[irec, i],
                     self.C5[irec, i], self.C6[irec, i],
                     self.C7[irec, i], self.C8[irec, i],
                     self.C9[irec, i], self.C10[irec, i]))
            out_file.write(line)
        out_file.close()

        # Per-station plot
        output_file = os.path.join(a_validation_outdir,
                                   "gof-%s-%d-anderson-%s.png" %
                                   (self.eventname, self.sim_id, station))
        self.cplots(irec, station, output_file)
        print('===> Station score :', "{:3.1f}".format(self.S1[irec]))
        irec = irec + 1

    print('==> Total number of stations processed: %d' % (irec))

    # Trim the score arrays to the stations actually processed
    self.C1 = self.C1[0:irec, :]
    self.C2 = self.C2[0:irec, :]
    self.C3 = self.C3[0:irec, :]
    self.C4 = self.C4[0:irec, :]
    self.C5 = self.C5[0:irec, :]
    self.C6 = self.C6[0:irec, :]
    self.C7 = self.C7[0:irec, :]
    self.C8 = self.C8[0:irec, :]
    self.C9 = self.C9[0:irec, :]
    self.C10 = self.C10[0:irec, :]
    self.S1 = self.S1[0:irec]

    # Per-band statistics (via self.statts) across all stations
    c1conf = [self.statts(self.C1[:, i]) for i in range(self.BMAX)]
    c2conf = [self.statts(self.C2[:, i]) for i in range(self.BMAX)]
    c3conf = [self.statts(self.C3[:, i]) for i in range(self.BMAX)]
    c4conf = [self.statts(self.C4[:, i]) for i in range(self.BMAX)]
    c5conf = [self.statts(self.C5[:, i]) for i in range(self.BMAX)]
    c6conf = [self.statts(self.C6[:, i]) for i in range(self.BMAX)]
    c7conf = [self.statts(self.C7[:, i]) for i in range(self.BMAX)]
    c8conf = [self.statts(self.C8[:, i]) for i in range(self.BMAX)]
    c9conf = [self.statts(self.C9[:, i]) for i in range(self.BMAX)]
    c10conf = [self.statts(self.C10[:, i]) for i in range(self.BMAX)]
    s1_event = np.nanmean(self.S1)
    print('==> Overall event score:', "{:3.1f}".format(s1_event))

    # Write the event summary table
    output_file = os.path.join(a_validation_outdir,
                               '%d.gof_anderson.%s.txt' %
                               (self.sim_id, self.eventname))
    out_file = open(output_file, 'w')
    line = ('#%s%5s%4s%4s%4s%4s%4s%4s%4s%4s%4s\n' %
            ('band', 'C1', 'C2', 'C3', 'C4', 'C5',
             'C6', 'C7', 'C8', 'C9', 'C10'))
    out_file.write(line)
    for i in range(self.BMAX):
        line = ('%s %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f\n' %
                (self.BNAMES[i],
                 c1conf[i][0], c2conf[i][0], c3conf[i][0],
                 c4conf[i][0], c5conf[i][0], c6conf[i][0],
                 c7conf[i][0], c8conf[i][0], c9conf[i][0],
                 c10conf[i][0]))
        out_file.write(line)
    out_file.close()

    # Event summary plot
    output_file = os.path.join(a_validation_outdir,
                               "gof-%s-%d-anderson-summary.png" %
                               (self.eventname, self.sim_id))
    self.fplots(s1_event, np.asarray(c1conf), np.asarray(c2conf),
                np.asarray(c3conf), np.asarray(c4conf),
                np.asarray(c5conf), np.asarray(c6conf),
                np.asarray(c7conf), np.asarray(c8conf),
                np.asarray(c9conf), np.asarray(c10conf), output_file)
    print("Anderson GoF Completed".center(80, '-'))
def main():
    """
    Parse command line options and create the needed files/directories.

    Validates all inputs (codebase, velocity model, source/SRF files,
    station list, simulation directory, walltime, e-mail), then builds
    the batch directory tree, generates source and XML files, writes
    the PBS submission script, and records the command line in a .info
    file. Exits with status 1 on any invalid input.

    NOTE: this is a Python 2 script (raw_input); print() calls below
    are single-argument and behave identically on Python 2 and 3.
    """
    # Detect BBP installation
    bbp_install = InstallCfg.getInstance()
    prog_base = os.path.basename(sys.argv[0])
    usage = "usage: %s [options]" % (prog_base)
    parser = optparse.OptionParser(usage)
    parser.add_option("-c", "--codebase", type="string", action="store",
                      dest="codebase",
                      help="Codebase for the simulation: %s" % (CODEBASES))
    parser.add_option("-v", "--velocity-model", type="string", action="store",
                      dest="vmodel",
                      help="Velocity model (region) for this simulation")
    parser.add_option("--src", "--source", type="string", action="store",
                      dest="source",
                      help="Source description file for the simulation")
    parser.add_option("--stl", "--station-list", type="string", action="store",
                      dest="station_list",
                      help="Station list file for the simulation")
    parser.add_option("--srf", "--srf-prefix", type="string", action="store",
                      dest="srf_prefix",
                      help="Prefix of SRF files to use, "
                      "only for GP, SDSU and UCSB methods. "
                      "Simulations begin after the rupture generator.")
    parser.add_option("-d", "--dir", type="string", action="store",
                      dest="simdir", help="Simulation directory")
    parser.add_option("--hypo-rand", action="store_true", dest="hyporand",
                      help="Enables hypocenter randomization")
    parser.add_option("--no-hypo-rand", action="store_false", dest="hyporand",
                      help="Disables hypocenter randomization")
    parser.add_option("-n", "--num-simulations", type="int", action="store",
                      dest="numsim", help="Number of simulations to run")
    parser.add_option("--email", type="string", action="store",
                      dest="email", help="Email for job notifications")
    parser.add_option("-w", "--walltime", type="int", action="store",
                      dest="walltime", help="Number of hours for walltime")
    parser.add_option("--new-nodes", action="store_true", dest="newnodes",
                      help="Schedule the job in the new HPCC nodes")
    parser.add_option("--save-tmpdata", action="store_true", dest="savetemp",
                      help="Save the contents of the tmpdata directory")
    parser.add_option("--hdd-min", type="float", action="store",
                      dest="hdd_min",
                      help="Min value for hypo down dip in randomization")
    parser.add_option("--hdd-max", type="float", action="store",
                      dest="hdd_max",
                      help="Max value for hypo down dip in randomization")
    parser.add_option("--has-min", type="float", action="store",
                      dest="has_min",
                      help="Min value for hypo along strike in randomization")
    parser.add_option("--has-max", type="float", action="store",
                      dest="has_max",
                      help="Max value for hypo along strike in randomization")
    parser.add_option("--only-rup", action="store_true", dest="only_rup",
                      help="Only runs the rupture generator")
    parser.add_option("--var", "--variation", type="int", action="store",
                      dest="variation", help="seed variation (default 1)")
    parser.add_option("--multiseg", action="store_true", dest="multiseg",
                      help="Indicates simulation part of multiseg run")
    parser.add_option("--first-seg-dir", type="string", action="store",
                      dest="first_seg_dir",
                      help="required for multi-segment segments 2..n")
    parser.add_option("-s", "--site", action="store_true",
                      dest="site_response", help="Use site response module")
    (options, _) = parser.parse_args()

    # Boolean flags default to False when not given on the command line
    newnodes = bool(options.newnodes)
    multiseg = bool(options.multiseg)
    savetemp = bool(options.savetemp)
    only_rup = bool(options.only_rup)

    # Check for first segment directory
    if options.first_seg_dir is not None:
        first_seg_dir = os.path.abspath(options.first_seg_dir)
        if not os.path.exists(first_seg_dir):
            # Fixed message: previously read "directory for exists"
            print("First segment directory does not exist: %s" %
                  (first_seg_dir))
            sys.exit(1)
    else:
        first_seg_dir = None

    # Check if user specified custom walltime
    if options.walltime:
        if options.walltime < 1:
            print("Walltime must be at least 1 hour!")
            sys.exit(1)
        walltime = options.walltime
    else:
        # Default walltime depends on the node pool
        walltime = 24 if newnodes else 300

    # Check for variation sequence, default is 1
    variation = options.variation if options.variation else 1

    # Validate codebase to use
    codebase = options.codebase
    if codebase is None:
        print("Please specify a codebase!")
        sys.exit(1)
    codebase = codebase.lower()
    if codebase not in CODEBASES:
        print("Codebase needs to be one of: %s" % (CODEBASES))
        # Fixed: previously fell through and continued running with an
        # invalid codebase
        sys.exit(1)

    # Check for velocity model
    vmodel_names = velocity_models.get_all_names()
    vmodel = options.vmodel
    if vmodel is None:
        print("Please provide a velocity model (region) for this simulation!")
        print("Available options are: %s" % (vmodel_names))
        sys.exit(1)
    vmodels = [v_model.lower() for v_model in vmodel_names]
    if vmodel.lower() not in vmodels:
        print("Velocity model %s does not appear to be available on BBP" %
              (vmodel))
        print("Available options are: %s" % (vmodel_names))
        print("Please provide another velocity model "
              "or check your BBP installation.")
        sys.exit(1)
    # Now get the name with the correct case
    vmodel = vmodel_names[vmodels.index(vmodel.lower())]

    # Check if users wants to run site response module
    if options.site_response:
        site_response = True
        if codebase not in CODEBASES_SITE:
            print("Cannot use site response with method: %s" % (codebase))
            sys.exit(1)
    else:
        site_response = False

    # Check for hypocenter randomization
    if options.hyporand is None:
        print("Please specify --hypo-rand or --no-hypo-rand!")
        sys.exit(1)
    hypo_rand = bool(options.hyporand)

    # Define area where hypocenter will be randomized
    hypo_area = {"hdd_min": options.hdd_min,
                 "hdd_max": options.hdd_max,
                 "has_min": options.has_min,
                 "has_max": options.has_max}

    # Get the source file (SRC or SRFs)
    source_file = options.source
    srf_prefix = options.srf_prefix
    if source_file is None and srf_prefix is None:
        print("Please provide either source description (src file) "
              "or a srf prefix!")
        sys.exit(1)
    # If user specified both a source file and a srf prefix, we abort!
    if source_file is not None and srf_prefix is not None:
        # Fixed typo in message: "srf_prefic"
        print("Cannot specify both srf_prefix and source_file!")
        sys.exit(1)

    # If user specified a source file
    if source_file is not None:
        # Make it a full path
        source_file = os.path.realpath(source_file)
        # Make sure source file is in the rcf-104 / scec-00 filesystem
        if not "rcf-104" in source_file and not "scec-00" in source_file:
            print("Source file should be in the rcf-104 / scec-00 "
                  "filesystems!")
            sys.exit(1)
        # Make sure source file exists and is readable
        if (not os.path.isfile(source_file) or
            not os.access(source_file, os.R_OK)):
            print("Source file does not seem to be accessible!")
            sys.exit(1)
        # Create a prefix
        prefix = ("%s-%s" %
                  (os.path.splitext(os.path.basename(source_file))[0],
                   codebase.lower()))

    # If user specified a SRF prefix
    if srf_prefix is not None:
        # Make it a full path
        srf_prefix = os.path.realpath(srf_prefix)
        # Make sure SRF files are in the rcf-104 or scec-00 filesystems
        if not "rcf-104" in srf_prefix and not "scec-00" in srf_prefix:
            print("SRF files should be in the rcf-104 / scec-00 "
                  "filesystems!")
            sys.exit(1)
        # Create a prefix
        prefix = os.path.splitext(os.path.basename(srf_prefix))[0]
    # Make sure we remove spaces from prefix
    prefix = prefix.replace(" ", '')

    # Get the station list
    station_list = options.station_list
    if station_list is None:
        print("Please provide a station list (stl file)!")
        sys.exit(1)
    # Make it a full path
    station_list = os.path.realpath(station_list)
    # Make sure station list is in the rcf-104 or scec-00 filesystems
    if not "rcf-104" in station_list and not "scec-00" in station_list:
        print("Station list should be in the rcf-104 / scec-00 filesystems!")
        sys.exit(1)
    # Make sure station list exists and is readable
    if (not os.path.isfile(station_list) or
        not os.access(station_list, os.R_OK)):
        # Fixed typo in message: "foes" -> "does"
        print("Station list does not seem to be accessible!")
        sys.exit(1)

    # Check for the simulation directory
    simdir = options.simdir
    if simdir is None:
        print("Please provide a simulation directory!")
        sys.exit(1)
    simdir = os.path.abspath(simdir)
    if os.path.exists(simdir):
        print("Simulation directory exists: %s" % (simdir))
        opt = raw_input("Do you want to delete its contents (y/n)? ")
        if opt.lower() != "y":
            print("Please provide another simulation directory!")
            sys.exit(1)
        opt = raw_input("ARE YOU SURE (y/n)? ")
        if opt.lower() != "y":
            print("Please provide another simulation directory!")
            sys.exit(1)
        # Delete existing directory (we already asked the user twice!!!)
        shutil.rmtree(simdir)

    # Pick up number of simulations to run
    numsim = options.numsim
    # Guard against a missing -n flag as well as out-of-range values
    if numsim is None or numsim < 1 or numsim > MAX_SIMULATIONS:
        print("Number of simulations should be between 1 and %d" %
              (MAX_SIMULATIONS))
        sys.exit(1)

    # Check for e-mail address
    email = options.email
    if email is None:
        print("Please provide an e-mail address for job notifications")
        sys.exit(1)

    # Make sure user has configured the setup_bbp_env.sh script
    setup_bbp_env = os.path.join(bbp_install.A_INSTALL_ROOT,
                                 "utils/batch/setup_bbp_env.sh")
    if not os.path.exists(setup_bbp_env):
        print("Cannot find setup_bbp_env.sh script!")
        print("Expected at: %s" % (setup_bbp_env))
        sys.exit(1)

    # Create simulation directories
    os.makedirs(simdir)
    indir = os.path.join(simdir, "Sims", "indata")
    outdir = os.path.join(simdir, "Sims", "outdata")
    tmpdir = os.path.join(simdir, "Sims", "tmpdata")
    logsdir = os.path.join(simdir, "Sims", "logs")
    xmldir = os.path.join(simdir, "Xml")
    srcdir = os.path.join(simdir, "Src")
    for mdir in [indir, outdir, tmpdir, logsdir, xmldir, srcdir]:
        os.makedirs(mdir)

    if srf_prefix is None:
        # Generate source files
        generate_src_files(numsim, source_file, srcdir, prefix,
                           hypo_rand, hypo_area, variation,
                           multiseg, first_seg_dir)
    # Generate xml files
    generate_xml(bbp_install, numsim, srcdir, xmldir, logsdir,
                 vmodel, codebase, prefix, station_list,
                 only_rup, srf_prefix, site_response)
    # Write pbs file
    write_pbs(bbp_install, numsim, simdir, xmldir,
              email, prefix, newnodes, walltime, savetemp)
    # Write .info file recording the exact command line used
    info_file = open(os.path.join(simdir, "%s.info" % (prefix)), 'w')
    info_file.write("# %s\n" % (" ".join(sys.argv)))
    info_file.close()
def run(self):
    """
    Extracts needed seismograms from the bin file.

    Converts the SDSU binary seismogram file to per-station BBP files
    with the bin2bbp tool, copies the files for stations present in
    the (sub)station list into the sim tmpdir as <sim_id>.<stat>-lf.bbp,
    and deletes the extracted files that are not needed.
    """
    print("SDSU Seismograms".center(80, '-'))
    install = InstallCfg.getInstance()
    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_tmpdir_mod = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                "sdsu_seismograms_%s" % (sta_base))
    binfile = os.path.join(a_indir, self.r_binfile)
    #
    # Make sure the output and tmp directories exist
    #
    bband_utils.mkdirs([a_tmpdir, a_tmpdir_mod, a_outdir],
                       print_cmd=False)
    a_full_stations = os.path.join(a_indir, self.r_full_stations)
    a_stations = os.path.join(a_indir, self.r_stations)
    # Copy station files to the tmpdir_mod directory
    cmd = "cp %s %s" % (a_full_stations,
                        os.path.join(a_tmpdir_mod, self.r_full_stations))
    bband_utils.runprog(cmd)
    cmd = "cp %s %s" % (a_stations,
                        os.path.join(a_tmpdir_mod, self.r_stations))
    bband_utils.runprog(cmd)
    #
    # Make sure path names are within the limits accepted by the
    # Fortran code
    #
    if len(binfile) >= bband_utils.SDSU_MAX_FILENAME:
        raise ValueError("binfile is %d characters long, maximum is %d" %
                         (len(binfile), bband_utils.SDSU_MAX_FILENAME))
    # bin2bbp writes its output in the current directory, so chdir there
    old_cwd = os.getcwd()
    os.chdir(a_tmpdir_mod)
    # Get number of stations in seismogram file, this is a
    # variation of the code in station_list.py
    # Maps station code -> 1-based position in the full station file,
    # which is also the numeric filename bin2bbp produces
    stat_names = {}
    num_stations = 0
    stat_fp = open(a_full_stations, 'r')
    for line in stat_fp:
        if line.startswith('#'):
            continue
        sta = line.split()
        if len(sta) >= 3:
            scode = sta[2]
            num_stations = num_stations + 1
            stat_names[scode] = num_stations
    stat_fp.close()
    # Create list of stations to save
    slo = StationList(a_stations)
    site_list = slo.getStationList()
    save_stat_names = []
    for stat in site_list:
        save_stat_names.append(stat.scode)
    # Convert to bbp format
    cmd = "%s/bin2bbp %s %d" % (install.A_SDSU_BIN_DIR, binfile,
                                len(stat_names))
    bband_utils.runprog(cmd)
    # Copy over the names
    for stat in save_stat_names:
        if not stat in stat_names:
            continue
        sta_id = stat_names[stat]
        shutil.copy2("%s/%d.bbp" % (a_tmpdir_mod, sta_id),
                     "%s/%d.%s-lf.bbp" % (a_tmpdir, sim_id, stat))
        # Remove from the map so only unneeded entries remain below
        del stat_names[stat]
    # Delete the ones you don't need
    for stat in stat_names.keys():
        os.remove("%s/%d.bbp" % (a_tmpdir_mod, stat_names[stat]))
    os.chdir(old_cwd)
    print("SDSU Seismograms Completed".center(80, '-'))
def run(self):
    """
    Generates comparison plots for all stations.

    First pass: for each station with both observed and calculated
    data, converts seismograms to velocity if needed (via the GP
    wcc2bbp/integ_diff tools), computes arias duration files for both
    datasets, and plots an observed/calculated overlay. Second pass:
    plots RotD50 comparisons for each station.
    """
    print("Generating Plots".center(80, '-'))
    # Initialize basic variables
    install = InstallCfg.getInstance()
    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.gen_plots.log" % (sim_id))
    # Input, tmp, and output directories
    a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    a_tmpdir_seis = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                 "obs_seis_%s" % (sta_base))
    # Station file
    a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                              self.r_stations)
    # List of observed seismogram files
    filelist = os.listdir(a_tmpdir_seis)
    slo = StationList(a_statfile)
    site_list = slo.getStationList()
    for site in site_list:
        stat = site.scode
        # Look for the files we need
        bbpfile = os.path.join(a_tmpdir_seis, "%s.bbp" % stat)
        expected_file = os.path.join(a_outdir, "%d.%s.vel.bbp" %
                                     (sim_id, stat))
        if (not os.path.exists(expected_file) or
            not os.path.exists(bbpfile)):
            # just skip this station
            continue
        print("==> Plotting seismogram comparison for station: %s" %
              (stat))
        # NOTE(review): if self.format is neither 'vel' nor 'acc',
        # filename1 is never assigned and the plot call below raises
        # NameError -- presumably format is validated upstream; confirm
        if self.format == 'vel':
            # We have velocity, nothing we need to do
            filename1 = bbpfile
        elif self.format == 'acc':
            # We have acceleration, must integrate first
            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir, "temp.acc.000")
            ewfile = os.path.join(a_tmpdir, "temp.acc.090")
            udfile = os.path.join(a_tmpdir, "temp.acc.ver")
            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)
            # Split the observed BBP file into per-component files
            cmd = ("%s/wcc2bbp " % (install.A_GP_BIN_DIR) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "wcc2bbp=0 < %s >> %s 2>&1" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)
            for comp in ['000', '090', 'ver']:
                # Create path names and check if their sizes are
                # within bounds
                filein = os.path.join(a_tmpdir, "temp.acc.%s" % (comp))
                fileout = os.path.join(a_tmpdir, "temp.vel.%s" % (comp))
                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)
                # Integrate acceleration to velocity, per component
                cmd = ("%s/integ_diff integ=1 " % (install.A_GP_BIN_DIR) +
                       "filein=%s fileout=%s >> %s 2>&1" %
                       (filein, fileout, self.log))
                bband_utils.runprog(cmd, abort_on_error=True,
                                    print_cmd=False)
            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir, "temp.vel.000")
            ewfile = os.path.join(a_tmpdir, "temp.vel.090")
            udfile = os.path.join(a_tmpdir, "temp.vel.ver")
            vel_bbp_file = os.path.join(a_tmpdir, "temp.%s.vel" % stat)
            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)
            # Merge the velocity components back into one BBP file
            cmd = ("%s/wcc2bbp wcc2bbp=1 " % install.A_GP_BIN_DIR +
                   "nsfile=%s ewfile=%s udfile=%s > %s 2>> %s" %
                   (nsfile, ewfile, udfile, vel_bbp_file, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)
            filename1 = vel_bbp_file
        # Generate arias duration files for calculated data
        calc_acc = os.path.join(a_outdir, "%d.%s.acc.bbp" %
                                (sim_id, stat))
        calc_peer_n = os.path.join(a_tmpdir, "%d.%s_N.acc" %
                                   (sim_id, stat))
        calc_peer_e = os.path.join(a_tmpdir, "%d.%s_E.acc" %
                                   (sim_id, stat))
        calc_peer_z = os.path.join(a_tmpdir, "%d.%s_Z.acc" %
                                   (sim_id, stat))
        # Convert calculated acc seismogram into peer format
        bbp_formatter.bbp2peer(calc_acc, calc_peer_n,
                               calc_peer_e, calc_peer_z)
        # Now calculate arias duration for each component
        for comp in ["N", "E", "Z"]:
            file_in = os.path.join(a_tmpdir, "%d.%s_%s.acc" %
                                   (sim_id, stat, comp))
            file_out = os.path.join(a_tmpdir, "%d.%s_%s.arias" %
                                    (sim_id, stat, comp))
            arias_duration.ad_from_acc(file_in, file_out)
        # Generate arias duration files for observed data
        obs_acc = os.path.join(a_tmpdir_seis, "%s.bbp" % stat)
        obs_peer_n = os.path.join(a_tmpdir, "obs.%s_N.acc" % (stat))
        obs_peer_e = os.path.join(a_tmpdir, "obs.%s_E.acc" % (stat))
        obs_peer_z = os.path.join(a_tmpdir, "obs.%s_Z.acc" % (stat))
        # Convert observed acc seismogram into peer format
        bbp_formatter.bbp2peer(obs_acc, obs_peer_n,
                               obs_peer_e, obs_peer_z)
        # Now calculate arias duration for each component
        for comp in ["N", "E", "Z"]:
            file_in = os.path.join(a_tmpdir, "obs.%s_%s.acc" %
                                   (stat, comp))
            file_out = os.path.join(a_tmpdir, "obs.%s_%s.arias" %
                                    (stat, comp))
            arias_duration.ad_from_acc(file_in, file_out)
        # Plot seismograms with arias duration
        filename2 = os.path.join(a_outdir, "%d.%s.vel.bbp" %
                                 (sim_id, stat))
        outfile = os.path.join(a_outdir, "%s_%d_%s_overlay.png" %
                               (self.comp_label, sim_id, stat))
        obs_arias_n = os.path.join(a_tmpdir, "obs.%s_N.arias" % (stat))
        obs_arias_e = os.path.join(a_tmpdir, "obs.%s_E.arias" % (stat))
        obs_arias_z = os.path.join(a_tmpdir, "obs.%s_Z.arias" % (stat))
        calc_arias_n = os.path.join(a_tmpdir, "%d.%s_N.arias" %
                                    (sim_id, stat))
        calc_arias_e = os.path.join(a_tmpdir, "%d.%s_E.arias" %
                                    (sim_id, stat))
        calc_arias_z = os.path.join(a_tmpdir, "%d.%s_Z.arias" %
                                    (sim_id, stat))
        plot_seismograms.plot_overlay_with_arias(
            stat, filename1, filename2,
            obs_arias_n, obs_arias_e, obs_arias_z,
            calc_arias_n, calc_arias_e, calc_arias_z,
            self.comp_label, "run %d" % sim_id, outfile)
    # Now create rd50 comparison plots
    for site in site_list:
        stat = site.scode
        print("==> Plotting RotD50 comparison for station: %s" % (stat))
        # Now process rd50 files
        expected_rd50_file = os.path.join(a_outdir, "%d.%s.rd50" %
                                          (sim_id, stat))
        if not os.path.exists(expected_rd50_file):
            # just skip it
            print("Skipping rotd50/psa5 for station %s..." % (stat))
            continue
        # See if .rd50 file exists for comparison. If it doesn't
        # exist, skip it
        rd50_file = None
        if ("%s.rd50" % (stat)) in filelist:
            rd50_file = "%s.rd50" % (stat)
        else:
            # Skip this station
            continue
        # Plot rotd50 results
        rd50_filename1 = os.path.join(a_tmpdir_seis, rd50_file)
        rd50_filename2 = os.path.join(a_outdir, "%d.%s.rd50" %
                                      (sim_id, stat))
        outfile = os.path.join(a_outdir, "%s_%d_%s_rotd50.png" %
                               (self.comp_label, sim_id, stat))
        plot_rotd50.plot_rd50(stat, rd50_filename1, rd50_filename2,
                              self.comp_label, sim_id, outfile,
                              site.low_freq_corner,
                              site.high_freq_corner,
                              quiet=True)
    print("Generating Plots Completed".center(80, '-'))
def run(self):
    """
    Runs the UCSB Syn1D simulator.

    Runs Syn1D separately for the low- and high-frequency bands,
    stitches the two results together, converts the stitched output
    to BBP format, and copies the per-station .bbp files into the
    simulation tmpdir. When no SRC file was provided, also computes
    the magnitude from the velocity model and SRF file and writes it
    to a magnitude_<sta_base> file.
    """
    print("UCSB Syn1D".center(80, '-'))
    #
    # Global installation parameters
    #
    install = InstallCfg.getInstance()
    #
    # Required inputs are sim_id, the src file, the FFSP output
    # and station list
    #
    sim_id = self.sim_id
    sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
    self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                            "%d.syn1d_%s.log" % (sim_id, sta_base))
    self.a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
    self.a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
    # Separate work directories for the LF run, the HF run, and the
    # stitching step
    a_tmpdir_lf = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                               "syn1D_lf_%s" % (sta_base))
    a_tmpdir_hf = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                               "syn1D_hf_%s" % (sta_base))
    a_tmpdir_stitch = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                   "stitch_%s" % (sta_base))
    a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
    #
    # Make sure the output and tmp directories exist
    #
    bband_utils.mkdirs([self.a_tmpdir, a_tmpdir_lf, a_tmpdir_hf,
                        a_tmpdir_stitch, a_outdir], print_cmd=False)
    # Parse SRC file
    a_srcfile = os.path.join(self.a_indir, self.r_srcfile)
    self.cfg = Syn1DCfg(self.vmodel_name, a_srcfile)
    # Read station list
    a_stations = os.path.join(self.a_indir, self.r_stations)
    print(a_stations)
    self.slo = StationList(a_stations)
    site_list = self.slo.getStationList()
    # Make sure syn1D can handle our station list
    if len(site_list) > self.cfg.MAX_STATIONS:
        raise bband_utils.ParameterError("Too many stations in "
                                         "the station list: %d. " %
                                         (len(site_list)) +
                                         "Maximum limit is %d." %
                                         (self.cfg.MAX_STATIONS))
    # Run Syn1D for LF
    print("Running Syn1D for LF...")
    self.run_syn1d(a_tmpdir_lf, self.cfg.A_UC_LF_VELMODEL,
                   self.cfg.A_UC_GREENBANK, self.cfg.A_UC_GREEN_SOIL,
                   self.cfg.A_UC_SYN1D_INP_FILE)
    # Run Syn1D for HF
    print("Running Syn1D for HF...")
    self.run_syn1d(a_tmpdir_hf, self.cfg.A_UC_HF_VELMODEL,
                   self.cfg.A_UC_HF_GREENBANK, self.cfg.A_UC_HF_GREEN_SOIL,
                   self.cfg.A_UC_SYN1D_INP_FILE)
    # Run Stitch to combine LF and HF
    print("Running Stitch...")
    self.run_stitch(a_tmpdir_stitch, a_tmpdir_lf, a_tmpdir_hf,
                    self.cfg.A_UC_LF_VELMODEL)
    #
    # Convert the outputs to BB format
    #
    # Copy station list ll to the stitch directory
    r_station_file = "stations.ll"
    shutil.copy2(os.path.join(a_tmpdir_lf, r_station_file),
                 os.path.join(a_tmpdir_stitch, r_station_file))
    # Save old directory (the conversion tool runs in the stitch dir)
    old_cwd = os.getcwd()
    os.chdir(a_tmpdir_stitch)
    cmd = "%s >> %s 2>&1" % (self.cfg.A_CONV, self.log)
    bband_utils.runprog(cmd)
    # Restore old directory
    os.chdir(old_cwd)
    #
    # Move the results to the tmpdir directory. Use the stations
    # list to determine the names of the output file the system
    # should have produced. Define an output name for each
    # station BB file. Read each line in the file as a station.
    #
    for stat in site_list:
        a_tmpfile = os.path.join(a_tmpdir_stitch,
                                 "%s.3comp" % (stat.scode))
        expected_file = os.path.join(self.a_tmpdir, "%d.%s.bbp" %
                                     (sim_id, stat.scode))
        shutil.copy2(a_tmpfile, expected_file)
    if self.r_srcfile == "":
        # calculate magnitude and write to file
        # (no SRC file available, so derive it from the SRF)
        mag = fault_utils.get_magnitude(os.path.join(self.a_indir,
                                                     self.r_velmodel),
                                        os.path.join(self.a_indir,
                                                     self.r_srffile),
                                        sta_base)
        mag_file = open(os.path.join(self.a_indir,
                                     "magnitude_%s" % (sta_base)), 'w')
        mag_file.write("%.2f" % mag)
        mag_file.flush()
        mag_file.close()
    print("UCSB Syn1D Completed".center(80, '-'))