def merge_seismograms(input_sims, station_list,
                      merged_outdir, realization):
    """
    Adds seismograms from multiple simulations, creating a set              
    of merged seismograms
    """
    # Load station list
    slo = StationList(station_list)
    site_list = slo.getStationList()

    # Merge each station
    for station in site_list:
        print("==> Merging station: %s" % (station.scode))
        # Merge both velocity and acceleration
        for file_type in ['vel', 'acc']:
            input_files = []
            for input_sim in input_sims:
                input_dir = os.path.join(input_sim, "Sims",
                                         "outdata", realization)
                input_file = os.path.join(input_dir,
                                          "%s.%s.%s.bbp" %
                                          (realization,
                                           station.scode,
                                           file_type))
                input_files.append(input_file)
            output_file = os.path.join(merged_outdir,
                                       "%s.%s.%s.bbp" %
                                       (realization,
                                        station.scode,
                                        file_type))
            add_bbp_seismograms(input_files, output_file)
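
A minimal usage sketch for merge_seismograms above; the simulation directories, station list, and realization name are hypothetical placeholders, and StationList and add_bbp_seismograms are assumed to come from the surrounding BBP code.

# Hypothetical call, assuming each input simulation keeps its results
# under <sim_dir>/Sims/outdata/<realization> as in the function above
input_sims = ["/work/bbp/sim_a", "/work/bbp/sim_b"]
station_list = "/work/bbp/nr_stations.stl"
merged_outdir = "/work/bbp/merged"
realization = "10000001"
merge_seismograms(input_sims, station_list, merged_outdir, realization)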
Example #2
    def build_station_list(self, station_file):
        work_dir = os.getcwd()
        proj = self.projobj
        sfname = os.path.splitext(os.path.basename(station_file))[0]
        fname = '%s/%s.txt' % (work_dir, sfname)
        sfile = open(fname, 'w')
        stats = []
        # a_statfile = (self.install.A_IN_DATA_DIR +
        #               "/%d/%s"%(self.sim_id,self.station_file))
        slo = StationList(self.station_file)
        site_list = slo.getStationList()

        for sites in site_list:
            slon = float(sites.lon)
            slat = float(sites.lat)
            site = sites.scode
            x, y = proj.get_xy_from_geo(slon, slat)
            # if x < 0 or y >0:
            #     print "Station oob :", slon, slat, x, y
            stat_data = (x, y)
            stats.append(stat_data)
            sfile.write("%-12s\t%f\t%f\t%f\t%f\n" % (site, slon, slat, x, y))
        self.stats = stats

        # Hypo
        if self.hypo != [] and self.hypo is not None:
            self.hypo[0], self.hypo[1] = proj.get_xy_from_geo(self.hypo[0],
                                                              self.hypo[1])

        sfile.close()
        return
Example #3
    def merge_seismograms(self):
        """
        Adds seismograms from multiple simulations, creating a set
        of merged seismograms
        """
        # Load station list
        slo = StationList(self.station_list)
        site_list = slo.getStationList()

        # Merge each station
        for station in site_list:
            print("==> Merging station: %s" % (station.scode))
            # Merge both velocity and acceleration
            for file_type in ['vel', 'acc']:
                input_files = []
                for sim_id in self.input_sims:
                    input_dir = os.path.join(self.install.A_OUT_DATA_DIR,
                                             str(sim_id))
                    input_file = os.path.join(input_dir,
                                              "%s.%s.%s.bbp" %
                                              (str(sim_id),
                                               station.scode,
                                               file_type))
                    input_files.append(input_file)
                output_file = os.path.join(self.a_outdir,
                                           "%s.%s.%s.bbp" %
                                           (str(self.output_sim_id),
                                            station.scode,
                                            file_type))
                self.add_bbp_seismograms(input_files, output_file)
Example #4
def merge_seismograms(input_sims, station_list, merged_outdir, realization):
    """
    Adds seismograms from multiple simulations, creating a set
    of merged seismograms
    """
    # Load station list
    slo = StationList(station_list)
    site_list = slo.getStationList()

    # Merge each station
    for station in site_list:
        print("==> Merging station: %s" % (station.scode))
        # Merge both velocity and acceleration
        for file_type in ['vel', 'acc']:
            input_files = []
            for input_sim in input_sims:
                input_dir = os.path.join(input_sim, "Sims", "outdata",
                                         realization)
                input_file = os.path.join(
                    input_dir,
                    "%s.%s.%s.bbp" % (realization, station.scode, file_type))
                input_files.append(input_file)
            output_file = os.path.join(
                merged_outdir,
                "%s.%s.%s.bbp" % (realization, station.scode, file_type))
            add_bbp_seismograms(input_files, output_file)
Example #5
    def build_station_list(self, station_file):
        work_dir = os.getcwd()
        proj = self.projobj
        sfname = os.path.splitext(os.path.basename(station_file))[0]
        fname = '%s/%s.txt' % (work_dir, sfname)
        sfile = open(fname, 'w')
        stats = []
        # a_statfile = (self.install.A_IN_DATA_DIR +
        #               "/%d/%s"%(self.sim_id,self.station_file))
        slo = StationList(self.station_file)
        site_list = slo.getStationList()

        for sites in site_list:
            slon = float(sites.lon)
            slat = float(sites.lat)
            site = sites.scode
            x, y = proj.get_xy_from_geo(slon, slat)
            # if x < 0 or y >0:
            #     print "Station oob :", slon, slat, x, y
            stat_data = (x, y)
            stats.append(stat_data)
            sfile.write("%-12s\t%f\t%f\t%f\t%f\n" % (site, slon, slat, x, y))
        self.stats = stats

        # Hypo
        if self.hypo != [] and self.hypo is not None:
            self.hypo[0], self.hypo[1] = proj.get_xy_from_geo(
                self.hypo[0], self.hypo[1])

        sfile.close()
        return
Example #6
    def post_process(self):
        """
        Run the standard BBP post-processing tasks
        """
        # Plot seismograms
        plotter = PlotSeis(os.path.basename(self.station_list),
                           os.path.basename(self.src_files[0]),
                           True, True, self.output_sim_id)
        plotter.run()
        # RotD50
        process = RotD50(os.path.basename(self.station_list),
                         self.output_sim_id)
        process.run()
        # Plot RotD50

        # Load station list
        slo = StationList(self.station_list)
        site_list = slo.getStationList()

        for site in site_list:
            stat = site.scode
            rd50_file = "%d.%s.rd50" % (self.output_sim_id, stat)
            rd50_filename1 = os.path.join(self.a_outdir, rd50_file)
            outfile = os.path.join(self.a_outdir, "%s_%d_%s_rotd50.png" %
                                   (self.scenario, self.output_sim_id, stat))
            plot_rotd50.plot_rd50(stat, rd50_filename1, "-",
                                  self.scenario, "-", outfile,
                                  site.low_freq_corner,
                                  site.high_freq_corner,
                                  quiet=True)
Example #7
    def run(self):
        """
        Corrects the amplitudes from all stations found in the station
        list according to the correction coefficients provided by the user
        """
        print("Correct PSA".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        self.log = os.path.join(install.A_OUT_LOG_DIR,
                                str(sim_id),
                                "%d.obs_seis.log" %
                                (sim_id))

        # Input, tmp, and output directories
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))

        # Station file
        a_statfile = os.path.join(a_indir, self.r_stations)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Go through each station
        # print "Working dir: %s" % (self.proc_dir)
        for site in site_list:
            stat = site.scode
            print("==> Correcting amplitudes for station: %s" % (stat))
            self.correct_station(stat, self.extension)

        print("Correct PSA Completed".center(80, '-'))
Example #8
def main():
    """
    Main function, parses command-line options, calls plotting code
    """
    params = parse_arguments()

    slo = StationList(params["station_list"])
    station_list = slo.getStationList()

    for site in station_list:
        station = site.scode
        print("==> Processing data for station: %s" % (station))

        filename = "%s%s" % (station, params["extension"])
        file1 = os.path.join(params["dir1"], filename)
        if not os.path.exists(file1):
            file1 = os.path.join(params["dir1"],
                                 "%s.%s" %
                                 (os.path.basename(os.path.normpath(params["dir1"])),
                                  filename))
        file2 = os.path.join(params["dir2"], filename)
        if not os.path.exists(file2):
            file2 = os.path.join(params["dir2"],
                                 "%s.%s" %
                                 (os.path.basename(os.path.normpath(params["dir2"])),
                                  filename))
        outfile = os.path.join(params["output_dir"],
                               "%s-%s-comparison.png" %
                               (station, params["output"]))
        params["station"] = station
        params["vs30"] = site.vs30
        plot_comparison(file1, file2, outfile, params)
Example #9
def gp_subset(in_file1, in_format1, in_file2, outfile):
    """
    Takes two input stat files in a given format and outputs
    an intersection file in GP format
    """
    #Get a station list from in_file1
    if in_format1 == 'GP' or in_format1 == 'UCSB':
        stat_list = StationList(in_file1).getStationList()
        stat_names = [stat.scode for stat in stat_list]
    elif in_format1 == 'SDSU':
        stat_file_fp = open(in_file1, 'r')
        data = stat_file_fp.readlines()
        stat_file_fp.close()
        for i in range(0, len(data)):
            pieces = data[i].split()
            if len(pieces) > 1:
                if pieces[1] == 'X':
                    break
        stat_names = []
        for j in range(i + 1, len(data)):
            pieces = data[j].split()
            stat_names.append(pieces[2])
    else:
        raise bband_utils.ParameterError("Format %s is not supported." %
                                         (in_format1))

    # Use the station list to subset in_file2
    intersect_list = []
    stat2_list = StationList(in_file2).getStationList()
    for stat2 in stat2_list:
        for stat1 in stat_names:
            if stat2.scode == stat1:
                intersect_list.append(stat2)
    StationList.build(intersect_list, outfile)
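
A hedged usage sketch for gp_subset; the file names are placeholders, and StationList.build is assumed to write the intersection out in GP format as used above.

# Hypothetical call: keep only stations present in both lists
gp_subset("run1_stations.stl", "GP", "run2_stations.stl", "intersection.stl")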
Example #10
def post_process(station_list, src_files,
                 merged_outdir,
                 realization, scenario):
    """
    Run the standard BBP post-processing tasks
    """
    # Plot seismograms
    plotter = PlotSeis(os.path.basename(station_list),
                       os.path.basename(src_files[0]),
                       True, True, int(realization))
    plotter.run()
    # RotD50
    process = RotD50(os.path.basename(station_list),
                     int(realization))
    process.run()

    # Plot RotD50
    
    # Load station list
    slo = StationList(station_list)
    site_list = slo.getStationList()

    for site in site_list:
        stat = site.scode
        rd50_file = "%d.%s.rd50" % (int(realization), stat)
        rd50_filename1 = os.path.join(merged_outdir, rd50_file)
        outfile = os.path.join(merged_outdir, "%s_%d_%s_rotd50.png" %
                               (scenario, int(realization), stat))
        plot_rotd50.plot_rd50(stat, rd50_filename1, "-",
                              scenario, "-", outfile,
                              site.low_freq_corner,
                              site.high_freq_corner,
                              quiet=True)
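
A hedged usage sketch for post_process above; the paths, realization id, and scenario label are placeholders, and PlotSeis, RotD50, and plot_rotd50 are assumed importable from the BBP modules used throughout these examples.

# Hypothetical call for one merged realization
post_process("/work/bbp/nr_stations.stl", ["/work/bbp/nr_event.src"],
             "/work/bbp/merged", "10000001", "NR")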
Example #11
    def run(self):
        """
        Corrects the amplitudes from all stations found in the station
        list according to the correction coefficients provided by the user
        """
        print("Correct PSA".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                                "%d.obs_seis.log" % (sim_id))

        # Input, tmp, and output directories
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))

        # Station file
        a_statfile = os.path.join(a_indir, self.r_stations)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Go through each station
        # print "Working dir: %s" % (self.proc_dir)
        for site in site_list:
            stat = site.scode
            print("==> Correcting amplitudes for station: %s" % (stat))
            self.correct_station(stat, self.extension)

        print("Correct PSA Completed".center(80, '-'))
Example #12
    def setUp(self):
        self.install = InstallCfg()
        self.stations = "nr_v13_3_1.stl"
        self.eventname = "NR"
        self.sim_id = int(seqnum.get_seq_num())
        sta_base = os.path.basename(os.path.splitext(self.stations)[0])
        sim_id = self.sim_id

        # Set up paths
        a_indir = os.path.join(self.install.A_IN_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(self.install.A_TMP_DATA_DIR, str(sim_id))
        a_tmpdir_seis = os.path.join(self.install.A_TMP_DATA_DIR,
                                     str(sim_id),
                                     "obs_seis_%s" % (sta_base))
        a_outdir = os.path.join(self.install.A_OUT_DATA_DIR, str(sim_id))
        a_logdir = os.path.join(self.install.A_OUT_LOG_DIR, str(sim_id))
        a_validation_outdir = os.path.join(a_outdir, "validations",
                                           "rzz2015")

        # Create directories
        bband_utils.mkdirs([a_indir, a_tmpdir, a_tmpdir_seis,
                            a_outdir, a_logdir, a_validation_outdir])

        # Copy station list
        cmd = "cp %s %s" % (os.path.join(self.install.A_TEST_REF_DIR,
                                         "rzz2015", self.stations),
                            a_indir)
        bband_utils.runprog(cmd)

        # Read station list
        slo = StationList(os.path.join(a_indir, self.stations))
        site_list = slo.getStationList()

        # Loop over stations
        for site in site_list:
            station = site.scode
            src_sims_acc = os.path.join(self.install.A_TEST_REF_DIR,
                                        "rzz2015", "syn_seis",
                                        "%s.acc.bbp" % (station))
            dst_sims_acc = os.path.join(a_outdir, "%d.%s.acc.bbp" %
                                        (sim_id, station))
            src_obs_acc = os.path.join(self.install.A_TEST_REF_DIR,
                                       "rzz2015", "obs_seis",
                                       "%s.bbp" % (station))
            dst_obs_acc = os.path.join(a_tmpdir_seis, "%s.bbp" %
                                       (station))

            cmd = "cp %s %s" % (src_sims_acc, dst_sims_acc)
            bband_utils.runprog(cmd)

            cmd = "cp %s %s" % (src_obs_acc, dst_obs_acc)
            bband_utils.runprog(cmd)
Example #13
    def setUp(self):
        self.install = InstallCfg()
        self.stations = "nr_v13_3_1.stl"
        self.eventname = "NR"
        self.sim_id = int(seqnum.get_seq_num())
        sta_base = os.path.basename(os.path.splitext(self.stations)[0])
        sim_id = self.sim_id

        # Set up paths
        a_indir = os.path.join(self.install.A_IN_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(self.install.A_TMP_DATA_DIR, str(sim_id))
        a_tmpdir_seis = os.path.join(self.install.A_TMP_DATA_DIR,
                                     str(sim_id),
                                     "obs_seis_%s" % (sta_base))
        a_outdir = os.path.join(self.install.A_OUT_DATA_DIR, str(sim_id))
        a_logdir = os.path.join(self.install.A_OUT_LOG_DIR, str(sim_id))
        a_validation_outdir = os.path.join(a_outdir, "validations",
                                           "rzz2015")

        # Create directories
        bband_utils.mkdirs([a_indir, a_tmpdir, a_tmpdir_seis,
                            a_outdir, a_logdir, a_validation_outdir])

        # Copy station list
        cmd = "cp %s %s" % (os.path.join(self.install.A_TEST_REF_DIR,
                                         "rzz2015", self.stations),
                            a_indir)
        bband_utils.runprog(cmd)

        # Read station list
        slo = StationList(os.path.join(a_indir, self.stations))
        site_list = slo.getStationList()

        # Loop over stations
        for site in site_list:
            station = site.scode
            src_sims_acc = os.path.join(self.install.A_TEST_REF_DIR,
                                        "rzz2015", "syn_seis",
                                        "%s.acc.bbp" % (station))
            dst_sims_acc = os.path.join(a_outdir, "%d.%s.acc.bbp" %
                                        (sim_id, station))
            src_obs_acc = os.path.join(self.install.A_TEST_REF_DIR,
                                       "rzz2015", "obs_seis",
                                       "%s.bbp" % (station))
            dst_obs_acc = os.path.join(a_tmpdir_seis, "%s.bbp" %
                                       (station))

            cmd = "cp %s %s" % (src_sims_acc, dst_sims_acc)
            bband_utils.runprog(cmd)

            cmd = "cp %s %s" % (src_obs_acc, dst_obs_acc)
            bband_utils.runprog(cmd)
Example #14
    def run(self):
        """
        Run the GP WccSiteamp 2014 module
        """
        print("GP Site Response".center(80, '-'))

        self.install = InstallCfg.getInstance()
        install = self.install
        self.config = WccSiteampCfg()
        config = self.config

        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
        self.log = os.path.join(install.A_OUT_LOG_DIR,
                                str(sim_id),
                                "%d.wcc_siteamp_%s.log" % (sim_id, sta_base))

        a_statfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_stations)

        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))

        progstring = "mkdir -p %s" % (a_tmpdir)
        bband_utils.runprog(progstring, abort_on_error=True, print_cmd=False)

        #
        # Read and parse the station list with this call
        #
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        for sites in site_list:
            site = sites.scode
            vs30 = sites.vs30
            if vs30 > config.VREF_MAX:
                vs30 = config.VREF_MAX

            print("*** WccSiteamp Processing station %s..." % (site))

            if self.method == "GP":
                self.process_separate_seismograms(site, sta_base, vs30,
                                                  a_indir, a_tmpdir)
            elif self.method in ("SDSU", "EXSIM", "UCSB"):
                self.process_hybrid_seismogram(site, sta_base, vs30,
                                               a_tmpdir, a_outdir)

        print("GP Site Response Completed".center(80, '-'))
Example #15
    def run(self):
        """
        Calculate GMPEs, create bias plot comparisons
        """
        print("Calculate GMPE".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        # Input, tmp, and output directories
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_outdir_gmpe = os.path.join(a_outdir,
                                     "gmpe_data_%s" % (sta_base))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))

        self.log = os.path.join(a_logdir, "%d.gmpe_compare.log" % (sim_id))

        #
        # Make sure the output and tmp directories exist
        #
        dirs = [a_outdir_gmpe, a_outdir, a_logdir]
        bband_utils.mkdirs(dirs, print_cmd=False)

        # Source file, parse it!
        a_srcfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_src_file)
        self.src_keys = bband_utils.parse_src_file(a_srcfile)

        # Station file
        a_statfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_stations)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Go through each station, and print comparison headers for
        # the first station we process
        for site in site_list:
            stat = site.scode
            print("==> Calculating GMPE for station: %s" % (stat))
            output_file = os.path.join(a_outdir_gmpe, "%s-gmpe.ri50" % (stat))
            self.calculate_gmpe(site, output_file)

        # All done
        print("Calculate GMPE Completed".center(80, '-'))
Example #16
    def test_anderson_gof(self):
        """
        Run the Anderson GOF test
        """
        gof_obj = AndersonGOF(self.stations, self.eventname, sim_id=self.sim_id)
        gof_obj.run()

        # Check summary GOF file
        ref_sum_file = os.path.join(self.install.A_TEST_REF_DIR, "anderson_gof",
                                    "ref_files",
                                    "gof_anderson.%s.txt" % (self.eventname))
        cal_sum_file = os.path.join(self.install.A_OUT_DATA_DIR,
                                    str(self.sim_id),
                                    "validations",
                                    "anderson_gof",
                                    "%d.gof_anderson.%s.txt" %
                                    (self.sim_id, self.eventname))
        self.failIf(cmp_bbp.cmp_files_generic(ref_sum_file, cal_sum_file,
                                              tolerance=0.005,
                                              start_col=1) != 0,
                    "GOF Summary file does not match reference file!")

        # Read station list
        slo = StationList(os.path.join(self.install.A_IN_DATA_DIR,
                                       str(self.sim_id), self.stations))
        site_list = slo.getStationList()

        # Loop over stations
        for site in site_list:
            station = site.scode

            # Check per-station files
            ref_sum_file = os.path.join(self.install.A_TEST_REF_DIR,
                                        "anderson_gof",
                                        "ref_files",
                                        "gof-%s-anderson-%s.txt" %
                                        (self.eventname, station))
            cal_sum_file = os.path.join(self.install.A_OUT_DATA_DIR,
                                        str(self.sim_id),
                                        "validations",
                                        "anderson_gof",
                                        "gof-%s-%d-anderson-%s.txt" %
                                        (self.eventname, self.sim_id,
                                         station))
            self.failIf(cmp_bbp.cmp_files_generic(ref_sum_file, cal_sum_file,
                                                  tolerance=0.005,
                                                  start_col=1) != 0,
                        "GOF file for station %s does not match!" %
                        (station))
Example #17
    def run(self):
        """
        Run the GP WccSiteamp 2014 module
        """
        print("GP Site Response".center(80, '-'))

        self.install = InstallCfg.getInstance()
        install = self.install
        self.config = WccSiteampCfg()
        config = self.config

        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
        self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                                "%d.wcc_siteamp_%s.log" % (sim_id, sta_base))

        a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                                  self.r_stations)

        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))

        progstring = "mkdir -p %s" % (a_tmpdir)
        bband_utils.runprog(progstring, abort_on_error=True, print_cmd=False)

        #
        # Read and parse the station list with this call
        #
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        for sites in site_list:
            site = sites.scode
            vs30 = sites.vs30
            if vs30 > config.VREF_MAX:
                vs30 = config.VREF_MAX

            print("*** WccSiteamp Processing station %s..." % (site))

            if self.method == "GP":
                self.process_separate_seismograms(site, sta_base, vs30,
                                                  a_indir, a_tmpdir)
            elif self.method in ("SDSU", "EXSIM", "UCSB"):
                self.process_hybrid_seismogram(site, sta_base, vs30, a_tmpdir,
                                               a_outdir)

        print("GP Site Response Completed".center(80, '-'))
Example #18
    def create_kml_output(self,
                          a_station_file,
                          kml_file,
                          hypo_lat=None,
                          hypo_lon=None):
        """
        Creates a kml output file containing all stations and the fault
        """
        kml = simplekml.Kml()
        stl = StationList(a_station_file).getStationList()
        # Add stations first
        for stat in stl:
            kml.newpoint(name=stat.scode, coords=[(stat.lon, stat.lat)])
        # Now add hypocenter
        if hypo_lat is not None and hypo_lon is not None:
            hyp = kml.newpoint(name="Hypocenter",
                               coords=[(hypo_lon, hypo_lat)])
            hyp.style.iconstyle.color = simplekml.Color.red
        # Add fault trace
        if self.trace is not None and len(self.trace) > 0:
            line_str = kml.newlinestring(name="Fault")
            line_str.altitudemode = simplekml.AltitudeMode.clamptoground
            line_str.style.linestyle.color = simplekml.Color.red
            line_str.style.linestyle.width = 5
            line_str.tessellate = 1
            points = []
            for point in self.trace:
                points.append((point[0], point[1], 0))
            line_str.coords = points
        # Save kml file
        kml.save(kml_file)
Example #19
    def run(self):
        """
        Calculate GMPEs, create bias plot comparisons
        """
        print("Calculate GMPE".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        # Input, tmp, and output directories
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_outdir_gmpe = os.path.join(a_outdir, "gmpe_data_%s" % (sta_base))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))

        self.log = os.path.join(a_logdir, "%d.gmpe_compare.log" % (sim_id))

        #
        # Make sure the output and tmp directories exist
        #
        dirs = [a_outdir_gmpe, a_outdir, a_logdir]
        bband_utils.mkdirs(dirs, print_cmd=False)

        # Source file, parse it!
        a_srcfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                                 self.r_src_file)
        self.src_keys = bband_utils.parse_src_file(a_srcfile)

        # Station file
        a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                                  self.r_stations)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Go through each station, and print comparison headers for
        # the first station we process
        for site in site_list:
            stat = site.scode
            print("==> Calculating GMPE for station: %s" % (stat))
            output_file = os.path.join(a_outdir_gmpe, "%s-gmpe.ri50" % (stat))
            self.calculate_gmpe(site, output_file)

        # All done
        print("Calculate GMPE Completed".center(80, '-'))
Example #20
    def run(self):
        """
        Goes through the station list and copy each low-frequency
        seismogram from the seis_dir to the simulation's tmpdir
        """
        install = InstallCfg.getInstance()
        sim_id = self.sim_id

        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        a_stations = os.path.join(a_indir, self.r_stations)

        print(self.seis_dir)

        slo = StationList(a_stations)
        stat_list = slo.getStationList()
        for stat in stat_list:
            # Look for bbp seismogram, copy in
            print("%s/%s-lf.bbp" % (self.seis_dir, stat.scode))
            if os.path.exists("%s/%s-lf.bbp" % (self.seis_dir, stat.scode)):
                print("Copying for site %s" % (stat.scode))
                # Need to eliminate negative times
                fp_in = open("%s/%s-lf.bbp" % (self.seis_dir, stat.scode), 'r')
                fp_out = open("%s/%d.%s-lf.bbp" %
                              (a_tmpdir, sim_id, stat.scode), 'w')
                for line in fp_in:
                    pieces = line.split()
                    try:
                        if pieces[0] == '#' or pieces[0] == '%':
                            fp_out.write(line)
                        elif float(pieces[0]) < -0.0001:
                            continue
                        elif float(pieces[0]) < 0.0001:
                            fp_out.write("0.0\t%s\t%s\t%s\n" % (pieces[1],
                                                                pieces[2],
                                                                pieces[3]))
                        else:
                            fp_out.write(line)
                    except ValueError:
                        fp_out.write(line)
                fp_in.close()
                fp_out.flush()
                fp_out.close()
            else:
                print("Could not find LF seismogram for station %s!" %
                      (stat.scode))
Example #21
    def generate_plot(self, a_statfile, a_dstdir):
        """
        This function generates the bias plot with ratio of maximum to
        median response across orientations (RotD100/RotD50)
        """
        install = install_cfg.InstallCfg.getInstance()
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        rd100_resid_output = os.path.join(
            a_dstdir, "%s-%d-resid-rd100.txt" % (self.comp_label, sim_id))
        for comp in ['rotd50', 'rotd100', 'ratio']:
            # Build paths and check lengths
            fileroot = os.path.join(
                a_dstdir, "%s-%d_r0-%d-rd100-%s" %
                (self.comp_label, sim_id, self.max_cutoff, comp))
            bband_utils.check_path_lengths([rd100_resid_output, fileroot],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " %
                   (os.path.join(install.A_GP_BIN_DIR, "resid2uncer_varN")) +
                   "residfile=%s fileroot=%s " %
                   (rd100_resid_output, fileroot) +
                   "comp=%s nstat=%d nper=63 " % (comp, len(site_list)) +
                   "min_cdst=%d max_cdst=%d >> %s 2>&1" %
                   (0, self.max_cutoff, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # Generate bias plot
        plot_mode = 'rd100'
        fileroot = ("%s-%d_r0-%d-rd100" %
                    (self.comp_label, sim_id, self.max_cutoff))
        plottitle = ("GOF Comparison between %s and simulation %d" %
                     (self.comp_label, sim_id))
        plotter = PlotGoF()
        plotter.plot(plottitle,
                     fileroot,
                     a_dstdir,
                     a_dstdir,
                     cutoff=self.max_cutoff,
                     mode=plot_mode,
                     colorset='single')
Example #22
def write_simple_stations(station_file, out_file):
    """
    This function parses the station file and writes a simple
    version with just longitude, latitude, and station code
    """
    stl = StationList(station_file).getStationList()
    fp_out = open(out_file, 'w')
    for stat in stl:
        fp_out.write("%f %f %s\n" % (stat.lon, stat.lat, stat.scode))
    fp_out.flush()
    fp_out.close()
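
A small usage sketch; both paths are placeholders. Each output line carries longitude, latitude, and station code, matching the write format above.

# Hypothetical call producing a "lon lat scode" text file
write_simple_stations("nr_v13_3_1.stl", "stations_simple.txt")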
Example #23
    def generate_plot(self, a_statfile, a_dstdir):
        """
        This function generates the bias plot with ratio of maximum to
        median response across orientations (RotD100/RotD50)
        """
        install = install_cfg.InstallCfg.getInstance()
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        rd100_resid_output = os.path.join(a_dstdir, "%s-%d-resid-rd100.txt" %
                                          (self.comp_label, sim_id))
        for comp in ['rotd50', 'rotd100', 'ratio']:
            # Build paths and check lengths
            fileroot = os.path.join(a_dstdir, "%s-%d_r0-%d-rd100-%s" %
                                    (self.comp_label, sim_id,
                                     self.max_cutoff, comp))
            bband_utils.check_path_lengths([rd100_resid_output, fileroot],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " % (os.path.join(install.A_GP_BIN_DIR,
                                         "resid2uncer_varN")) +
                   "residfile=%s fileroot=%s " %
                   (rd100_resid_output, fileroot) +
                   "comp=%s nstat=%d nper=63 " %
                   (comp, len(site_list)) +
                   "min_cdst=%d max_cdst=%d >> %s 2>&1" %
                   (0, self.max_cutoff, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # Generate bias plot
        plot_mode = 'rd100'
        fileroot = ("%s-%d_r0-%d-rd100" %
                    (self.comp_label, sim_id,
                     self.max_cutoff))
        plottitle = ("GOF Comparison between %s and simulation %d" %
                     (self.comp_label, sim_id))
        plotter = PlotGoF()
        plotter.plot(plottitle, fileroot, a_dstdir, a_dstdir,
                     cutoff=self.max_cutoff, mode=plot_mode, colorset='single')
Example #24
def set_boundaries_from_stations(station_file):
    """
    This function sets the north, south, east, and west boundaries
    of the region we should plot, using the stations' locations in
    the station file
    """
    # Start without anything
    north = None
    south = None
    east = None
    west = None

    # First we read the stations
    stations = StationList(station_file).getStationList()
    # Now go through each one, keeping track of its locations
    for station in stations:
        # If this is the first station, use its location
        if north is None:
            north = station.lat
            south = station.lat
            east = station.lon
            west = station.lon
            # Next station
            continue
        if station.lat > north:
            north = station.lat
        elif station.lat < south:
            south = station.lat
        if station.lon > east:
            east = station.lon
        elif station.lon < west:
            west = station.lon

    # Great, now we just add a buffer on each side
    if north < (90 - BUFFER_LATITUDE):
        north = north + BUFFER_LATITUDE
    else:
        north = 90
    if south > (-90 + BUFFER_LATITUDE):
        south = south - BUFFER_LATITUDE
    else:
        south = -90
    if east < (180 - BUFFER_LONGITUDE):
        east = east + BUFFER_LONGITUDE
    else:
        east = 180
    if west > (-180 + BUFFER_LONGITUDE):
        west = west - BUFFER_LONGITUDE
    else:
        west = -180

    return north, south, east, west
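
Usage sketch, assuming BUFFER_LATITUDE and BUFFER_LONGITUDE are module-level constants (for example a fraction of a degree) and that the station file path is a placeholder.

# Hypothetical call: compute a plotting region around the stations
north, south, east, west = set_boundaries_from_stations("nr_v13_3_1.stl")
print("Region: N=%.3f S=%.3f E=%.3f W=%.3f" % (north, south, east, west))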
Example #25
    def setUp(self):
        """
        Set up unit test
        """
        self.install = InstallCfg()
        self.r_velocity = "nr02-vs500_lf.vel"
        self.r_stations = "one_stat.txt"
        self.r_src = "test_wh_ucsb.src"
        self.r_srf = "test_ucsb.srf"
        self.sim_id = int(seqnum.get_seq_num())
        a_indir = os.path.join(self.install.A_IN_DATA_DIR, str(self.sim_id))
        a_tmpdir = os.path.join(self.install.A_TMP_DATA_DIR, str(self.sim_id))
        a_outdir = os.path.join(self.install.A_OUT_DATA_DIR, str(self.sim_id))
        a_logdir = os.path.join(self.install.A_OUT_LOG_DIR, str(self.sim_id))

        #
        # Make sure output directories exist
        #
        bband_utils.mkdirs([a_indir, a_tmpdir, a_outdir, a_logdir],
                           print_cmd=False)

        # Copy files
        a_refdir = os.path.join(self.install.A_TEST_REF_DIR, "ucsb")

        # Copy other input files
        shutil.copy2(os.path.join(a_refdir, self.r_stations), a_indir)
        shutil.copy2(os.path.join(a_refdir, self.r_velocity), a_indir)
        shutil.copy2(os.path.join(a_refdir, self.r_src), a_indir)
        shutil.copy2(os.path.join(a_refdir, self.r_srf), a_indir)

        # Copy seismograms
        slo = StationList(os.path.join(a_indir, self.r_stations))
        site_list = slo.getStationList()
        for site in site_list:
            shutil.copy2(os.path.join(a_refdir, "%s.3comp" % (site.scode)),
                         a_tmpdir)

        # Change directory to tmpdir
        os.chdir(a_tmpdir)
Example #26
    def calculate_ratios(self, a_statfile, a_dstdir):
        """
        This function adds an extra column to the rd100 files
        containing the RotD100/RotD50 ratio. It does this for both
        observations and simulated data files.
        """
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Loop through all stations
        for site in site_list:
            stat = site.scode

            # Simulated data file
            basename = "%d.%s.rd100" % (sim_id, stat)
            filename = os.path.join(a_dstdir, basename)
            self.calculate_ratio(filename)

            # Observation data file
            basename = "%s.rd100" % (stat)
            filename = os.path.join(a_dstdir, basename)
            self.calculate_ratio(filename)
Example #27
    def calculate_ratios(self, a_statfile, a_dstdir):
        """
        This function adds an extra column to the rd100 files
        containing the RotD100/RotD50 ratio. It does this for both
        observations and simulated data files.
        """
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Loop through all stations
        for site in site_list:
            stat = site.scode

            # Simulated data file
            basename = "%d.%s.rd100" % (sim_id, stat)
            filename = os.path.join(a_dstdir, basename)
            self.calculate_ratio(filename)

            # Observation data file
            basename = "%s.rd100" % (stat)
            filename = os.path.join(a_dstdir, basename)
            self.calculate_ratio(filename)
Example #28
def sdsu2uc_subset(sdsu_stat_file, uc_stalist_in, uc_vs30_in,
                   uc_stalist_out, uc_vs30_out):
    # Read input file
    stat_file_fp = open(sdsu_stat_file, "r")
    data = stat_file_fp.readlines()
    stat_file_fp.close()
    for i in range(0, len(data)):
        pieces = data[i].split()
        if len(pieces) > 1:
            if pieces[1] == "X":
                break
    stat_names = []
    for j in range(i + 1, len(data)):
        pieces = data[j].split()
        stat_names.append(pieces[2])

    new_list = []
    slo = StationList(uc_stalist_in).getStationList()
    for stat in stat_names:
        for entry in slo:
            if stat == entry.scode:
                new_list.append(entry)
    StationList.build(new_list, uc_stalist_out)

    fp = open(uc_vs30_in, 'r')
    vs30_dict = dict()
    for line in fp.readlines():
        pieces = line.split()
        vs30_dict[pieces[0]] = pieces[1]
    fp.close()

    fp = open(uc_vs30_out, 'w')
    for stat in stat_names:
        if stat in vs30_dict:
            fp.write("\t%s\t%s\n" % (stat, vs30_dict[stat]))
    fp.flush()
    fp.close()
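
A hedged usage sketch; all file names are placeholders, and the SDSU station file is assumed to follow the header/'X' marker layout parsed above.

# Hypothetical call: subset a UCSB station list and vs30 table
# down to the stations named in an SDSU station file
sdsu2uc_subset("sdsu_stations.list", "ucsb_stations.ll", "ucsb_stations.vs30",
               "ucsb_subset.ll", "ucsb_subset.vs30")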
Example #29
def load_all_data(input_indir, input_outdir):
    """
    This function goes through all realizations and loads all data to
    the DATA dictionary
    """
    # First create data dictionary
    data = {}
    # First level is C1..CMAX
    for i in range(10):
        data[i] = {}
        # Second level is B1..BMAX
        for j in range(BMAX):
            data[i][j] = {}

    # Get realizations
    realizations = sorted(os.listdir(input_indir))
    one_realization = realizations[0]
    basedir = os.path.join(input_indir, one_realization)

    # Get the station list
    a_statfile = glob.glob("%s%s*.stl" % (basedir, os.sep))
    if len(a_statfile) != 1:
        raise bband_utils.ProcessingError("Cannot get station list!")
    a_statfile = a_statfile[0]
    slo = StationList(a_statfile)
    site_list = slo.getStationList()

    # Go through all stations
    for site in site_list:
        station = site.scode
        print "working on station: %s" % (station)

        # Read data for this station
        load_station_data(input_outdir, data, station)

    # Return data dictionary
    return data
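
Usage sketch under the directory layout implied above: one sub-directory per realization inside input_indir, a single *.stl station list in the first realization, and BMAX plus load_station_data defined elsewhere in the module.

# Hypothetical call: returns the nested data[c][b][...] dictionary
data = load_all_data("indata", "outdata")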
Example #30
    def test_gp2uc(self):
        """
        Inputs a GP format file and get out a UCSB format file
        """
        install = InstallCfg()
        sim_id = int(seqnum.get_seq_num())
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_refdir = os.path.join(install.A_TEST_REF_DIR, "ucsb")

        #
        # Make sure output directories exist
        #
        bband_utils.mkdirs([a_tmpdir], print_cmd=False)

        # File paths
        gpfile = os.path.join(a_refdir, "stats-h0.125.ll")
        ucref = os.path.join(a_refdir, "stations.ll")
        ofile = os.path.join(a_tmpdir, "stations.ll")
        ofile30 = os.path.join(a_tmpdir, "stations.vs30")

        sl = StationList(gpfile)
        _ = stas2files.gp2uc_stalist(sl, ofile, ofile30)
        errmsg = "Conversion of station list from GP to UC format failed"
        self.failIf(filecmp.cmp(ucref, ofile) == False, errmsg)
Example #31
    def run(self):
        """
        Runs the GMPEs for the six parameters in Rezaeian (2015)
        """
        print("RZZ2015 GMPE".center(80, '-'))

        # Load configuration, set sim_id
        install = InstallCfg.getInstance()
        sim_id = self.sim_id

        # Build directory paths
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
        a_validation_outdir = os.path.join(a_outdir, "validations",
                                           "rzz2015_gmpe")

        # Make sure the output and tmp directories exist
        bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir, a_validation_outdir],
                           print_cmd=False)

        # Source file, parse it!
        a_srcfile = os.path.join(a_indir, self.srcfile)
        self.src_keys = bband_utils.parse_src_file(a_srcfile)

        # Now the file paths
        self.log = os.path.join(a_logdir, "%d.rzz2015gmpe.log" % (sim_id))
        sta_file = os.path.join(a_indir, self.stations)

        # Get station list
        slo = StationList(sta_file)
        site_list = slo.getStationList()

        # Initialize random seed
        np.random.seed(int(self.src_keys['seed']))

        # Create output file, add header
        out_file = open(
            os.path.join(a_validation_outdir,
                         '%d.rzz2015gmpe.txt' % (self.sim_id)), 'w')
        out_file.write("#station, r_rup, vs_30,"
                       " ai_mean, d595_mean, tmid_mean,"
                       " wmid_mean, wslp_mean, zeta_mean,"
                       " ai_stddev, d595_stddev, tmid_stddev,"
                       " wmid_stddev, wslp_stddev, zeta_stddev\n")

        # Go through each station
        for site in site_list:
            stat = site.scode
            print("==> Processing station: %s" % (stat))

            # Calculate Rrup
            origin = (self.src_keys['lon_top_center'],
                      self.src_keys['lat_top_center'])
            dims = (self.src_keys['fault_length'], self.src_keys['dlen'],
                    self.src_keys['fault_width'], self.src_keys['dwid'],
                    self.src_keys['depth_to_top'])
            mech = (self.src_keys['strike'], self.src_keys['dip'],
                    self.src_keys['rake'])

            site_geom = [float(site.lon), float(site.lat), 0.0]
            (fault_trace1, up_seis_depth, low_seis_depth, ave_dip, dummy1,
             dummy2) = putils.FaultTraceGen(origin, dims, mech)
            _, rrup, _ = putils.DistanceToSimpleFaultSurface(
                site_geom, fault_trace1, up_seis_depth, low_seis_depth,
                ave_dip)

            vs30 = site.vs30
            mag = self.src_keys['magnitude']
            # Fault type is 1 (Reverse) unless condition below is met
            # Then it is 0 (Strike-pp)
            fault_type = 1
            rake = self.src_keys['rake']
            if ((rake >= -180 and rake < -150) or (rake >= -30 and rake <= 30)
                    or (rake > 150 and rake <= 180)):
                fault_type = 0

            #rrup = 13.94
            #fault_type = 1
            #vs30 = 659.6
            #mag = 7.35

            [ai_mean, d595_mean, tmid_mean, wmid_mean, wslp_mean,
             zeta_mean] = self.calculate_mean_values(rrup, vs30, mag,
                                                     fault_type)

            # Randomize parameters using standard deviations and correlations
            sta_ai = []
            sta_d595 = []
            sta_tmid = []
            sta_wmid = []
            sta_wslp = []
            sta_zeta = []

            # Simulate number_of_samples realizations of the error
            # term for each parameter
            for _ in range(0, self.number_of_samples):
                # Simulate zero-mean normal correlated parameters with
                # stdv = sqrt(sigmai^2+taui^2)
                # totalerror = eps+etha=[eps1+etha1 eps2+etha2 eps3+etha3 eps4+etha4
                #                        eps5+etha5 eps6+etha6]

                # mean error vector
                # m_totalerror = [0, 0, 0, 0, 0, 0]

                # Covariance matrix
                std1 = np.sqrt(self.sigma1**2 + self.tau1**2)
                std2 = np.sqrt(self.sigma2**2 + self.tau2**2)
                std3 = np.sqrt(self.sigma3**2 + self.tau3**2)
                std4 = np.sqrt(self.sigma4**2 + self.tau4**2)
                std5 = np.sqrt(self.sigma5**2 + self.tau5**2)
                std6 = np.sqrt(self.sigma6**2 + self.tau6**2)

                s_total_error = [
                    [
                        std1**2, std1 * std2 * self.rho_totalerror[0][1],
                        std1 * std3 * self.rho_totalerror[0][2],
                        std1 * std4 * self.rho_totalerror[0][3],
                        std1 * std5 * self.rho_totalerror[0][4],
                        std1 * std6 * self.rho_totalerror[0][5]
                    ],
                    [
                        std2 * std1 * self.rho_totalerror[1][0], std2**2,
                        std2 * std3 * self.rho_totalerror[1][2],
                        std2 * std4 * self.rho_totalerror[1][3],
                        std2 * std5 * self.rho_totalerror[1][4],
                        std2 * std6 * self.rho_totalerror[1][5]
                    ],
                    [
                        std3 * std1 * self.rho_totalerror[2][0],
                        std3 * std2 * self.rho_totalerror[2][1], std3**2,
                        std3 * std4 * self.rho_totalerror[2][3],
                        std3 * std5 * self.rho_totalerror[2][4],
                        std3 * std6 * self.rho_totalerror[2][5]
                    ],
                    [
                        std4 * std1 * self.rho_totalerror[3][0],
                        std4 * std2 * self.rho_totalerror[3][1],
                        std4 * std3 * self.rho_totalerror[3][2], std4**2,
                        std4 * std5 * self.rho_totalerror[3][4],
                        std4 * std6 * self.rho_totalerror[3][5]
                    ],
                    [
                        std5 * std1 * self.rho_totalerror[4][0],
                        std5 * std2 * self.rho_totalerror[4][1],
                        std5 * std3 * self.rho_totalerror[4][2],
                        std5 * std4 * self.rho_totalerror[4][3], std5**2,
                        std5 * std6 * self.rho_totalerror[4][5]
                    ],
                    [
                        std6 * std1 * self.rho_totalerror[5][0],
                        std6 * std2 * self.rho_totalerror[5][1],
                        std6 * std3 * self.rho_totalerror[5][2],
                        std6 * std4 * self.rho_totalerror[5][3],
                        std6 * std5 * self.rho_totalerror[5][4], std6**2
                    ]
                ]
                # Matlab returns upper-triangular while Python returns
                # lower-triangular by default -- no need to transpose later!
                r_total_error = np.linalg.cholesky(s_total_error)
                y_total_error = np.random.normal(0, 1, 6)
                total_error = np.dot(r_total_error, y_total_error)

                # Generate randomized parameters in the standard normal space: ui
                u1 = (self.beta1[0] + self.beta1[1] *
                      (mag / 7.0) + self.beta1[2] * fault_type +
                      self.beta1[3] * math.log(rrup / 25.0) +
                      self.beta1[4] * math.log(vs30 / 750.0)) + total_error[0]
                u2 = (self.beta2[0] + self.beta2[1] * mag +
                      self.beta2[2] * fault_type + self.beta2[3] * rrup +
                      self.beta2[4] * vs30) + total_error[1]
                u3 = (self.beta3[0] + self.beta3[1] * mag +
                      self.beta3[2] * fault_type + self.beta3[3] * rrup +
                      self.beta3[4] * vs30) + total_error[2]
                u4 = (self.beta4[0] + self.beta4[1] * mag +
                      self.beta4[2] * fault_type + self.beta4[3] * rrup +
                      self.beta4[4] * vs30) + total_error[3]
                u5 = (self.beta5[0] + self.beta5[1] * mag +
                      self.beta5[2] * fault_type + self.beta5[3] * rrup +
                      self.beta5[4] * vs30) + total_error[4]
                u6 = (self.beta6[0] + self.beta6[1] * mag +
                      self.beta6[2] * fault_type + self.beta6[3] * rrup +
                      self.beta6[4] * vs30) + total_error[5]

                # Transform parameters ui from standard normal to the physical space:
                # thetai (constraint: tmid < d_5_95, removed)
                theta1 = norm.ppf(norm.cdf(u1), -4.8255, 1.4318)
                theta2 = 5.0 + (45 - 5) * beta.ppf(norm.cdf(u2), 1.1314,
                                                   2.4474)
                theta3 = 0.5 + (40 - 0.5) * beta.ppf(norm.cdf(u3), 1.5792,
                                                     3.6405)
                theta4 = gamma.ppf(norm.cdf(u4), 4.0982, scale=1.4330)
                theta5 = self.slpinv(norm.cdf(u5), 17.095, 6.7729, 4.8512, -2,
                                     0.5)
                theta6 = 0.02 + (1 - 0.02) * beta.ppf(norm.cdf(u6), 1.4250,
                                                      5.7208)

                sta_ai.append(math.exp(theta1))
                sta_d595.append(theta2)
                sta_tmid.append(theta3)
                sta_wmid.append(theta4)
                sta_wslp.append(theta5)
                sta_zeta.append(theta6)

            # Write output to gmpe file
            out_file.write(
                "%s, %7.4f, %7.2f, " % (stat, rrup, vs30) +
                "%7.4f, %7.4f, %7.4f, %7.4f, %7.4f, %7.4f, " %
                (ai_mean, d595_mean, tmid_mean, wmid_mean, wslp_mean,
                 zeta_mean) + "%7.4f, %7.4f, %7.4f, %7.4f, %7.4f, %7.4f\n" %
                (np.std(sta_ai), np.std(sta_d595), np.std(sta_tmid),
                 np.std(sta_wmid), np.std(sta_wslp), np.std(sta_zeta)))

            ## Write output to file
            #sta_out_file = open(os.path.join(a_validation_outdir,
            #                                 '%d.rzz2015gmpe.%s.txt' %
            #                                 (self.sim_id, stat)), 'w')
            #sta_out_file.write("#ai(s.g^2), d595(s), tmid(s), "
            #                   "wmid(Hz), wslp(Hz/sec), zeta(ratio)\n")
            #for ai, d595, tmid, wmid, wslp, zeta in zip(sta_ai, sta_d595,
            #                                            sta_tmid, sta_wmid,
            #                                            sta_wslp, sta_zeta):
            #    sta_out_file.write("%7.4f, %7.4f, %7.4f, %7.4f, %7.4f, %7.4f\n" %
            #                       (ai, d595, tmid, wmid, wslp, zeta))
            #sta_out_file.close()

            # Generate Plots
            self.plot(stat, a_validation_outdir, rrup, fault_type, vs30, mag,
                      sta_ai, sta_d595, sta_tmid, sta_wmid, sta_wslp, sta_zeta,
                      ai_mean, d595_mean, tmid_mean, wmid_mean, wslp_mean,
                      zeta_mean)

        # Close output file
        out_file.close()
        print("RZZ2015 GMPE Completed".center(80, '-'))
Example #32
    def run(self):
        """
        Extracts needed seismograms from the bin file
        """
        print("SDSU Seismograms".center(80, '-'))

        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_tmpdir_mod = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                    "sdsu_seismograms_%s" % (sta_base))

        binfile = os.path.join(a_indir, self.r_binfile)

        #
        # Make sure the output and tmp directories exist
        #
        bband_utils.mkdirs([a_tmpdir, a_tmpdir_mod, a_outdir],
                           print_cmd=False)

        a_full_stations = os.path.join(a_indir, self.r_full_stations)
        a_stations = os.path.join(a_indir, self.r_stations)

        # Copy station files to the tmpdir_mod directory
        cmd = "cp %s %s" % (a_full_stations,
                            os.path.join(a_tmpdir_mod, self.r_full_stations))
        bband_utils.runprog(cmd)
        cmd = "cp %s %s" % (a_stations,
                            os.path.join(a_tmpdir_mod, self.r_stations))
        bband_utils.runprog(cmd)

        #
        # Make sure path names are within the limits accepted by the
        # Fortran code
        #
        if len(binfile) >= bband_utils.SDSU_MAX_FILENAME:
            raise ValueError("binfile is %d characters long, maximum is %d" %
                             (len(binfile), bband_utils.SDSU_MAX_FILENAME))

        old_cwd = os.getcwd()
        os.chdir(a_tmpdir_mod)

        # Get the number of stations in the seismogram file; this is a
        # variation of the code in station_list.py
        stat_names = {}
        num_stations = 0
        stat_fp = open(a_full_stations, 'r')
        for line in stat_fp:
            if line.startswith('#'):
                continue
            sta = line.split()
            if len(sta) >= 3:
                scode = sta[2]
                num_stations = num_stations + 1
                stat_names[scode] = num_stations
        stat_fp.close()

        # Create list of stations to save
        slo = StationList(a_stations)
        site_list = slo.getStationList()
        save_stat_names = []
        for stat in site_list:
            save_stat_names.append(stat.scode)

        # Convert to bbp format
        cmd = "%s/bin2bbp %s %d" % (install.A_SDSU_BIN_DIR,
                                    binfile, len(stat_names))
        bband_utils.runprog(cmd)

        # Copy over the names
        for stat in save_stat_names:
            if stat not in stat_names:
                continue
            sta_id = stat_names[stat]
            shutil.copy2("%s/%d.bbp" % (a_tmpdir_mod, sta_id),
                         "%s/%d.%s-lf.bbp" %
                         (a_tmpdir, sim_id, stat))
            del stat_names[stat]

        # Delete the ones you don't need
        for stat in stat_names.keys():
            os.remove("%s/%d.bbp" %
                      (a_tmpdir_mod, stat_names[stat]))

        os.chdir(old_cwd)

        print("SDSU Seismograms Completed".center(80, '-'))
Example #33
output_dir = sys.argv[5]

# Create directory paths
install = InstallCfg.getInstance()
config = GPGofCfg()
a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id_1))
a_outdir1 = os.path.join(install.A_OUT_DATA_DIR, str(sim_id_1))
a_outdir2 = os.path.join(install.A_OUT_DATA_DIR, str(sim_id_2))

# Src file
a_srcfile = os.path.join(a_indir, src_file)
src_keys = bband_utils.parse_src_file(a_srcfile)

# Station file
a_statfile = os.path.join(a_indir, station_list)
slo = StationList(a_statfile)
site_list = slo.getStationList()

# Capture event_label
bias_file = glob.glob("%s%s*.bias" % (a_outdir1, os.sep))
if len(bias_file) < 1:
    raise bband_utils.ProcessingError("Cannot find event label!")
bias_file = bias_file[0]
# Let's capture the event label
event_label = os.path.basename(bias_file).split("-")[0]

print_header_rd50 = 1

# Go through the stations
for site in site_list:
    stat = site.scode
Example #34
    def run(self):
        """
        Go through the station list and create acceleration
        seismogram. Copy results to outdata directory
        """
        print("Copy Seismograms".center(80, '-'))

        install = InstallCfg.getInstance()
        sim_id = self.sim_id

        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
        self.log = os.path.join(install.A_OUT_LOG_DIR,
                                str(sim_id),
                                "%d.copy_seis_%s.log" % (sim_id, sta_base))
        a_statfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_stations)

        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))

        # Make sure tmpdir, outdir exist
        dirs = [a_tmpdir, a_outdir]
        bband_utils.mkdirs(dirs, print_cmd=False)

        #
        # Read and parse the station list with this call
        #
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        for sits in site_list:
            site = sits.scode
            print("==> Processing station: %s" % (site))

            if self.hybrid:
                expected_file = "%d.%s.bbp" % (sim_id, site)
            else:
                expected_file = "%d.%s.vel.bbp" % (sim_id, site)

            #print("Processing velocity for station %s - %s" %
            #      (site, expected_file))
            bbpfile = os.path.join(a_tmpdir, expected_file)

            # Make sure velocity file is there, otherwise, skip this station
            if not os.path.exists(bbpfile):
                print("No velocity seismograms found for station %s" %
                      (site))
                print("Skipping this station...")
                continue

            # Copy velocity bbp file to outdir
            shutil.copy2(bbpfile,
                         os.path.join(a_outdir, "%d.%s.vel.bbp" %
                                      (sim_id, site)))

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.ver" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s/wcc2bbp " % (install.A_GP_BIN_DIR) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "wcc2bbp=0 < %s >> %s 2>&1" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            for comp in ['000', '090', 'ver']:
                # Create path names and check if their sizes are
                # within bounds
                filein = os.path.join(a_tmpdir,
                                      "%d.%s.%s" %
                                      (sim_id, site, comp))
                fileout = os.path.join(a_tmpdir,
                                       "%d.%s.acc.%s" %
                                       (sim_id, site, comp))

                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s/integ_diff diff=1 filein=%s fileout=%s" %
                       (install.A_GP_BIN_DIR, filein, fileout))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s.acc.bbp" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s/wcc2bbp " % (install.A_GP_BIN_DIR) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Copy acceleration bbp file to outdir
            shutil.copy2(bbpfile,
                         os.path.join(a_outdir, "%d.%s.acc.bbp" %
                                      (sim_id, site)))

        print("Copy Seismograms Completed".center(80, '-'))
Example #35
class Syn1D(object):
    """
    Implement UCSB syn1D as a Broadband Component
    """

    def __init__(self, i_r_velmodel, i_r_srcfile, i_r_srffile,
                 i_r_stations, i_vmodel_name, sim_id=0):
        """
        Initialize basic class parameters
        """
        self.sim_id = sim_id
        self.r_velmodel = i_r_velmodel
        self.r_srcfile = i_r_srcfile
        self.r_srffile = i_r_srffile
        self.r_stations = i_r_stations
        self.vmodel_name = i_vmodel_name
        self.cfg = None
        self.slo = None
        self.a_indir = None
        self.a_tmpdir = None

    def create_fault_global_in(self, a_fault_file):
        """
        This function creates the faultglobal.in file from data
        obtained in the src file
        """
        out_fp = open(a_fault_file, "w")
        out_fp.write("%.3f %.3f %2.3f\n" %
                     (self.cfg.CFGDICT['lon_top_center'],
                      self.cfg.CFGDICT['lat_top_center'],
                      self.cfg.CFGDICT["depth_to_top"]))
        out_fp.write("%.2f %.2f\n" %
                     (self.cfg.CFGDICT["fault_length"],
                      self.cfg.CFGDICT["fault_width"]))
        out_fp.write("%.1f %.1f %.1f\n" %
                     (self.cfg.CFGDICT['strike'],
                      self.cfg.CFGDICT['dip'],
                      self.cfg.CFGDICT['rake']))
        out_fp.write("%.2f %.2f\n" %
                     (self.cfg.CFGDICT["hypo_along_stk"],
                      self.cfg.CFGDICT["hypo_down_dip"]))
        out_fp.write("%.2f\n" % (self.cfg.CFGDICT['magnitude']))
        out_fp.write("-1 -1\n")
        out_fp.write("%d\n" % (int(self.cfg.CFGDICT['seed'])))
        out_fp.write("-1\n")
        out_fp.write("-1\n")
        out_fp.write("%s\n" % (self.r_velmodel))
        out_fp.write("FFSP_OUTPUT\n")
        out_fp.close()

    def run_syn1d(self, a_tmpdir_mod, a_velmodel,
                  a_greenfile, a_greensoil, a_lahfile):
        """
        Run the Syn1D simulator with the parameters provided
        """
        # Store cwd and change over to tmpdir so the executable can
        # find the files
        old_cwd = os.getcwd()
        os.chdir(a_tmpdir_mod)

        # Copy velocity model
        r_velmodel = os.path.basename(a_velmodel)
        shutil.copy2(a_velmodel, os.path.join(a_tmpdir_mod, r_velmodel))

        # The station list is not in UCSB format; convert it to UCSB
        # format, generating the UCSB station file and the Vs30 file
        a_uc_stations = os.path.join(a_tmpdir_mod, self.cfg.R_UC_STATION_FILE)
        a_uc_vs30 = os.path.join(a_tmpdir_mod, self.cfg.R_UC_VS30_FILE)
        stas2files.gp2uc_stalist(self.slo, a_uc_stations, a_uc_vs30)

        #
        # The UCSB codes require fixed input names.  So here, we copy
        # the UCSB file over to the expected name "stations.ll"
        #
        shutil.copy2(a_uc_stations,
                     os.path.join(a_tmpdir_mod, "stations.ll"))

        # Now copy source_model.list
        shutil.copy2(os.path.join(self.a_tmpdir, self.cfg.R_UC_SOURCE_MODEL),
                     os.path.join(a_tmpdir_mod, self.cfg.R_UC_SOURCE_MODEL))

        #
        # Define SRF file as files in the working directory
        #
        a_ffspfile = os.path.join(self.a_indir, self.cfg.R_FFSP_FILE)
        shutil.copy2(a_ffspfile, a_tmpdir_mod)
        a_ffspfile = os.path.join(a_tmpdir_mod, self.cfg.R_FFSP_FILE)

        #
        # Copy the Green_Bank.inf file and associated velocity model
        # into the working directory
        #
        shutil.copy2(a_greenfile, os.path.join(a_tmpdir_mod,
                                               os.path.basename(a_greenfile)))

        # Symlink green soil (too big to copy)
        if (not os.path.exists(os.path.join(a_tmpdir_mod,
                                            os.path.basename(a_greensoil)))):
            os.symlink(a_greensoil, os.path.join(a_tmpdir_mod,
                                                 os.path.basename(a_greensoil)))

        #
        # Copy syn_1d.inp into the working directory
        #
        shutil.copy2(a_lahfile, os.path.join(a_tmpdir_mod,
                                             os.path.basename(a_lahfile)))

        #
        # Create faultGlobal.in
        #
        r_faultfile = "faultGlobal.in"
        a_faultfile = os.path.join(a_tmpdir_mod, r_faultfile)
        self.create_fault_global_in(a_faultfile)

        #
        # Convert stations to xy
        #
        cmd = "%s >> %s 2>&1" % (self.cfg.A_SLL2XY, self.log)
        bband_utils.runprog(cmd)

        #
        # Run the bb codes
        #
        cmd = "%s >> %s 2>&1" % (self.cfg.A_SYN1D, self.log)
        bband_utils.runprog(cmd, abort_on_error=True)

        # Restore previous directory
        os.chdir(old_cwd)

    def run_stitch(self, a_tmpdir_stitch, a_tmpdir_lf,
                   a_tmpdir_hf, a_velmodel):
        """
        Run the UCSB Stitch code to merge LF and HF seismograms,
        creating BB seismograms
        """
        # Copy velocity model
        r_velmodel = os.path.basename(a_velmodel)
        shutil.copy2(a_velmodel, os.path.join(a_tmpdir_stitch, r_velmodel))

        # Write VMname.list file
        vmname_file = open(os.path.join(a_tmpdir_stitch, "VMname.list"), 'w')
        site_list = self.slo.getStationList()
        for _ in site_list:
            vmname_file.write("%s\n" % (r_velmodel))
        vmname_file.close()

        #
        # Create faultGlobal.in
        #
        r_faultfile = "faultGlobal.in"
        a_faultfile = os.path.join(a_tmpdir_stitch, r_faultfile)
        self.create_fault_global_in(a_faultfile)

        # Copy station list xy to the stitch directory
        r_station_file = "stations.xy"
        shutil.copy2(os.path.join(a_tmpdir_lf, r_station_file),
                     os.path.join(a_tmpdir_stitch, r_station_file))

        # Figure out hypocenter depth
        if self.r_srcfile is not None and self.r_srcfile != "":
            a_srcfile = os.path.join(self.a_indir, self.r_srcfile)
            hypo_dep = fault_utils.calculate_hypo_depth(a_srcfile)
        elif self.r_srffile is not None and self.r_srffile != "":
            sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
            a_srffile = os.path.join(self.a_indir, self.r_srffile)
            hypo_dep = fault_utils.get_hypocenter(a_srffile, sta_base)[2]
        else:
            # No way to determine hypocenter depth, exiting
            print("No way to determine hypocenter depth, exiting!")
            sys.exit(1)
        # Write stitch.inp file
        stitch_inp_file = open(os.path.join(a_tmpdir_stitch, "stitch.inp"), 'w')
        stitch_inp_file.write("%s\n" % (r_station_file))
        stitch_inp_file.write("VMname.list\n")
        stitch_inp_file.write("%s%s\n" % (a_tmpdir_hf, os.sep))
        stitch_inp_file.write("%s%s\n" % (a_tmpdir_lf, os.sep))
        # Merging frequency
        stitch_inp_file.write("1.0,  45.0	!merging frequency, fmax\n")
        stitch_inp_file.write("%.7f	!Hypocenter depth\n" % (hypo_dep))
        stitch_inp_file.write("1	!number of sources\n")
        stitch_inp_file.write("2	!displacement (1), velocity (2),"
                              " acceleration (3)\n")
        stitch_inp_file.close()

        # Save old directory
        old_cwd = os.getcwd()
        os.chdir(a_tmpdir_stitch)

        # All good, run the code!
        cmd = "%s >> %s 2>&1" % (self.cfg.A_STITCH, self.log)
        bband_utils.runprog(cmd, abort_on_error=True)

        # Restore old directory
        os.chdir(old_cwd)

    def run(self):
        """
        Runs the UCSB Syn1D simulator
        """
        print("UCSB Syn1D".center(80, '-'))

        #
        # Global installation parameters
        #
        install = InstallCfg.getInstance()
        #
        # Required inputs are sim_id, the src file, the FFSP output
        # and station list
        #
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
        self.log = os.path.join(install.A_OUT_LOG_DIR,
                                str(sim_id),
                                "%d.syn1d_%s.log" % (sim_id, sta_base))

        self.a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        self.a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_tmpdir_lf = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                   "syn1D_lf_%s" % (sta_base))
        a_tmpdir_hf = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                   "syn1D_hf_%s" % (sta_base))
        a_tmpdir_stitch = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                       "stitch_%s" % (sta_base))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))

        #
        # Make sure the output and tmp directories exist
        #
        bband_utils.mkdirs([self.a_tmpdir, a_tmpdir_lf, a_tmpdir_hf,
                            a_tmpdir_stitch, a_outdir],
                           print_cmd=False)

        # Parse SRC file
        a_srcfile = os.path.join(self.a_indir, self.r_srcfile)

        self.cfg = Syn1DCfg(self.vmodel_name, a_srcfile)

        # Read station list
        a_stations = os.path.join(self.a_indir, self.r_stations)
        print(a_stations)
        self.slo = StationList(a_stations)
        site_list = self.slo.getStationList()

        # Make sure syn1D can handle our station list
        if len(site_list) > self.cfg.MAX_STATIONS:
            raise bband_utils.ParameterError("Too many stations in "
                                             "the station list: %d. " %
                                             (len(site_list)) +
                                             "Maximum limit is %d." %
                                             (self.cfg.MAX_STATIONS))

        # Run Syn1D for LF
        print("Running Syn1D for LF...")
        self.run_syn1d(a_tmpdir_lf, self.cfg.A_UC_LF_VELMODEL,
                       self.cfg.A_UC_GREENBANK, self.cfg.A_UC_GREEN_SOIL,
                       self.cfg.A_UC_SYN1D_INP_FILE)

        # Run Syn1D for HF
        print("Running Syn1D for HF...")
        self.run_syn1d(a_tmpdir_hf, self.cfg.A_UC_HF_VELMODEL,
                       self.cfg.A_UC_HF_GREENBANK, self.cfg.A_UC_HF_GREEN_SOIL,
                       self.cfg.A_UC_SYN1D_INP_FILE)

        # Run Stitch to combine LF and HF
        print("Running Stitch...")
        self.run_stitch(a_tmpdir_stitch, a_tmpdir_lf, a_tmpdir_hf,
                        self.cfg.A_UC_LF_VELMODEL)

        #
        # Convert the outputs to BB format
        #

        # Copy station list ll to the stitch directory
        r_station_file = "stations.ll"
        shutil.copy2(os.path.join(a_tmpdir_lf, r_station_file),
                     os.path.join(a_tmpdir_stitch, r_station_file))

        # Save old directory
        old_cwd = os.getcwd()
        os.chdir(a_tmpdir_stitch)

        cmd = "%s >> %s 2>&1" % (self.cfg.A_CONV, self.log)
        bband_utils.runprog(cmd)

        # Restore old directory
        os.chdir(old_cwd)

        #
        # Move the results to the tmpdir directory. Use the station
        # list to determine the names of the output files the system
        # should have produced, and define an output name for each
        # station BB file.
        #
        for stat in site_list:
            a_tmpfile = os.path.join(a_tmpdir_stitch, "%s.3comp" % (stat.scode))
            expected_file = os.path.join(self.a_tmpdir,
                                         "%d.%s.bbp" % (sim_id, stat.scode))
            shutil.copy2(a_tmpfile, expected_file)

        if self.r_srcfile == "":
            # calculate magnitude and write to file
            mag = fault_utils.get_magnitude(os.path.join(self.a_indir,
                                                         self.r_velmodel),
                                            os.path.join(self.a_indir,
                                                         self.r_srffile),
                                            sta_base)
            mag_file = open(os.path.join(self.a_indir,
                                         "magnitude_%s" % (sta_base)), 'w')
            mag_file.write("%.2f" % mag)
            mag_file.flush()
            mag_file.close()

        print("UCSB Syn1D Completed".center(80, '-'))
Example #36
def load_all_data(comp_label, input_indir, input_obsdir, combined_file,
                  temp_dir, component):
    """
    This function loads all data from each station file
    and creates the structures needed for plotting.
    """
    data = {}

    # Get realizations
    realizations = sorted(os.listdir(input_indir))
    one_realization = realizations[0]
    basedir = os.path.join(input_indir, one_realization)

    # Get the GMPE data for the RZZ2015 metrics
    base_outdir = os.path.join(input_obsdir, one_realization, "validations",
                               "rzz2015_gmpe")
    a_rzz2015_gmpe = glob.glob("%s%s%s.rzz2015gmpe.txt" %
                               (base_outdir, os.sep, one_realization))
    a_rzz2015_gmpe = a_rzz2015_gmpe[0]
    # Get the station list
    a_statfile = glob.glob("%s%s*.stl" % (basedir, os.sep))
    if len(a_statfile) != 1:
        raise bband_utils.ProcessingError("Cannot get station list!")
    a_statfile = a_statfile[0]
    slo = StationList(a_statfile)
    site_list = slo.getStationList()

    # Get source file
    a_srcfile = glob.glob("%s%s*.src" % (basedir, os.sep))
    if len(a_srcfile) != 1:
        raise bband_utils.ProcessingError("Cannot get src file!")
    a_srcfile = a_srcfile[0]

    # Parse it!
    src_keys = bband_utils.parse_src_file(a_srcfile)

    # Go through all stations
    for site in site_list:
        slon = float(site.lon)
        slat = float(site.lat)
        stat = site.scode

        # Calculate Rrup
        origin = (src_keys['lon_top_center'], src_keys['lat_top_center'])
        dims = (src_keys['fault_length'], src_keys['dlen'],
                src_keys['fault_width'], src_keys['dwid'],
                src_keys['depth_to_top'])
        mech = (src_keys['strike'], src_keys['dip'], src_keys['rake'])

        site_geom = [float(site.lon), float(site.lat), 0.0]
        (fault_trace1, up_seis_depth, low_seis_depth, ave_dip, dummy1,
         dummy2) = putils.FaultTraceGen(origin, dims, mech)
        _, rrup, _ = putils.DistanceToSimpleFaultSurface(
            site_geom, fault_trace1, up_seis_depth, low_seis_depth, ave_dip)

        # Read data for this station
        data_file = os.path.join(temp_dir, "%s.rzz2015" % (stat))

        data[stat] = {}
        data[stat]["dist"] = rrup
        data[stat]["r1"] = []
        data[stat]["r2"] = []
        data[stat]["r3"] = []
        data[stat]["r4"] = []
        data[stat]["r5"] = []
        data[stat]["r1_obs"] = None
        data[stat]["r2_obs"] = None
        data[stat]["r3_obs"] = None
        data[stat]["r4_obs"] = None
        data[stat]["r5_obs"] = None
        data[stat]["r1_gmpe"] = None
        data[stat]["r2_gmpe"] = None
        data[stat]["r3_gmpe"] = None
        data[stat]["r4_gmpe"] = None
        data[stat]["r5_gmpe"] = None

        in_file = open(data_file, 'r')
        for line in in_file:
            line = line.strip()
            if line.startswith("#"):
                # Skip comments
                continue
            pieces = line.split(",")
            comp = pieces[1].strip()
            # Check if we want this component
            if component != "both":
                if comp != component:
                    # Skip
                    continue
            # We want this data point
            pieces = pieces[2:]
            pieces = [float(piece) for piece in pieces]
            # Get observation values
            if data[stat]["r1_obs"] is None:
                data[stat]["r1_obs"] = pieces[6]
            if data[stat]["r2_obs"] is None:
                data[stat]["r2_obs"] = pieces[8]
            if data[stat]["r3_obs"] is None:
                data[stat]["r3_obs"] = pieces[10]
            if data[stat]["r4_obs"] is None:
                data[stat]["r4_obs"] = pieces[12]
            if data[stat]["r5_obs"] is None:
                data[stat]["r5_obs"] = pieces[14]
            # Get simulated data values
            data[stat]["r1"].append(pieces[7])
            data[stat]["r2"].append(pieces[9])
            data[stat]["r3"].append(pieces[11])
            data[stat]["r4"].append(pieces[13])
            data[stat]["r5"].append(pieces[15])
        in_file.close()

    gmpe_file = open(a_rzz2015_gmpe, 'r')
    for line in gmpe_file:
        line = line.strip()
        # Skip comments
        if line.startswith("#"):
            continue
        pieces = line.split(",")
        stat = pieces[0].strip()
        pieces = pieces[1:]
        pieces = [float(piece.strip()) for piece in pieces]
        data[stat]["r1_gmpe"] = pieces[2]
        data[stat]["r2_gmpe"] = pieces[3]
        data[stat]["r3_gmpe"] = pieces[2] / pieces[3]
        data[stat]["r4_gmpe"] = pieces[5]
        data[stat]["r5_gmpe"] = pieces[6]
    gmpe_file.close()

    # Return all data
    return data
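The dictionary returned above is keyed by station code; each entry holds the rupture distance, per-realization lists (r1..r5), and scalar observed and GMPE values. A sketch of that layout with hypothetical numbers:

# Hypothetical illustration of the structure returned by load_all_data()
example_data = {
    "s01": {
        "dist": 12.3,            # rupture distance for this station
        "r1": [0.41, 0.39],      # one value per realization (r2..r5 similar)
        "r1_obs": 0.45,          # scalar observation
        "r1_gmpe": 0.50,         # scalar GMPE value
    },
}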
Example #38
    def run(self):
        """
        Runs the match module to merge low and high frequency seismograms
        """
        print("Match".center(80, '-'))

        install = InstallCfg.getInstance()
        config = MatchCfg()

        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
        self.log = os.path.join(install.A_OUT_LOG_DIR,
                                str(sim_id),
                                "%d.match_%s.log" % (sim_id, sta_base))

        a_statfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_stations)

        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))

        # Make sure tmpdir exists
        dirs = [a_tmpdir]
        bband_utils.mkdirs(dirs, print_cmd=False)

        pow2_param = 0
        if self.pow2:
            pow2_param = 1

        # Start with defaults
        self.phase = config.PHASE
        self.hf_fhi = config.HF_FHI
        self.lf_flo = config.LF_FLO

        # Set match method
        if config.MATCH_METHOD == 1:
            self.phase = 1
        elif config.MATCH_METHOD == 2:
            val = 1.0 / (2.0 * config.HF_ORD)
            self.hf_fhi = (self.hf_fhi *
                           math.exp(val * math.log(math.sqrt(2.0) - 1.0)))
            val = -1.0 / (2.0 * config.LF_ORD)
            self.lf_flo = (self.lf_flo *
                           math.exp(val * math.log(math.sqrt(2.0) - 1.0)))
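            # Note: exp(val * log(sqrt(2) - 1)) == (sqrt(2) - 1) ** val,
            # so the HF corner is scaled by (sqrt(2) - 1) ** (1 / (2 * HF_ORD))
            # and the LF corner by (sqrt(2) - 1) ** (-1 / (2 * LF_ORD)).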

        #
        # Read and parse the station list with this call
        #
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Get pointer to the velocity model object
        vel_obj = velocity_models.get_velocity_model_by_name(self.vmodel_name)
        if vel_obj is None:
            raise bband_utils.ParameterError("Cannot find velocity model: %s" %
                                             (self.vmodel_name))

        # Check for velocity model-specific parameters
        vmodel_params = vel_obj.get_codebase_params('gp')

        # Figure out what DT we should use when resampling

        # Figure out the LF DT value
        if self.acc:
            seis_ext = '.acc.bbp'
        else:
            seis_ext = '.bbp'
        lf_seis = None

        # Find one LF seismogram
        for sites in site_list:
            site = sites.scode
            if os.path.exists(os.path.join(a_tmpdir,
                                           "%d.%s-lf%s" %
                                           (sim_id, site,
                                            seis_ext))):
                lf_seis = os.path.join(a_tmpdir,
                                       "%d.%s-lf%s" %
                                       (sim_id, site,
                                        seis_ext))
                break

        # Need one file
        if lf_seis is None:
            raise bband_utils.ParameterError("Cannot find a LF seismogram")

        # Pick DT from this file
        lf_dt = None
        lf_file = open(lf_seis)
        for line in lf_file:
            line = line.strip()
            if line.startswith("#") or line.startswith("%"):
                continue
            # Got to first timestamp. Now, pick two consecutive
            # timestamp values
            lf_t1 = float(line.strip().split()[0])
            lf_t2 = float(next(lf_file).strip().split()[0])
            # Subtract the two times
            lf_dt = lf_t2 - lf_t1
            # All done!
            break
        lf_file.close()

        if lf_dt is None:
            raise bband_utils.ParameterError("Cannot find LF_DT!")

        # lf_dt *should* match the gf_dt used by jbsim
        #if not 'GF_DT' in vmodel_params:
        #    raise bband_utils.ParameterError("Cannot find GF_DT parameter in "
        #                                     "velocity model: %s" %
        #                                     (self.vmodel_name))
        # In the GP method, we can potentially have two independent DT
        # values, one used by the rupture generator and the
        # low-frequency jbsim seismogram simulator, and another value
        # used by the high-frequency hfsims program. We have to use
        # the smaller of these two values in order to properly combine
        # the low-, and high-frequency seismograms.
        #gf_dt = float(vmodel_params['GF_DT'])
        if 'HF_DT' in vmodel_params:
            hf_dt = float(vmodel_params['HF_DT'])
        else:
            hf_dt = config.NEW_HFDT

        new_dt = min(lf_dt, hf_dt)

        # Go through the stations
        for sites in site_list:
            # Pick station name
            site = sites.scode
            #
            # We have a verbose or silent invocation. This is a very
            # verbose program, so by default we write to /dev/null
            #

            #
            # There are multiple possibilities; either we have
            # separate HF and LF files, we have HF and .bbp, LF and
            # .bbp, or just .bbp.  In all cases, we need to separate
            # them to get components.
            #
            hf_exists = False
            lf_exists = False

            if not self.acc:
                print("==> Processing velocity seismograms for station: %s" %
                      (site))
                # Need to convert to acc first
                if os.path.exists(os.path.join(a_tmpdir,
                                               "%d.%s-hf.bbp" %
                                               (sim_id, site))):
                    hf_exists = True
                if os.path.exists(os.path.join(a_tmpdir,
                                               "%d.%s-lf.bbp" %
                                               (sim_id, site))):
                    lf_exists = True

                # If no files exist for this station, make a note and continue
                if not hf_exists and not lf_exists:
                    print("===> No velocity seismograms found!")
                    print("===> Skipping station...")
                    continue

                # First process HF files to convert velocity to acceleration

                # Create path names and check if their sizes are
                # within bounds
                nsfile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.000" % (sim_id, site))
                ewfile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.090" % (sim_id, site))
                udfile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.ver" % (sim_id, site))
                bbpfile = os.path.join(a_tmpdir,
                                       "%d.%s-hf.bbp" % (sim_id, site))

                bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                               bband_utils.GP_MAX_FILENAME)

                # Run wcc2bbp
                cmd = ("%s " %
                       (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                       "nsfile=%s ewfile=%s udfile=%s " %
                       (nsfile, ewfile, udfile) +
                       "wcc2bbp=0 < %s >> %s 2>&1" %
                       (bbpfile, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

                for comp in config.COMPS:
                    # Create path names and check if their sizes
                    # are within bounds
                    filein = os.path.join(a_tmpdir,
                                          "%d.%s-hf.%s" %
                                          (sim_id, site, comp))
                    fileout = os.path.join(a_tmpdir,
                                           "%d.%s-hf.acc.%s" %
                                           (sim_id, site, comp))

                    bband_utils.check_path_lengths([filein, fileout],
                                                   bband_utils.GP_MAX_FILENAME)

                    cmd = ("%s diff=1 " %
                           (os.path.join(install.A_GP_BIN_DIR,
                                         "integ_diff")) +
                           "filein=%s fileout=%s" % (filein, fileout))
                    bband_utils.runprog(cmd, abort_on_error=True,
                                        print_cmd=False)

                # Create path names and check if their sizes are within bounds
                nsfile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.acc.000" % (sim_id, site))
                ewfile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.acc.090" % (sim_id, site))
                udfile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.acc.ver" % (sim_id, site))
                bbpfile = os.path.join(a_tmpdir,
                                       "%d.%s-hf.acc.bbp" % (sim_id, site))

                bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s " %
                       (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                       "nsfile=%s ewfile=%s udfile=%s " %
                       (nsfile, ewfile, udfile) +
                       "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                       (bbpfile, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

                # Then process LF files to convert velocity to acceleration

                # Create path names and check if their sizes are within bounds
                nsfile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.000" % (sim_id, site))
                ewfile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.090" % (sim_id, site))
                udfile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.ver" % (sim_id, site))
                bbpfile = os.path.join(a_tmpdir,
                                       "%d.%s-lf.bbp" % (sim_id, site))

                bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s " %
                       (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                       "nsfile=%s ewfile=%s udfile=%s " %
                       (nsfile, ewfile, udfile) +
                       "wcc2bbp=0 < %s >> %s 2>&1" %
                       (bbpfile, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

                for comp in config.COMPS:
                    # Create path names and check if their sizes
                    # are within bounds
                    filein = os.path.join(a_tmpdir,
                                          "%d.%s-lf.%s" %
                                          (sim_id, site, comp))
                    fileout = os.path.join(a_tmpdir,
                                           "%d.%s-lf.acc.%s" %
                                           (sim_id, site, comp))

                    bband_utils.check_path_lengths([filein, fileout],
                                                   bband_utils.GP_MAX_FILENAME)

                    cmd = ("%s " %
                           (os.path.join(install.A_GP_BIN_DIR,
                                         "integ_diff")) +
                           "diff=1 filein=%s fileout=%s" %
                           (filein, fileout))
                    bband_utils.runprog(cmd, abort_on_error=True,
                                        print_cmd=False)

                # Create path names and check if their sizes are within bounds
                nsfile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.acc.000" % (sim_id, site))
                ewfile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.acc.090" % (sim_id, site))
                udfile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.acc.ver" % (sim_id, site))
                bbpfile = os.path.join(a_tmpdir,
                                       "%d.%s-lf.acc.bbp" % (sim_id, site))

                bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s " %
                       (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                       "nsfile=%s ewfile=%s udfile=%s " %
                       (nsfile, ewfile, udfile) +
                       "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                       (bbpfile, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # We should have acceleration files at this point
            hf_exists = False
            lf_exists = False

            if os.path.exists(os.path.join(a_tmpdir,
                                           "%d.%s-hf.acc.bbp" %
                                           (sim_id, site))):
                hf_exists = True
            if os.path.exists(os.path.join(a_tmpdir,
                                           "%d.%s-lf.acc.bbp" %
                                           (sim_id, site))):
                lf_exists = True

            print("==> Processing acceleration seismograms for station: %s" %
                  (site))

            # If no files exist for this station, make a note and continue
            if not hf_exists and not lf_exists:
                print("===> No acceleration seismograms found!")
                print("===> Skipping station...")
                continue

            #
            # Convert HF file to wcc components
            #

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s-hf.acc.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s-hf.acc.bbp" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            progstring = ("%s " %
                          (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                          "nsfile=%s ewfile=%s udfile=%s " %
                          (nsfile, ewfile, udfile) +
                          "wcc2bbp=0 < %s >> %s 2>&1" %
                          (bbpfile, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            #
            # Convert LF file to wcc components
            #

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s-lf.acc.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s-lf.acc.bbp" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            progstring = ("%s " %
                          (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                          "nsfile=%s ewfile=%s udfile=%s " %
                          (nsfile, ewfile, udfile) +
                          "wcc2bbp=0 < %s >> %s 2>&1" %
                          (bbpfile, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            #
            # Process each component
            #
            for entries in config.COMPS:
                compo = entries

                #
                # HF First
                #
                listfile = os.path.join(a_tmpdir, "%s.%s.hf.%s" %
                                        (config.FILTLIST, sta_base, compo))
                bband_utils.check_path_lengths([listfile],
                                               bband_utils.GP_MAX_FILENAME)

                # Create wcc_tfilter input file
                out = open(listfile, 'w')
                # Contains HF input file
                infile = os.path.join(a_tmpdir,
                                      "%d.%s-hf.acc.%s" %
                                      (sim_id, site, compo))
                out.write("%s\n" % infile)
                out.flush()
                out.close()

                # Also check infile
                bband_utils.check_path_lengths([infile],
                                               bband_utils.GP_MAX_FILENAME)

                #
                # Pre-filter and resample HF file
                #
                shutil.copy2(infile, "%s.prefilter" % infile)
                progstring = ("%s " %
                              (os.path.join(install.A_GP_BIN_DIR,
                                            "wcc_tfilter")) +
                              "filelist=%s order=%d fhi=%f flo=%s " %
                              (listfile, config.HF_ORD, self.hf_fhi,
                               config.HF_FLO) +
                              "inbin=0 outbin=0 phase=%d " %
                              (self.phase) +
                              "outpath=%s >> %s 2>&1" %
                              (a_tmpdir, self.log))
                bband_utils.runprog(progstring, abort_on_error=True,
                                    print_cmd=False)

                outfile = os.path.join(a_tmpdir, "%d.%s-hf-resamp.%s" %
                                       (sim_id, site, compo))
                bband_utils.check_path_lengths([outfile],
                                               bband_utils.GP_MAX_FILENAME)

                progstring = ("%s newdt=%f " %
                              (os.path.join(install.A_GP_BIN_DIR,
                                            "wcc_resamp_arbdt"), new_dt) +
                              "pow2=%d infile=%s outfile=%s >> %s 2>&1" %
                              (pow2_param, infile, outfile, self.log))
                bband_utils.runprog(progstring, abort_on_error=True,
                                    print_cmd=False)

                #
                # LF Next
                #
                listfile = os.path.join(a_tmpdir, "%s.%s.lf.%s" %
                                        (config.FILTLIST, sta_base, compo))
                bband_utils.check_path_lengths([listfile],
                                               bband_utils.GP_MAX_FILENAME)

                # Create wcc_tfilter input file
                out = open(listfile, 'w')
                # Contains LF input file
                infile = os.path.join(a_tmpdir,
                                      "%d.%s-lf.acc.%s" %
                                      (sim_id, site, compo))
                out.write("%s\n" % infile)
                out.flush()
                out.close()

                # Also check infile
                bband_utils.check_path_lengths([infile],
                                               bband_utils.GP_MAX_FILENAME)

                #
                # Pre-filter and resample LF file
                #
                shutil.copy2(infile, "%s.prefilter" % infile)
                if not self.using_3d:
                    progstring = ("%s " %
                                  (os.path.join(install.A_GP_BIN_DIR,
                                                "wcc_tfilter")) +
                                  "filelist=%s order=%d fhi=%f flo=%s " %
                                  (listfile, config.LF_ORD, config.LF_FHI,
                                   self.lf_flo) +
                                  "inbin=0 outbin=0 phase=%d " %
                                  (self.phase) +
                                  "outpath=%s >> %s 2>&1 " %
                                  (a_tmpdir, self.log))
                    bband_utils.runprog(progstring, print_cmd=False)

                outfile = os.path.join(a_tmpdir, "%d.%s-lf-resamp.%s" %
                                       (sim_id, site, compo))
                bband_utils.check_path_lengths([outfile],
                                               bband_utils.GP_MAX_FILENAME)

                progstring = ("%s " %
                              (os.path.join(install.A_GP_BIN_DIR,
                                            "wcc_resamp_arbdt")) +
                              "newdt=%f pow2=%d " %
                              (new_dt, pow2_param) +
                              "infile=%s outfile=%s >> %s 2>&1" %
                              (infile, outfile, self.log))
                bband_utils.runprog(progstring, abort_on_error=True,
                                    print_cmd=False)

                #
                # Add LF and HF resampled acc seismograms
                #

                # Check all path lengths
                infile1 = os.path.join(a_tmpdir, "%d.%s-lf-resamp.%s" %
                                       (sim_id, site, compo))
                infile2 = os.path.join(a_tmpdir, "%d.%s-hf-resamp.%s" %
                                       (sim_id, site, compo))
                outfile = os.path.join(a_tmpdir, "%d.%s.acc.add.%s" %
                                       (sim_id, site, compo))
                bband_utils.check_path_lengths([infile1, infile2, outfile],
                                               bband_utils.GP_MAX_FILENAME)

                progstring = ("%s " %
                              (os.path.join(install.A_GP_BIN_DIR, "wcc_add")) +
                              "f1=1.00 t1=%f inbin1=0 infile1=%s " %
                              (config.LF_TSTART, infile1) +
                              "f2=1.00 t2=%f inbin2=0 infile2=%s " %
                              (config.HF_TSTART, infile2) +
                              "outbin=0 outfile=%s >> %s 2>&1" %
                              (outfile, self.log))
                bband_utils.runprog(progstring, abort_on_error=True,
                                    print_cmd=False)

                #
                # Create combined velocity files
                #

                # Check path lengths
                filein = os.path.join(a_tmpdir,
                                      "%d.%s.acc.add.%s" %
                                      (sim_id, site, compo))
                fileout = os.path.join(a_tmpdir,
                                       "%d.%s.%s" %
                                       (sim_id, site, compo))
                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s integ=1 filein=%s fileout=%s" %
                       (os.path.join(install.A_GP_BIN_DIR,
                                     "integ_diff"), filein, fileout))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # We have all the component files, create velocity seismogram

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s.bbp" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            progstring = ("%s wcc2bbp=1 " %
                          (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                          'title="Sim NGAH, stat=%s" ' % site +
                          'nsfile=%s ewfile=%s udfile=%s > %s 2>> %s' %
                          (nsfile, ewfile, udfile, bbpfile, self.log))
            bband_utils.runprog(progstring, abort_on_error=True,
                                print_cmd=False)

            # Copy velocity bbp file to outdir
            shutil.copy2(os.path.join(a_tmpdir, "%d.%s.bbp" %
                                      (sim_id, site)),
                         os.path.join(a_outdir, "%d.%s.vel.bbp" %
                                      (sim_id, site)))

            # Also create acceleration bbp file in outdir

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s.bbp" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " % (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "wcc2bbp=0 < %s >> %s 2>&1" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            for comp in config.COMPS:
                # Create path names and check if their sizes are within bounds
                filein = os.path.join(a_tmpdir,
                                      "%d.%s.%s" %
                                      (sim_id, site, comp))
                fileout = os.path.join(a_tmpdir,
                                       "%d.%s.acc.%s" %
                                       (sim_id, site, comp))

                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s diff=1 filein=%s fileout=%s" %
                       (os.path.join(install.A_GP_BIN_DIR,
                                     "integ_diff"), filein, fileout))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.000" % (sim_id, site))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.090" % (sim_id, site))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.ver" % (sim_id, site))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s.acc.bbp" % (sim_id, site))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " % (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Copy acceleration bbp file to outdir
            shutil.copy2(os.path.join(a_tmpdir, "%d.%s.acc.bbp" %
                                      (sim_id, site)),
                         os.path.join(a_outdir, "%d.%s.acc.bbp" %
                                      (sim_id, site)))

        print("Match Completed".center(80, '-'))
Example #39
0
    def run(self):
        """
        This function creates GMPE plots for all stations
        """
        print("GMPE Plot".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                                "%d.gmpe_gof.log" % (sim_id))

        # Input, tmp, and output directories
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_outdir_gmpe = os.path.join(install.A_OUT_DATA_DIR, str(sim_id),
                                     "gmpe_data_%s" % (sta_base))

        #
        # Make sure the output and tmp directories exist
        #
        dirs = [a_tmpdir, a_outdir, a_outdir_gmpe]
        bband_utils.mkdirs(dirs, print_cmd=False)

        # Figure out gmpe labels
        gmpe_group = gmpe_config.GMPES[self.gmpe_group_name]
        gmpe_labels = gmpe_group["labels"]

        # Station file
        a_statfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_stations)
        # List of gmpe files
        filelist = os.listdir(a_outdir_gmpe)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Go through each station
        for site in site_list:
            stat = site.scode
            print("==> Generating plot for station: %s" % (stat))
            # Since we're using the GP station list, make sure the
            # .rd50 for the station exists.  It might not if we ran the
            # validation with a shorter station list
            sim_file = os.path.join(a_outdir, "%d.%s.rd50" % (sim_id, stat))
            if not os.path.exists(sim_file):
                # just skip it
                print("Couldn't find file %s. " % (sim_file) +
                      "This is not necessarily an error, as you may have " +
                      "run with a subset of a stations. Continuing " +
                      "with available stations.")
                continue

            # Ok, we have the calculated rd50 for this station
            # Look for the gmpe file
            r_gmpe_file = "%s-gmpe.ri50" % (stat)
            if r_gmpe_file not in filelist:
                # No gmpe file for this station
                continue
            a_gmpe_file = os.path.join(a_outdir_gmpe, r_gmpe_file)

            # Plot GMPE rotd50 results
            outfile = os.path.join(a_outdir, "%s_%d_%s_gmpe.png" %
                                   (self.comp_label, sim_id, stat))

            plot_gmpe.plot_gmpe(stat, sim_file, a_gmpe_file, gmpe_labels,
                                sim_id, self.comp_label, outfile)

        print("GMPE Plot Completed".center(80, '-'))
Example #40
0
    def calculate_simulated(self, a_statfile, a_tmpdir, a_outdir, a_dstdir):
        """
        This function calculates the RotD100/RotD50 values for the
        computed seismograms
        """
        install = install_cfg.InstallCfg.getInstance()
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        for site in site_list:
            stat = site.scode
            print("==> Calculating simulation RotD100 for station: %s" %
                  (stat))
            # Since we have velocity files, we need to differentiate
            # to get to acceleration

            # Create path names and check if their sizes are within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.000" % (sim_id, stat))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.090" % (sim_id, stat))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.ver" % (sim_id, stat))
            bbpfile = os.path.join(a_outdir,
                                   "%d.%s.vel.bbp" % (sim_id, stat))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " % (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "wcc2bbp=0 nsfile=%s ewfile=%s udfile=%s < %s >> %s 2>&1" %
                   (nsfile, ewfile, udfile, bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            for c in ["090", "000", "ver"]:
                # Differentiate to get from velocity to accl needed by rotd100
                # Create path names and check if their sizes are within bounds
                filein = os.path.join(a_tmpdir,
                                      "%d.%s.%s" %
                                      (sim_id, stat, c))
                fileout = os.path.join(a_tmpdir,
                                       "%d.%s.acc.%s" %
                                       (sim_id, stat, c))

                bband_utils.check_path_lengths([filein, fileout],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s diff=1 " %
                       (os.path.join(install.A_GP_BIN_DIR, "integ_diff")) +
                       "filein=%s fileout=%s >> %s 2>&1" %
                       (filein, fileout, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

                # Check that the resulting path length is within bounds
                bband_utils.check_path_lengths(["%s/%d.%s.acc.%s" %
                                                (a_tmpdir, sim_id, stat, c)],
                                               bband_utils.GP_MAX_FILENAME)

            # Now we need to convert them back to bbp
            # Create path names and check if their sizes are
            # within bounds
            nsfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.000" % (sim_id, stat))
            ewfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.090" % (sim_id, stat))
            udfile = os.path.join(a_tmpdir,
                                  "%d.%s.acc.ver" % (sim_id, stat))
            bbpfile = os.path.join(a_tmpdir,
                                   "%d.%s.acc.bbp" % (sim_id, stat))

            bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s " % (os.path.join(install.A_GP_BIN_DIR, "wcc2bbp")) +
                   "nsfile=%s ewfile=%s udfile=%s " %
                   (nsfile, ewfile, udfile) +
                   "units=cm/s/s wcc2bbp=1 > %s 2>> %s" %
                   (bbpfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Now we need to convert to peer format
            out_n_acc = os.path.join(a_tmpdir,
                                     "%d.%s.peer_n.acc" % (sim_id, stat))
            out_e_acc = os.path.join(a_tmpdir,
                                     "%d.%s.peer_e.acc" % (sim_id, stat))
            out_z_acc = os.path.join(a_tmpdir,
                                     "%d.%s.peer_z.acc" % (sim_id, stat))
            bbp_formatter.bbp2peer(bbpfile, out_n_acc, out_e_acc, out_z_acc)

            # Let's have rotD100 create these output files
            out_rotd100_base = "%d.%s.rd100" % (sim_id, stat)
            tmp_rotd100 = os.path.join(a_tmpdir, out_rotd100_base)
            out_rotd100 = os.path.join(a_dstdir, out_rotd100_base)

            # Run the rotD100 program
            self.do_rotd100(a_tmpdir, out_e_acc, out_n_acc, out_z_acc,
                            out_rotd100, self.log)

            cmd = "cp %s %s" % (tmp_rotd100, out_rotd100)
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)
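
# do_rotd100 above delegates the full period-dependent RotD100/RotD50
# calculation to an external program. The hedged sketch below illustrates
# only the underlying idea for peak ground acceleration: rotate the two
# horizontals through 0-179 degrees, take the peak per azimuth, then the
# median (RotD50) and maximum (RotD100) over azimuths.
import numpy as np

def rotd_pga(acc_ns, acc_ew, step_deg=1.0):
    """Return (rotd50, rotd100) of PGA from two horizontal time series."""
    angles = np.deg2rad(np.arange(0.0, 180.0, step_deg))
    peaks = np.array([np.max(np.abs(acc_ns * np.cos(ang) +
                                    acc_ew * np.sin(ang)))
                      for ang in angles])
    return np.median(peaks), peaks.max()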
Example #41
0
    def run(self):
        """
        Calculate GMPEs, create bias plot comparisons
        """
        print("GMPE Comparison".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        # Input, tmp, and output directories
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_tmpdir_seis = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                     "obs_seis_%s" % (sta_base))
        a_outdir_gmpe = os.path.join(install.A_OUT_DATA_DIR, str(sim_id),
                                     "gmpe_data_%s" % (sta_base))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))

        self.log = os.path.join(a_logdir, "%d.gmpe_compare.log" % (sim_id))

        #
        # Make sure the output and tmp directories exist
        #
        dirs = [a_tmpdir, a_tmpdir_seis, a_outdir_gmpe, a_outdir, a_logdir]
        bband_utils.mkdirs(dirs, print_cmd=False)

        # Source file, parse it!
        a_srcfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                                 self.r_src_file)
        self.src_keys = bband_utils.parse_src_file(a_srcfile)

        # Station file
        a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                                  self.r_stations)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Go through each station, and print comparison headers for
        # the first station we process
        print_headers = True
        gmpe_models = []
        for site in site_list:
            stat = site.scode
            obs_file = os.path.join(a_tmpdir_seis, "%s.rd50" % (stat))
            gmpe_file = os.path.join(a_outdir_gmpe, "%s-gmpe.ri50" % (stat))
            # Skip station if we don't have observation file
            if not os.access(obs_file, os.R_OK):
                continue
            gmpe_data, gmpe_models[:] = self.read_gmpe(gmpe_file)
            obs_periods, obs_data = self.read_rotd50(obs_file)

            # Loop through the NGA methods
            for gmpe_model in gmpe_models:
                resid_file = os.path.join(
                    a_outdir_gmpe,
                    "%s-%d.resid.txt" % (gmpe_model.lower(), sim_id))
                period_set = self.calculate_residuals(site, gmpe_model,
                                                      gmpe_data, obs_periods,
                                                      obs_data, resid_file,
                                                      print_headers)
            print_headers = False

        for gmpe_model in gmpe_models:
            # Now call the resid2uncer_varN program to summarize the
            # residuals and create the files needed for the GOF plot
            resid_file = os.path.join(
                a_outdir_gmpe,
                "%s-%d.resid.txt" % (gmpe_model.lower(), sim_id))
            fileroot = os.path.join(
                a_outdir, "%s-GMPE-%d_r%d-all-rd50-%s" %
                (self.comp_label, sim_id, 0, gmpe_model.lower()))
            cmd = ("%s/resid2uncer_varN " % (install.A_GP_BIN_DIR) +
                   "residfile=%s fileroot=%s " % (resid_file, fileroot) +
                   "comp=%s nstat=%d nper=%d " %
                   (gmpe_model.lower(), len(site_list), len(period_set)) +
                   "min_cdst=%d >> %s 2>&1" % (0, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

        # Plot GOF plot
        gmpe_group = gmpe_config.GMPES[self.gmpe_group_name]
        gmpe_labels = gmpe_group["labels"]
        plotter = PlotGoF()
        plottitle = "Comparison between GMPEs and %s" % (self.comp_label)
        fileroot = "%s-GMPE-%d_r%d-all-rd50-" % (self.comp_label, sim_id, 0)
        dataroot = [
            "%s%s" % (fileroot, model.lower()) for model in gmpe_models
        ]
        plotter.multi_plot(plottitle, dataroot, a_outdir, a_outdir,
                           gmpe_labels, len(site_list))

        print("GMPE Comparison Completed".center(80, '-'))
Example #42
0
    def run(self):
        """
        Generate an index file in the outdata directory
        """
        print("GenHTML".center(80, '-'))

        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                                "%d.genhtml.log" % (sim_id))
        a_statfile = os.path.join(a_indir, self.r_stations)
        a_param_outdir = os.path.join(a_outdir, "param_files")
        a_param_statfile = os.path.join(a_param_outdir, self.r_stations)
        if self.r_src_file is not None and self.r_src_file != "":
            a_src_file = os.path.join(a_indir, self.r_src_file)
            a_param_srcfile = os.path.join(a_param_outdir, self.r_src_file)
            src_props = bband_utils.parse_properties(a_src_file)
            if "seed" in src_props:
                seed = src_props["seed"]
            else:
                seed = "not available"
        else:
            a_src_file = None
            a_param_srcfile = None

        # Make sure tmpdir, outdir exist
        dirs = [a_tmpdir, a_outdir, a_param_outdir]
        bband_utils.mkdirs(dirs, print_cmd=False)

        # Copy station list, srf_file to outdir's param_files directory
        shutil.copy2(a_statfile, a_param_statfile)
        if a_param_srcfile is not None:
            shutil.copy2(a_src_file, a_param_srcfile)

        # Get pointer to the velocity model object
        vel_obj = velocity_models.get_velocity_model_by_name(self.vmodel_name)
        if vel_obj is None:
            raise bband_utils.ParameterError("Cannot find velocity model: %s" %
                                             (self.vmodel_name))
        vel_version = ("%s - %s" % (vel_obj.get_name(), vel_obj.get_version()))

        # Get pointer to validation object, if any
        val_version = None
        if self.val_name:
            val_obj = validation_cfg.VE_EVENTS.get_event_by_name(self.val_name)
            if val_obj is not None:
                val_version = ("%s - %s" % (val_obj.get_print_name(),
                                            val_obj.get_version()))

        #
        # Read and parse the station list with this call
        #
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        index_file = os.path.join(a_outdir, "index-%d.html" % (sim_id))
        idxout = open(index_file, 'w')
        idxout.write("<html>\n")
        idxout.write("<title>Results for simulation %d</title>\n" % (sim_id))
        idxout.write("<body>\n")
        idxout.write("<h2>Simulation Results</h2>\n")
        idxout.write("<table>\n")
        idxout.write("<tr>\n")
        idxout.write("<td>Broadband Version</td>\n")
        idxout.write("<td>%s</td>\n" % (install.VERSION))
        idxout.write("</tr>\n")
        idxout.write("<tr>\n")
        idxout.write("<td>Velocity model version</td>\n")
        idxout.write("<td>%s</td>\n" % (vel_version))
        idxout.write("</tr>\n")
        if val_version:
            idxout.write("<tr>\n")
            idxout.write("<td>Validation package version</td>\n")
            idxout.write("<td>%s</td>\n" % (val_version))
            idxout.write("</tr>\n")
        if install.start_time is not None:
            idxout.write("<tr>\n")
            idxout.write("<td>Simulation Start Time</td>\n")
            idxout.write("<td>%s</td>\n" %
                         (time.strftime("%a %d %b %Y %X %Z",
                                        install.start_time)))
            idxout.write("</tr>\n")
        idxout.write("<tr>\n")
        idxout.write("<td>Simulation End Time</td>\n")
        idxout.write("<td>%s</td>\n" %
                     (time.strftime("%a %d %b %Y %X %Z",
                                    time.localtime())))
        idxout.write("</tr>\n")
        idxout.write("<tr>\n")
        idxout.write("<td>Simulation ID</td>\n")
        idxout.write("<td>%d</td>\n" % (sim_id))
        idxout.write("</tr>\n")
        idxout.write("<tr>\n")
        idxout.write("<td>Simulation Method</td>\n")
        idxout.write("<td>%s</td>\n" % (self.method))
        idxout.write("</tr>\n")
        # Add xml file
        if os.path.exists(os.path.join(a_outdir, "%d.xml" % (sim_id))):
            idxout.write("<tr>\n")
            idxout.write("<td>Sim Spec</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%d.xml" % (sim_id)),
                          "%d.xml" % (sim_id)))
            idxout.write("</tr>\n")
        # Add station list and src_file
        if os.path.exists(os.path.join(a_param_outdir, self.r_stations)):
            idxout.write("<tr>\n")
            idxout.write("<td>Station List</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "param_files", self.r_stations),
                          self.r_stations))
            idxout.write("</tr>\n")
        if a_param_srcfile is not None:
            if os.path.exists(os.path.join(a_param_outdir, self.r_src_file)):
                idxout.write("<tr>\n")
                idxout.write("<td>Source Description</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".",
                                           "param_files",
                                           self.r_src_file),
                              self.r_src_file))
                idxout.write("</tr>\n")
                idxout.write("<tr>\n")
                idxout.write("<td>Random Seed</td>\n")
                idxout.write('<td>%s</td>\n' % (seed))
                idxout.write("</tr>\n")
        # Get bias plots
        dist_lin_plot = glob.glob(os.path.join(a_outdir, "gof-dist-lin*.png"))
        dist_log_plot = glob.glob(os.path.join(a_outdir, "gof-dist-log*.png"))
        plots = glob.glob(os.path.join(a_outdir, "gof*.png"))
        rd50plot = glob.glob(os.path.join(a_outdir, "gof*-rd50.png"))
        gmpegofplot = glob.glob(os.path.join(a_outdir, "gof*-GMPE-*.png"))
        mapgofplot = glob.glob(os.path.join(a_outdir, "gof-map-*.png"))
        if len(gmpegofplot) == 1:
            gmpegofplot = gmpegofplot[0]
        else:
            gmpegofplot = ""
        if len(mapgofplot) == 1:
            mapgofplot = mapgofplot[0]
        else:
            mapgofplot = ""
        if len(dist_lin_plot) == 1:
            dist_lin_plot = dist_lin_plot[0]
        else:
            dist_lin_plot = ""
        if len(dist_log_plot) == 1:
            dist_log_plot = dist_log_plot[0]
        else:
            dist_log_plot = ""
        if len(rd50plot) == 1:
            rd50plot = rd50plot[0]
        else:
            if gmpegofplot:
                rd50plot = [plot for plot in rd50plot if plot != gmpegofplot]
            if mapgofplot:
                rd50plot = [plot for plot in rd50plot if plot != mapgofplot]
            if dist_lin_plot:
                rd50plot = [plot for plot in rd50plot if plot != dist_lin_plot]
            if dist_log_plot:
                rd50plot = [plot for plot in rd50plot if plot != dist_log_plot]
            if len(rd50plot) == 1:
                rd50plot = rd50plot[0]
            else:
                rd50plot = ""
        if len(plots) > 1:
            rspplot = [plot for plot in plots if (plot != rd50plot and
                                                  plot != gmpegofplot and
                                                  plot != mapgofplot and
                                                  plot != dist_lin_plot and
                                                  plot != dist_log_plot)]
            if len(rspplot) == 1:
                rspplot = rspplot[0]
            else:
                rspplot = ""
        else:
            rspplot = ""
        gmpegofplot = os.path.basename(gmpegofplot)
        mapgofplot = os.path.basename(mapgofplot)
        rd50plot = os.path.basename(rd50plot)
        rspplot = os.path.basename(rspplot)
        dist_lin_plot = os.path.basename(dist_lin_plot)
        dist_log_plot = os.path.basename(dist_log_plot)

        # Add RotD50 bias plot
        if rd50plot:
            idxout.write("<tr>\n")
            idxout.write("<td>RotD50 Bias Plot</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%s" % (rd50plot)),
                          "PNG"))
            idxout.write("</tr>\n")
        if mapgofplot:
            idxout.write("<tr>\n")
            idxout.write("<td>RotD50 Map GOF Plot</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%s" % (mapgofplot)),
                          "PNG"))
            idxout.write("</tr>\n")
        # Add RSP bias plot
        if rspplot:
            idxout.write("<tr>\n")
            idxout.write("<td>Respect Bias Plot</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%s" % (rspplot)),
                          "PNG"))
            idxout.write("</tr>\n")
        # Add the GMPE bias plot
        if gmpegofplot:
            idxout.write("<tr>\n")
            idxout.write("<td>GMPE Comparison Bias Plot</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%s" % (gmpegofplot)),
                          "PNG"))
            idxout.write("</tr>\n")
        # Add distance plots
        if dist_lin_plot:
            idxout.write("<tr>\n")
            idxout.write("<td>RotD50 Dist Bias Linear</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%s" % (dist_lin_plot)),
                          "PNG"))
            idxout.write("</tr>\n")
        if dist_log_plot:
            idxout.write("<tr>\n")
            idxout.write("<td>RotD50 Dist Bias Log</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "%s" % (dist_log_plot)),
                          "PNG"))
            idxout.write("</tr>\n")
        # Add station map and kml file
        if os.path.exists(os.path.join(a_outdir, "station_map.png")):
            idxout.write("<tr>\n")
            idxout.write("<td>Station Map</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", "station_map.png"),
                          "PNG"))
            if os.path.exists(os.path.join(a_outdir, "station_map.kml")):
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", "station_map.kml"),
                              "KML"))
            idxout.write("</tr>\n")
        # Now get SRF file and plot
        srfs = glob.glob(os.path.join(a_outdir, "*.srf"))
        if len(srfs) == 1:
            srffile = os.path.basename(srfs[0])
            srfplot = ("%s.png" %
                       (os.path.basename(os.path.splitext(srffile)[0])))
            if not os.path.exists(os.path.join(a_outdir, srfplot)):
                srfplot = ""
        else:
            srffile = ""
            srfplot = ""
        if srffile:
            idxout.write("<tr>\n")
            idxout.write("<td>Rupture file</td>\n")
            idxout.write('<td><a href="%s">%s</a></td>\n' %
                         (os.path.join(".", srffile),
                          "data"))
            if srfplot:
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", srfplot),
                              "PNG"))
            idxout.write("</tr>\n")
        idxout.write("</table>\n")
        idxout.write("<p><p>\n")

        for sits in site_list:
            site = sits.scode
            idxout.write("<p>\n")
            idxout.write("<h2>%s</h2>\n" % (site))
            idxout.write("<table>\n")
            # Find all files
            velfile = "%d.%s.vel.bbp" % (sim_id, site)
            velplot = "%d.%s_velocity_seis.png" % (sim_id, site)
            accfile = "%d.%s.acc.bbp" % (sim_id, site)
            accplot = "%d.%s_acceleration_seis.png" % (sim_id, site)
            rd50file = "%d.%s.rd50" % (sim_id, site)
            rspfile = "%d.%s.rsp" % (sim_id, site)
            rd50plot = glob.glob(os.path.join(a_outdir,
                                              "*_%d_%s_rotd50.png" %
                                              (sim_id, site)))
            if len(rd50plot) == 1:
                rd50plot = os.path.basename(rd50plot[0])
            else:
                rd50plot = ""
            rspplot = glob.glob(os.path.join(a_outdir,
                                             "*_%d_%s_rsp.png" %
                                             (sim_id, site)))
            if len(rspplot) == 1:
                rspplot = os.path.basename(rspplot[0])
            else:
                rspplot = ""
            overlayfile = glob.glob(os.path.join(a_outdir,
                                                 "*_%d_%s_overlay.png" %
                                                 (sim_id, site)))
            if len(overlayfile) == 1:
                overlayfile = os.path.basename(overlayfile[0])
            else:
                overlayfile = ""
            gmpeplot = glob.glob(os.path.join(a_outdir,
                                              "*_%d_%s_gmpe.png" %
                                              (sim_id, site)))
            if len(gmpeplot) == 1:
                gmpeplot = os.path.basename(gmpeplot[0])
            else:
                gmpeplot = ""

            if os.path.exists(os.path.join(a_outdir, velfile)):
                idxout.write("<tr>\n")
                idxout.write("<td>Velocity</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", velfile),
                              "BBP"))
                if os.path.exists(os.path.join(a_outdir, velplot)):
                    idxout.write('<td><a href="%s">%s</a></td>\n' %
                                 (os.path.join(".", velplot),
                                  "PNG"))
                idxout.write("</tr>\n")
            if os.path.exists(os.path.join(a_outdir, accfile)):
                idxout.write("<tr>\n")
                idxout.write("<td>Acceleration</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", accfile),
                              "BBP"))
                if os.path.exists(os.path.join(a_outdir, accplot)):
                    idxout.write('<td><a href="%s">%s</a></td>\n' %
                                 (os.path.join(".", accplot),
                                  "PNG"))
                idxout.write("</tr>\n")
            if os.path.exists(os.path.join(a_outdir, rd50file)):
                idxout.write("<tr>\n")
                idxout.write("<td>RotD50</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", rd50file),
                              "data"))
                if rd50plot:
                    idxout.write('<td><a href="%s">%s</a></td>\n' %
                                 (os.path.join(".", rd50plot),
                                  "PNG"))
                idxout.write("</tr>\n")
            if os.path.exists(os.path.join(a_outdir, rspfile)):
                idxout.write("<tr>\n")
                idxout.write("<td>Respect</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", rspfile),
                              "data"))
                if rspplot:
                    idxout.write('<td><a href="%s">%s</a></td>\n' %
                                 (os.path.join(".", rspplot),
                                  "PNG"))
                idxout.write("</tr>\n")
            if overlayfile:
                idxout.write("<tr>\n")
                idxout.write("<td>Overlay</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", overlayfile),
                              "PNG"))
                idxout.write("</tr>\n")
            if gmpeplot:
                idxout.write("<tr>\n")
                idxout.write("<td>GMPE Plot</td>\n")
                idxout.write('<td><a href="%s">%s</a></td>\n' %
                             (os.path.join(".", gmpeplot),
                              "PNG"))
                idxout.write("</tr>\n")

            idxout.write("</table>\n")

        idxout.write("</body>\n")
        idxout.write("</html>\n")
        idxout.close()

        print("==> Wrote file: %s" % (index_file))
        print("GenHTML Completed".center(80, '-'))
Example #43
0
    def calculate_observations(self, a_indir, a_statfile, a_tmpdir_seis, a_dstdir):
        """
        This function calculates RotD100/RotD50 for the observation
        seismograms. It corrects the observations using the user-provided
        correction coefficients.
        """
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # Initialize the CorrectPSA module
        if self.obs_corrections:
            corr_psa = CorrectPSA(self.r_stations,
                                  "rd100",
                                  os.path.join(a_indir,
                                               self.obs_corrections),
                                  a_tmpdir_seis, sim_id)
        else:
            corr_psa = None

        # List of observed seismogram files
        filelist = os.listdir(self.a_obsdir)

        # Go through each station
        for site in site_list:
            stat = site.scode
            print("==> Calculating observations RotD100 for station: %s" %
                  (stat))
            # Check if we have the corresponding calculated seismogram
            expected_calculated_file = os.path.join(a_dstdir,
                                                    "%d.%s.rd100" %
                                                    (sim_id, stat))
            if not os.path.exists(expected_calculated_file):
                # Just skip it
                print("Couldn't find file: %s" %
                      (expected_calculated_file) +
                      "This is not necessarily an error, as you may have " +
                      "run with a subset of a stations. Continuing " +
                      "with available stations.")
                continue

            # Ok, we have a simulated seismogram for this station,
            # let's look for the observed file
            r_e_peer_file = None
            r_n_peer_file = None
            r_z_peer_file = None
            r_bbp_file = "%s.bbp" % (stat)
            # Do different things depending on the format of the
            # observed seismograms
            if self.obs_format == "acc_bbp":
                # We need to look for the bbp file
                if r_bbp_file not in filelist:
                    # No bbp file for this station
                    continue
                print(r_bbp_file)
                # Copy bbp file to the tmp seismogram directory
                a_src_bbp_file = os.path.join(self.a_obsdir, r_bbp_file)
                a_dst_bbp_file = os.path.join(a_tmpdir_seis, r_bbp_file)
                shutil.copy2(a_src_bbp_file, a_dst_bbp_file)
                # Now we need to create the peer files to process with rotd50
                r_e_peer_file = os.path.join(a_tmpdir_seis, "%s_E.acc" % (stat))
                r_n_peer_file = os.path.join(a_tmpdir_seis, "%s_N.acc" % (stat))
                r_z_peer_file = os.path.join(a_tmpdir_seis, "%s_Z.acc" % (stat))
                bbp_formatter.bbp2peer(a_dst_bbp_file,
                                       r_n_peer_file,
                                       r_e_peer_file,
                                       r_z_peer_file)
            elif self.obs_format == "acc_peer":
                # Look for the E, N, and Z files
                for my_file in filelist:
                    if my_file.endswith("%s_E.acc" % (stat)):
                        r_e_peer_file = my_file
                        if (r_n_peer_file is not None and
                            r_z_peer_file is not None):
                            break
                    elif my_file.endswith("%s_N.acc" % (stat)):
                        r_n_peer_file = my_file
                        if (r_e_peer_file is not None and
                            r_z_peer_file is not None):
                            break
                    elif my_file.endswith("%s_Z.acc" % (stat)):
                        r_z_peer_file = my_file
                        if (r_e_peer_file is not None and
                            r_n_peer_file is not None):
                            break
                if ((r_e_peer_file is None) or
                    (r_n_peer_file is None) or
                    (r_z_peer_file is None)):
                    # Couldn't find all 3 files
                    continue
                #print(r_e_peer_file, r_n_peer_file, r_z_peer_file)
                # Copy all three files to the tmp seismogram directory
                for eachfile in (r_e_peer_file, r_n_peer_file, r_z_peer_file):
                    a_src_peer_file = os.path.join(self.a_obsdir, eachfile)
                    a_dst_peer_file = os.path.join(a_tmpdir_seis, eachfile)
                    shutil.copy2(a_src_peer_file, a_dst_peer_file)

                # Now we need to convert them into bbp format
                bbp_formatter.peer2bbp(os.path.join(a_tmpdir_seis,
                                                    r_n_peer_file),
                                       os.path.join(a_tmpdir_seis,
                                                    r_e_peer_file),
                                       os.path.join(a_tmpdir_seis,
                                                    r_z_peer_file),
                                       os.path.join(a_tmpdir_seis,
                                                    r_bbp_file))
            else:
                raise bband_utils.ParameterError("Format %s for " %
                                                 (self.obs_format) +
                                                 "observed seismograms "
                                                 "not supported")

            # Run RotD100 on this file
            if corr_psa is not None:
                # First calculate rd100/50 and psa5 files
                self.do_rotd100(a_tmpdir_seis, r_e_peer_file,
                                r_n_peer_file, r_z_peer_file,
                                "%s-orig.rd100" % (stat),
                                self.log)

                # Now we need to correct the RotD100/RotD50 output
                # using the user-supplied correction factors
                corr_psa.correct_station(stat, "rd100")
            else:
                # Use final names for output files
                self.do_rotd100(a_tmpdir_seis, r_e_peer_file,
                                r_n_peer_file, r_z_peer_file,
                                "%s.rd100" % (stat),
                                self.log)
            shutil.copy2(os.path.join(a_tmpdir_seis, "%s.rd100" % (stat)),
                         os.path.join(a_dstdir, "%s.rd100" % (stat)))
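
# CorrectPSA.correct_station above adjusts the observed RotD100/RotD50
# values with user-supplied correction coefficients. The sketch below is a
# hedged guess at that operation, assuming multiplicative, period-dependent
# factors; the platform's actual correction file format is not shown here.
import numpy as np

def apply_psa_correction(periods, psa, corr_periods, corr_factors):
    """Scale PSA ordinates by correction factors interpolated in period."""
    factors = np.interp(periods, corr_periods, corr_factors)
    return psa * factors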
Example #44
0
    def run(self):
        """
        Runs the RZZ2015 validation metric
        """
        print("RZZ2015".center(80, '-'))

        # Load configuration, set sim_id
        install = InstallCfg.getInstance()
        sim_id = self.sim_id

        # Build directory paths
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
        a_validation_outdir = os.path.join(a_outdir, "validations", "rzz2015")

        # Make sure the output and tmp directories exist
        bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir, a_validation_outdir],
                           print_cmd=False)

        # Now the file paths
        self.log = os.path.join(a_logdir, "%d.rzz2015.log" % (sim_id))
        sta_file = os.path.join(a_indir, self.stations)
        sta_base = os.path.basename(os.path.splitext(self.stations)[0])
        obs_dir = os.path.join(a_tmpdir, "obs_seis_%s" % (sta_base))

        # Get station list
        slo = StationList(sta_file)
        site_list = slo.getStationList()

        # Create output file, add header
        out_file = open(os.path.join(a_validation_outdir,
                                     '%d.rzz2015.%s.txt' %
                                     (self.sim_id, self.eventname)), 'w')
        out_file.write("#station, component, epsilon_a, nu_a,"
                       " epsilon_b, nu_b, epsilon_c, nu_c,"
                       " r1_record, r1_siml, r2_record, r2_siml,"
                       " r3_record, r3_siml, r4_record, r4_siml,"
                       " r5_record, r5_siml, r6_record, r6_siml\n")
        out_file.close()

        # Go through each station
        for site in site_list:
            stat = site.scode
            r_obs_bbp = "%s.bbp" % (stat)
            a_obs_bbp = os.path.join(obs_dir, r_obs_bbp)
            r_sym_bbp = "%d.%s.acc.bbp" % (sim_id, stat)
            a_sym_bbp = os.path.join(a_outdir, r_sym_bbp)

            if not (os.path.exists(a_obs_bbp) and
                    os.path.exists(a_sym_bbp)):
                # Just skip it
                print("===> Couldn't find files "
                      "%s and %s, skipping station %s" %
                      (a_obs_bbp, a_sym_bbp, stat))
                continue

            obs_data = self.read_bbp(a_obs_bbp)
            sym_data = self.read_bbp(a_sym_bbp)

            # Process each component separately
            for comp in range(1, 3):
                self.process(stat, comp, a_validation_outdir,
                             obs_data[0], obs_data[comp],
                             sym_data[0], sym_data[comp])

        print("RZZ2015 Completed".center(80, '-'))
Example #45
0
    def calculate_residuals(self, a_statfile, a_dstdir):
        """
        This function calculates the residuals by comparing observed and
        calculated data.
        """
        install = install_cfg.InstallCfg.getInstance()
        sim_id = self.sim_id
        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        print_header = 1
        rd100_resid_output = os.path.join(a_dstdir, "%s-%d-resid-rd100.txt" %
                                          (self.comp_label, sim_id))
        # If output file exists, delete it
        if os.path.exists(rd100_resid_output):
            os.remove(rd100_resid_output)

        # Filenames for tmp files, check if filename size within bounds
        obsfile = os.path.join(a_dstdir, "rd100_obs.txt")
        simfile = os.path.join(a_dstdir, "rd100_sim.txt")
        bband_utils.check_path_lengths([obsfile, simfile,
                                        rd100_resid_output],
                                       bband_utils.GP_MAX_FILENAME)

        # Loop through all stations
        for site in site_list:
            slon = float(site.lon)
            slat = float(site.lat)
            stat = site.scode

            # Trim files
            self.trim_rd100_file(os.path.join(a_dstdir,
                                              "%d.%s.rd100" % (sim_id, stat)),
                                 simfile)
            self.trim_rd100_file(os.path.join(a_dstdir,
                                              "%s.rd100" % (stat)),
                                 obsfile)

            # Calculate Rrup
            origin = (self.src_keys['lon_top_center'],
                      self.src_keys['lat_top_center'])
            dims = (self.src_keys['fault_length'], self.src_keys['dlen'],
                    self.src_keys['fault_width'], self.src_keys['dwid'],
                    self.src_keys['depth_to_top'])
            mech = (self.src_keys['strike'], self.src_keys['dip'],
                    self.src_keys['rake'])

            site_geom = [float(site.lon), float(site.lat), 0.0]
            (fault_trace1, up_seis_depth,
             low_seis_depth, ave_dip,
             dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)
            _, rrup, _ = putils.DistanceToSimpleFaultSurface(site_geom,
                                                             fault_trace1,
                                                             up_seis_depth,
                                                             low_seis_depth,
                                                             ave_dip)

            cmd = ("%s bbp_format=1 " %
                   (os.path.join(install.A_GP_BIN_DIR,
                                 "gen_resid_tbl_3comp")) +
                   "datafile1=%s simfile1=%s " % (obsfile, simfile) +
                   "comp1=rotd50 comp2=rotd100 comp3=ratio " +
                   "eqname=%s mag=%s stat=%s lon=%.4f lat=%.4f " %
                   (self.comp_label, self.mag, stat, slon, slat) +
                   "vs30=%d cd=%.2f " % (site.vs30, rrup) +
                   "flo=%f fhi=%f " % (site.low_freq_corner,
                                       site.high_freq_corner) +
                   "print_header=%d >> %s 2>> %s" %
                   (print_header, rd100_resid_output, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Only need to print header the first time
            if print_header == 1:
                print_header = 0

        # Remove temp files
        try:
            os.remove(obsfile)
            os.remove(simfile)
        except OSError:
            pass
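
# Several examples here compute Rrup with FaultTraceGen and
# DistanceToSimpleFaultSurface. As a deliberately simpler, hedged stand-in
# (an epicentral-style distance, not Rrup), a haversine great-circle
# distance from a reference point such as the fault's top-center:
import math

def haversine_km(lon1, lat1, lon2, lat2):
    """Great-circle distance between two lon/lat points, in kilometers."""
    earth_radius_km = 6371.0
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dphi = math.radians(lat2 - lat1)
    dlmb = math.radians(lon2 - lon1)
    a = (math.sin(dphi / 2.0) ** 2 +
         math.cos(phi1) * math.cos(phi2) * math.sin(dlmb / 2.0) ** 2)
    return 2.0 * earth_radius_km * math.asin(math.sqrt(a))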
Example #46
0
def create_resid_data_file(comp_label, input_indir, input_obsdir,
                           combined_file, temp_dir):
    """
    This function creates a file containing the combined residuals
    from the simulation data for all stations
    """
    # Copy header for first file, set logfile
    copy_header = 1
    logfile = os.path.join(temp_dir, "log.txt")

    # Figure out where our binaries are
    if "BBP_DIR" in os.environ:
        install_root = os.path.normpath(os.environ["BBP_DIR"])
    else:
        raise bband_utils.ProcessingError("BBP_DIR is not set!")
    gp_bin_dir = os.path.join(install_root, "src", "gp", "bin")

    # Get realizations
    realizations = sorted(os.listdir(input_indir))
    one_realization = realizations[0]
    basedir = os.path.join(input_indir, one_realization)

    # Get the station list
    a_statfile = glob.glob("%s%s*.stl" % (basedir, os.sep))
    if len(a_statfile) != 1:
        raise bband_utils.ProcessingError("Cannot get station list!")
    a_statfile = a_statfile[0]
    slo = StationList(a_statfile)
    site_list = slo.getStationList()

    # Get source file
    a_srcfile = glob.glob("%s%s*.src" % (basedir, os.sep))
    if len(a_srcfile) == 0:
        raise bband_utils.ProcessingError("Cannot get src file!")
    a_srcfile = a_srcfile[0]

    # Parse it!
    src_keys = bband_utils.parse_src_file(a_srcfile)

    # Get the obsdir
    realizations = sorted(os.listdir(input_obsdir))
    one_realization = realizations[0]
    basedir = os.path.join(input_obsdir, one_realization)
    obs_dir = glob.glob("%s%sobs_seis*" % (basedir, os.sep))
    if len(obs_dir) != 1:
        raise bband_utils.ProcessingError("Cannot get observation dir!")
    obs_dir = obs_dir[0]

    # Go through all stations
    for site in site_list:
        slon = float(site.lon)
        slat = float(site.lat)
        stat = site.scode

        # Calculate Rrup
        origin = (src_keys['lon_top_center'], src_keys['lat_top_center'])
        dims = (src_keys['fault_length'], src_keys['dlen'],
                src_keys['fault_width'], src_keys['dwid'],
                src_keys['depth_to_top'])
        mech = (src_keys['strike'], src_keys['dip'], src_keys['rake'])

        site_geom = [float(site.lon), float(site.lat), 0.0]
        (fault_trace1, up_seis_depth, low_seis_depth, ave_dip, dummy1,
         dummy2) = putils.FaultTraceGen(origin, dims, mech)
        _, rrup, _ = putils.DistanceToSimpleFaultSurface(
            site_geom, fault_trace1, up_seis_depth, low_seis_depth, ave_dip)

        simfile1 = os.path.join(temp_dir, "%s.rd50" % (stat))
        datafile1 = os.path.join(obs_dir, "%s.rd50" % (stat))

        cmd = ("%s bbp_format=1 " %
               (os.path.join(gp_bin_dir, "gen_resid_tbl_3comp")) +
               "datafile1=%s simfile1=%s " % (datafile1, simfile1) +
               "comp1=psa5n comp2=psa5e comp3=rotd50 " +
               "eqname=%s mag=0.0 stat=%s lon=%.4f lat=%.4f " %
               (comp_label, stat, slon, slat) + "vs30=%d cd=%.2f " %
               (site.vs30, rrup) + "flo=%f fhi=%f " %
               (site.low_freq_corner, site.high_freq_corner) +
               "print_header=%d >> %s 2>> %s" %
               (copy_header, combined_file, logfile))
        bband_utils.runprog(cmd, abort_on_error=True)

        if copy_header == 1:
            copy_header = 0
Example #47
0
output_dir = sys.argv[5]

# Create directory paths
install = InstallCfg.getInstance()
config = GPGofCfg()
a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id_1))
a_outdir1 = os.path.join(install.A_OUT_DATA_DIR, str(sim_id_1))
a_outdir2 = os.path.join(install.A_OUT_DATA_DIR, str(sim_id_2))

# Src file
a_srcfile = os.path.join(a_indir, src_file)
src_keys = bband_utils.parse_src_file(a_srcfile)

# Station file
a_statfile = os.path.join(a_indir, station_list)
slo = StationList(a_statfile)
site_list = slo.getStationList()

# Capture event_label
bias_file = glob.glob("%s%s*.bias" % (a_outdir1, os.sep))
if len(bias_file) < 1:
    raise bband_utils.ProcessingError("Cannot find event label!")
bias_file = bias_file[0]
# The event label is the prefix of the bias file name
event_label = os.path.basename(bias_file).split("-")[0]

print_header_rd50 = 1

# Go through the stations
for site in site_list:
    stat = site.scode
Example #48
0
    def run(self):
        """
        This function is the main entry point for this module. It runs
        the gp_gof component.
        """
        print("GP GoF".center(80, '-'))

        # Initialize basic variables
        self.install = InstallCfg.getInstance()
        self.config = GPGofCfg()
        install = self.install
        config = self.config
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        self.log = os.path.join(install.A_OUT_LOG_DIR,
                                str(sim_id),
                                "%d.gp_gof.log" %
                                (sim_id))

        # Input, tmp, and output directories
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_outdir_seis = os.path.join(install.A_OUT_DATA_DIR, str(sim_id),
                                     "obs_seis_%s" % (sta_base))
        a_outdir_gmpe = os.path.join(install.A_OUT_DATA_DIR, str(sim_id),
                                     "gmpe_data_%s" % (sta_base))

        # Source file, parse it!
        a_srcfile = os.path.join(install.A_IN_DATA_DIR,
                                 str(sim_id),
                                 self.r_srcfile)
        self.src_keys = bband_utils.parse_src_file(a_srcfile)

        # Station file
        a_statfile = os.path.join(install.A_IN_DATA_DIR,
                                  str(sim_id),
                                  self.r_stations)
        # List of observed seismogram files
        filelist = os.listdir(a_outdir_seis)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        # check cutoff value
        if self.max_cutoff is None:
            self.max_cutoff = config.MAX_CDST

        print_header_rd50 = 1
        # Remove rd50 resid file
        rd50_resid_output = os.path.join(a_outdir, "%s-%d.rd50-resid.txt" %
                                         (self.comp_label, sim_id))
        if os.path.exists(rd50_resid_output):
            os.remove(rd50_resid_output)

        for site in site_list:
            slon = float(site.lon)
            slat = float(site.lat)
            stat = site.scode

            # Now process rd50 files
            expected_rd50_file = os.path.join(a_outdir, "%d.%s.rd50" %
                                              (sim_id, stat))
            if not os.path.exists(expected_rd50_file):
                # just skip it
                print("Skipping rotd50/psa5 for station %s..." % (stat))
                continue

            # See if the rd50 file exist for comparison. If it doesn't
            # exist, skip this station
            rd50_file = None
            if ("%s.rd50" % (stat)) in filelist:
                rd50_file = "%s.rd50" % (stat)
            else:
                # Skip this station
                continue

            # Calculate Rrup
            origin = (self.src_keys['lon_top_center'],
                      self.src_keys['lat_top_center'])
            dims = (self.src_keys['fault_length'], self.src_keys['dlen'],
                    self.src_keys['fault_width'], self.src_keys['dwid'],
                    self.src_keys['depth_to_top'])
            mech = (self.src_keys['strike'], self.src_keys['dip'],
                    self.src_keys['rake'])

            site_geom = [float(site.lon), float(site.lat), 0.0]
            (fault_trace1, up_seis_depth,
             low_seis_depth, ave_dip,
             dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)
            _, rrup, _ = putils.DistanceToSimpleFaultSurface(site_geom,
                                                             fault_trace1,
                                                             up_seis_depth,
                                                             low_seis_depth,
                                                             ave_dip)

            # Create path names and check if their sizes are within bounds
            datafile1 = os.path.join(a_outdir_seis, rd50_file)
            simfile1 = os.path.join(a_outdir, "%d.%s.rd50" %
                                    (sim_id, stat))
            outfile = os.path.join(a_outdir, "%s-%d.rd50-resid.txt" %
                                   (self.comp_label, self.sim_id))
            bband_utils.check_path_lengths([datafile1, simfile1, outfile],
                                           bband_utils.GP_MAX_FILENAME)

            cmd = ("%s/gen_resid_tbl_3comp bbp_format=1 " %
                   (install.A_GP_BIN_DIR) +
                   "datafile1=%s simfile1=%s " % (datafile1, simfile1) +
                   "comp1=psa5n comp2=psa5e comp3=rotd50 " +
                   "eqname=%s mag=%s stat=%s lon=%.4f lat=%.4f " %
                   (self.comp_label, self.mag, stat, slon, slat) +
                   "vs30=%d cd=%.2f " % (site.vs30, rrup) +
                   "flo=%f fhi=%f " % (site.low_freq_corner,
                                       site.high_freq_corner) +
                   "print_header=%d >> %s 2>> %s" %
                   (print_header_rd50, outfile, self.log))
            bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

            # Only need to print header the first time
            if print_header_rd50 == 1:
                print_header_rd50 = 0

        # Finished per station processing, now summarize and plot the data
        if os.path.exists(rd50_resid_output):
            self.summarize_rotd50(site_list, a_outdir, a_outdir_gmpe)

        print("GP GoF Completed".center(80, '-'))
Example #49
0
    def run(self):
        """
        Runs the Anderson GoF code
        """
        print("Anderson GoF".center(80, '-'))

        # Load configuration, set sim_id
        install = InstallCfg.getInstance()
        sim_id = self.sim_id

        # Build directory paths
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_indir = os.path.join(install.A_IN_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
        a_validation_outdir = os.path.join(a_outdir,
                                           "validations",
                                           "anderson_gof")

        # Make sure the output and tmp directories exist
        bband_utils.mkdirs([a_tmpdir, a_indir, a_outdir, a_validation_outdir],
                           print_cmd=False)

        # Now the file paths
        self.log = os.path.join(a_logdir, "%d.anderson.log" % (sim_id))
        sta_file = os.path.join(a_indir, self.stations)
        sta_base = os.path.basename(os.path.splitext(self.stations)[0])
        sims_dir = a_outdir
        obs_dir = os.path.join(a_tmpdir, "obs_seis_%s" % (sta_base))

        # Start with first record
        irec = 0

        # Read station list
        slo = StationList(sta_file)
        site_list = slo.getStationList()

        # Figure out station names
        station_names = []
        for station in site_list:
            station_names.append(station.scode)

        # Loop over stations
        for site in site_list:
            station = site.scode

            print("==> Processing station: %s" % (station))

            file_sims_acc = os.path.join(sims_dir, "%d.%s.acc.bbp" %
                                         (sim_id, station))
            file_sims_rd50 = os.path.join(sims_dir, "%d.%s.rd50" %
                                          (sim_id, station))
            lowcut = site.low_freq_corner
            highcut = site.high_freq_corner
            #print(lowcut, highcut)

            (sims_acc_org_time, sims_acc_org_ns,
             sims_acc_org_ew, sims_acc_org_ver) = np.genfromtxt(file_sims_acc,
                                                                skip_header=2,
                                                                dtype='float64',
                                                                unpack='TRUE')
            (sims_perd, sims_rd50_ns,
             sims_rd50_ew, sims_rd50_ver) = np.genfromtxt(file_sims_rd50,
                                                          skip_header=2,
                                                          dtype='float64',
                                                          unpack='TRUE')

            file_obs_acc = os.path.join(obs_dir, "%s.bbp" %
                                        (station))
            file_obs_rd50 = os.path.join(obs_dir, "%s.rd50" %
                                         (station))

            (obs_acc_org_time, obs_acc_org_ns,
             obs_acc_org_ew, obs_acc_org_ver) = np.genfromtxt(file_obs_acc,
                                                              skip_header=2,
                                                              dtype='float64',
                                                              unpack=True)
            (obs_perd, obs_rd50_ns,
             obs_rd50_ew, obs_rd50_ver) = np.genfromtxt(file_obs_rd50,
                                                        skip_header=2,
                                                        dtype='float64',
                                                        unpack=True)
            # Initialize the rd50 arrays
            RD50PER = len(obs_perd)
            rd1 = np.zeros(RD50PER)
            rd2 = np.zeros(RD50PER)
            rd3 = np.zeros(RD50PER)
            rd4 = np.zeros(RD50PER)

            # Resample and align the time series
            (obs_acc_time,
             obs_acc_ew,
             sims_acc_time,
             sims_acc_ew) = self.align_seismograms(obs_acc_org_time,
                                                   obs_acc_org_ew,
                                                   sims_acc_org_time,
                                                   sims_acc_org_ew)

            (obs_acc_time,
             obs_acc_ns,
             sims_acc_time,
             sims_acc_ns) = self.align_seismograms(obs_acc_org_time,
                                                   obs_acc_org_ns,
                                                   sims_acc_org_time,
                                                   sims_acc_org_ns)

            obs_org_dt = obs_acc_org_time[1] - obs_acc_org_time[0]
            sims_org_dt = sims_acc_org_time[1] - sims_acc_org_time[0]
            obs_dt = obs_acc_time[1] - obs_acc_time[0]
            sims_dt = sims_acc_time[1] - sims_acc_time[0]

            if obs_dt == sims_dt:
                self.dt = obs_dt

            fs = 1. / self.dt
            fnyq = 0.5 * fs

            # Compute the number of pads for the time series
            # to have equal number of points for the fft
            # and for criteria 1 and 2.

            (sims_acc_ns, sims_acc_ew,
             obs_acc_ns, obs_acc_ew, ndata) = self.smcpadf(sims_acc_ns,
                                                           sims_acc_ew,
                                                           obs_acc_ns,
                                                           obs_acc_ew,
                                                           self.dt, lowcut,
                                                           8, highcut, 8,
                                                           'FALSE')
            # Start the loop for the different frequency bands
            for iband in range(len(self.B)):
                f1 = self.B[iband][0]
                f2 = self.B[iband][1]
                # Do the job only if the frequency band is within
                # the filtered band and if fnyq is at or above f2
                if f1 >= lowcut and f2 <= highcut and fnyq >= f2:
                    #print("Working on Period Band :", iband + 1,
                    #      "[", 1. / f2, 1. / f1, "]")
                    T1 = 1. / f1
                    T2 = 1. / f2
                    t_tmp = sims_perd[(sims_perd <= T1) & (T2 <= sims_perd)]

                    acc_1_flt = self.butter_bandpass(f1, f2, fnyq, sims_acc_ns, 2)
                    acc_2_flt = self.butter_bandpass(f1, f2, fnyq, sims_acc_ew, 2)
                    acc_3_flt = self.butter_bandpass(f1, f2, fnyq, obs_acc_ns, 2)
                    acc_4_flt = self.butter_bandpass(f1, f2, fnyq, obs_acc_ew, 2)

                    # Work on the frequency domain

                    # Do the response spectra
                    # Save the rsp for the specific frequency band
                    rd1 = sims_rd50_ns[(sims_perd <= T1) & (T2 <= sims_perd)]
                    rd2 = sims_rd50_ew[(sims_perd <= T1) & (T2 <= sims_perd)]
                    rd3 = obs_rd50_ns[(obs_perd <= T1) & (T2 <= obs_perd)]
                    rd4 = obs_rd50_ew[(obs_perd <= T1) & (T2 <= obs_perd)]

                    self.C8[irec, iband] = np.nanmean(
                        [self.c8_eval(rd1, rd3, t_tmp),
                         self.c8_eval(rd2, rd4, t_tmp)])

                    # Now the FFT
                    # Compute the FFT frequencies
                    F = np.fft.fftfreq(ndata, self.dt)
                    # Compute the fft and the amplitudes
                    fft_1 = np.fft.fft(sims_acc_ns)
                    fft_1 = fft_1[(0. <= F) & (f1 <= F) & (F <= f2)]
                    fft_2 = np.fft.fft(sims_acc_ew)
                    fft_2 = fft_2[(0. <= F) & (f1 <= F) & (F <= f2)]
                    fft_3 = np.fft.fft(obs_acc_ns)
                    fft_3 = fft_3[(0. <= F) & (f1 <= F) & (F <= f2)]
                    fft_4 = np.fft.fft(obs_acc_ew)
                    fft_4 = fft_4[(0. <= F) & (f1 <= F) & (F <= f2)]
                    # Slice the FFT frequencies for the working frequency band
                    F = F[(f1 <= F) & (F <= f2)]

                    fs1 = abs(fft_1) / len(fft_1)
                    fs2 = abs(fft_2) / len(fft_2)
                    fs3 = abs(fft_3) / len(fft_3)
                    fs4 = abs(fft_4) / len(fft_4)

                    self.C9[irec, iband] = np.nanmean(
                        [self.c9_eval(fs1, fs3, F),
                         self.c9_eval(fs2, fs4, F)])

                    # Work on the time domain
                    """
                    # Compute the site corrected accelerograms
                    acc_3_scor = np.fft.ifft(fft_3)
                    acc_4_scor = np.fft.ifft(fft_4)

                    # These do not need filtering because I'm working
                    # in the sliced frequency domain
                    acc_3_flt = abs(acc_3_scor)
                    acc_4_flt = abs(acc_4_scor)
                    """
                    vel_1 = self.integ(acc_1_flt, self.dt)
                    vel_2 = self.integ(acc_2_flt, self.dt)
                    vel_3 = self.integ(acc_3_flt, self.dt)
                    vel_4 = self.integ(acc_4_flt, self.dt)

                    dis_1 = self.integ(vel_1, self.dt)
                    dis_2 = self.integ(vel_2, self.dt)
                    dis_3 = self.integ(vel_3, self.dt)
                    dis_4 = self.integ(vel_4, self.dt)

                    c11, c31 = self.c13_eval(acc_1_flt,
                                             acc_3_flt)
                    c12, c32 = self.c13_eval(acc_2_flt,
                                             acc_4_flt)

                    c21, c41 = self.c24_eval(vel_1,
                                             vel_3)
                    c22, c42 = self.c24_eval(vel_2,
                                             vel_4)

                    self.C1[irec, iband] = np.nanmean([c11, c12])
                    self.C2[irec, iband] = np.nanmean([c21, c22])
                    self.C3[irec, iband] = np.nanmean([c31, c32])
                    self.C4[irec, iband] = np.nanmean([c41, c42])
                    self.C5[irec, iband] = np.nanmean(
                                           [self.c5_eval(acc_1_flt,
                                                         acc_3_flt),
                                            self.c5_eval(acc_2_flt,
                                                         acc_4_flt)])
                    self.C6[irec, iband] = np.nanmean(
                                           [self.c6_eval(vel_1, vel_3),
                                            self.c6_eval(vel_2, vel_4)])
                    self.C7[irec, iband] = np.nanmean(
                                           [self.c7_eval(dis_1, dis_3),
                                            self.c7_eval(dis_2, dis_4)])
                    self.C10[irec, iband] = np.nanmean(
                                            [self.c10_eval(acc_1_flt,
                                                           acc_3_flt),
                                             self.c10_eval(acc_2_flt,
                                                           acc_4_flt)])

                    #print(self.C1[irec, iband],
                    #      self.C2[irec, iband],
                    #      self.C3[irec, iband],
                    #      self.C4[irec, iband],
                    #      self.C5[irec, iband],
                    #      self.C6[irec, iband],
                    #      self.C7[irec, iband],
                    #      self.C8[irec, iband],
                    #      self.C9[irec, iband],
                    #      self.C10[irec, iband])

            self.S1[irec] = np.nanmean(
                np.array([np.nanmean(self.C1[irec, :]),
                          np.nanmean(self.C2[irec, :]),
                          np.nanmean(self.C3[irec, :]),
                          np.nanmean(self.C4[irec, :]),
                          np.nanmean(self.C5[irec, :]),
                          np.nanmean(self.C6[irec, :]),
                          np.nanmean(self.C7[irec, :]),
                          np.nanmean(self.C8[irec, :]),
                          np.nanmean(self.C9[irec, :]),
                          np.nanmean(self.C10[irec, :])]))

            output_file = os.path.join(a_validation_outdir,
                                       "gof-%s-%d-anderson-%s.txt" %
                                       (self.eventname, self.sim_id,
                                        station))
            out_file = open(output_file, 'w')
            line = ('#%s%5s%4s%4s%4s%4s%4s%4s%4s%4s%4s\n' %
                    ('band', 'C1', 'C2', 'C3', 'C4', 'C5',
                     'C6', 'C7', 'C8', 'C9', 'C10'))
            out_file.write(line)
            for i in range(self.BMAX):
                line = ('%s %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f\n' %
                        (self.BNAMES[i], self.C1[irec, i], self.C2[irec, i],
                         self.C3[irec, i], self.C4[irec, i], self.C5[irec, i],
                         self.C6[irec, i], self.C7[irec, i], self.C8[irec, i],
                         self.C9[irec, i], self.C10[irec, i]))
                out_file.write(line)
            out_file.close()

            output_file = os.path.join(a_validation_outdir,
                                       "gof-%s-%d-anderson-%s.png" %
                                       (self.eventname, self.sim_id,
                                        station))
            self.cplots(irec, station, output_file)
            print('===> Station score :', "{:3.1f}".format(self.S1[irec]))

            irec = irec + 1

        print('==> Total number of stations processed: %d' % (irec))
        self.C1 = self.C1[0:irec, :]
        self.C2 = self.C2[0:irec, :]
        self.C3 = self.C3[0:irec, :]
        self.C4 = self.C4[0:irec, :]
        self.C5 = self.C5[0:irec, :]
        self.C6 = self.C6[0:irec, :]
        self.C7 = self.C7[0:irec, :]
        self.C8 = self.C8[0:irec, :]
        self.C9 = self.C9[0:irec, :]
        self.C10 = self.C10[0:irec, :]
        self.S1 = self.S1[0:irec]

        c1conf = [self.statts(self.C1[:, i]) for i in range(self.BMAX)]
        c2conf = [self.statts(self.C2[:, i]) for i in range(self.BMAX)]
        c3conf = [self.statts(self.C3[:, i]) for i in range(self.BMAX)]
        c4conf = [self.statts(self.C4[:, i]) for i in range(self.BMAX)]
        c5conf = [self.statts(self.C5[:, i]) for i in range(self.BMAX)]
        c6conf = [self.statts(self.C6[:, i]) for i in range(self.BMAX)]
        c7conf = [self.statts(self.C7[:, i]) for i in range(self.BMAX)]
        c8conf = [self.statts(self.C8[:, i]) for i in range(self.BMAX)]
        c9conf = [self.statts(self.C9[:, i]) for i in range(self.BMAX)]
        c10conf = [self.statts(self.C10[:, i]) for i in range(self.BMAX)]
        s1_event = np.nanmean(self.S1)
        print('==> Overall event score:', "{:3.1f}".format(s1_event))

        output_file = os.path.join(a_validation_outdir,
                                   '%d.gof_anderson.%s.txt' %
                                   (self.sim_id, self.eventname))
        out_file = open(output_file, 'w')
        line = ('#%s%5s%4s%4s%4s%4s%4s%4s%4s%4s%4s\n' %
                ('band', 'C1', 'C2', 'C3', 'C4', 'C5',
                 'C6', 'C7', 'C8', 'C9', 'C10'))
        out_file.write(line)
        for i in range(self.BMAX):
            line = ('%s %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f %3.1f\n' %
                    (self.BNAMES[i], c1conf[i][0], c2conf[i][0], c3conf[i][0],
                     c4conf[i][0], c5conf[i][0], c6conf[i][0], c7conf[i][0],
                     c8conf[i][0], c9conf[i][0], c10conf[i][0]))
            out_file.write(line)
        out_file.close()

        output_file = os.path.join(a_validation_outdir,
                                   "gof-%s-%d-anderson-summary.png" %
                                   (self.eventname, self.sim_id))
        self.fplots(s1_event, np.asarray(c1conf), np.asarray(c2conf),
                    np.asarray(c3conf), np.asarray(c4conf), np.asarray(c5conf),
                    np.asarray(c6conf), np.asarray(c7conf), np.asarray(c8conf),
                    np.asarray(c9conf), np.asarray(c10conf), output_file)

        print("Anderson GoF Completed".center(80, '-'))
Example #50
    def run(self):
        print("Generating Plots".center(80, '-'))

        # Initialize basic variables
        install = InstallCfg.getInstance()
        sim_id = self.sim_id
        sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])

        self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
                                "%d.gen_plots.log" % (sim_id))

        # Input, tmp, and output directories
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_tmpdir_seis = os.path.join(install.A_TMP_DATA_DIR, str(sim_id),
                                     "obs_seis_%s" % (sta_base))

        # Station file
        a_statfile = os.path.join(install.A_IN_DATA_DIR, str(sim_id),
                                  self.r_stations)
        # List of observed seismogram files
        filelist = os.listdir(a_tmpdir_seis)

        slo = StationList(a_statfile)
        site_list = slo.getStationList()

        for site in site_list:
            stat = site.scode

            # Look for the files we need
            bbpfile = os.path.join(a_tmpdir_seis, "%s.bbp" % stat)
            expected_file = os.path.join(a_outdir,
                                         "%d.%s.vel.bbp" % (sim_id, stat))
            if (not os.path.exists(expected_file)
                    or not os.path.exists(bbpfile)):
                # just skip this station
                continue

            print("==> Plotting seismogram comparison for station: %s" %
                  (stat))
            if self.format == 'vel':
                # We have velocity, nothing we need to do
                filename1 = bbpfile
            elif self.format == 'acc':
                # We have acceleration, must integrate first
                # Create path names and check if their sizes are within bounds
                nsfile = os.path.join(a_tmpdir, "temp.acc.000")
                ewfile = os.path.join(a_tmpdir, "temp.acc.090")
                udfile = os.path.join(a_tmpdir, "temp.acc.ver")
                bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s/wcc2bbp " % (install.A_GP_BIN_DIR) +
                       "nsfile=%s ewfile=%s udfile=%s " %
                       (nsfile, ewfile, udfile) + "wcc2bbp=0 < %s >> %s 2>&1" %
                       (bbpfile, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)

                for comp in ['000', '090', 'ver']:
                    # Create path names and check if their sizes are
                    # within bounds
                    filein = os.path.join(a_tmpdir, "temp.acc.%s" % (comp))
                    fileout = os.path.join(a_tmpdir, "temp.vel.%s" % (comp))

                    bband_utils.check_path_lengths([filein, fileout],
                                                   bband_utils.GP_MAX_FILENAME)

                    cmd = ("%s/integ_diff integ=1 " % (install.A_GP_BIN_DIR) +
                           "filein=%s fileout=%s >> %s 2>&1" %
                           (filein, fileout, self.log))
                    bband_utils.runprog(cmd,
                                        abort_on_error=True,
                                        print_cmd=False)

                # Create path names and check if their sizes are within bounds
                nsfile = os.path.join(a_tmpdir, "temp.vel.000")
                ewfile = os.path.join(a_tmpdir, "temp.vel.090")
                udfile = os.path.join(a_tmpdir, "temp.vel.ver")
                vel_bbp_file = os.path.join(a_tmpdir, "temp.%s.vel" % stat)

                bband_utils.check_path_lengths([nsfile, ewfile, udfile],
                                               bband_utils.GP_MAX_FILENAME)

                cmd = ("%s/wcc2bbp wcc2bbp=1 " % install.A_GP_BIN_DIR +
                       "nsfile=%s ewfile=%s udfile=%s > %s 2>> %s" %
                       (nsfile, ewfile, udfile, vel_bbp_file, self.log))
                bband_utils.runprog(cmd, abort_on_error=True, print_cmd=False)
                filename1 = vel_bbp_file

            # Generate arias duration files for calculated data
            calc_acc = os.path.join(a_outdir, "%d.%s.acc.bbp" % (sim_id, stat))
            calc_peer_n = os.path.join(a_tmpdir,
                                       "%d.%s_N.acc" % (sim_id, stat))
            calc_peer_e = os.path.join(a_tmpdir,
                                       "%d.%s_E.acc" % (sim_id, stat))
            calc_peer_z = os.path.join(a_tmpdir,
                                       "%d.%s_Z.acc" % (sim_id, stat))
            # Convert calculated acc seismogram into peer format
            bbp_formatter.bbp2peer(calc_acc, calc_peer_n, calc_peer_e,
                                   calc_peer_z)

            # Now calculate arias duration for each component
            for comp in ["N", "E", "Z"]:
                file_in = os.path.join(a_tmpdir,
                                       "%d.%s_%s.acc" % (sim_id, stat, comp))
                file_out = os.path.join(
                    a_tmpdir, "%d.%s_%s.arias" % (sim_id, stat, comp))
                arias_duration.ad_from_acc(file_in, file_out)

            # Generate arias duration files for observed data
            obs_acc = os.path.join(a_tmpdir_seis, "%s.bbp" % stat)
            obs_peer_n = os.path.join(a_tmpdir, "obs.%s_N.acc" % (stat))
            obs_peer_e = os.path.join(a_tmpdir, "obs.%s_E.acc" % (stat))
            obs_peer_z = os.path.join(a_tmpdir, "obs.%s_Z.acc" % (stat))
            # Convert observed acc seismogram into peer format
            bbp_formatter.bbp2peer(obs_acc, obs_peer_n, obs_peer_e, obs_peer_z)

            # Now calculate arias duration for each component
            for comp in ["N", "E", "Z"]:
                file_in = os.path.join(a_tmpdir,
                                       "obs.%s_%s.acc" % (stat, comp))
                file_out = os.path.join(a_tmpdir,
                                        "obs.%s_%s.arias" % (stat, comp))
                arias_duration.ad_from_acc(file_in, file_out)

            # Plot seismograms with arias duration
            filename2 = os.path.join(a_outdir,
                                     "%d.%s.vel.bbp" % (sim_id, stat))
            outfile = os.path.join(
                a_outdir,
                "%s_%d_%s_overlay.png" % (self.comp_label, sim_id, stat))
            obs_arias_n = os.path.join(a_tmpdir, "obs.%s_N.arias" % (stat))
            obs_arias_e = os.path.join(a_tmpdir, "obs.%s_E.arias" % (stat))
            obs_arias_z = os.path.join(a_tmpdir, "obs.%s_Z.arias" % (stat))
            calc_arias_n = os.path.join(a_tmpdir,
                                        "%d.%s_N.arias" % (sim_id, stat))
            calc_arias_e = os.path.join(a_tmpdir,
                                        "%d.%s_E.arias" % (sim_id, stat))
            calc_arias_z = os.path.join(a_tmpdir,
                                        "%d.%s_Z.arias" % (sim_id, stat))

            plot_seismograms.plot_overlay_with_arias(
                stat, filename1, filename2, obs_arias_n, obs_arias_e,
                obs_arias_z, calc_arias_n, calc_arias_e, calc_arias_z,
                self.comp_label, "run %d" % sim_id, outfile)

        # Now create rd50 comparison plots
        for site in site_list:
            stat = site.scode
            print("==> Plotting RotD50 comparison for station: %s" % (stat))

            # Now process rd50 files
            expected_rd50_file = os.path.join(a_outdir,
                                              "%d.%s.rd50" % (sim_id, stat))
            if not os.path.exists(expected_rd50_file):
                # just skip it
                print("Skipping rotd50/psa5 for station %s..." % (stat))
                continue

            # See if a .rd50 file exists for comparison; if it
            # doesn't exist, skip this station
            rd50_file = None
            if ("%s.rd50" % (stat)) in filelist:
                rd50_file = "%s.rd50" % (stat)
            else:
                # Skip this station
                continue

            # Plot rotd50 results
            rd50_filename1 = os.path.join(a_tmpdir_seis, rd50_file)
            rd50_filename2 = os.path.join(a_outdir,
                                          "%d.%s.rd50" % (sim_id, stat))
            outfile = os.path.join(
                a_outdir,
                "%s_%d_%s_rotd50.png" % (self.comp_label, sim_id, stat))

            plot_rotd50.plot_rd50(stat,
                                  rd50_filename1,
                                  rd50_filename2,
                                  self.comp_label,
                                  sim_id,
                                  outfile,
                                  site.low_freq_corner,
                                  site.high_freq_corner,
                                  quiet=True)

        print("Generating Plots Completed".center(80, '-'))
Example #51
    def init_dims(self):
        a_stationlist = os.path.join(self.install.A_IN_DATA_DIR,
                                     str(self.sim_id), self.station_file)
        if not os.path.isfile(a_stationlist):
            a_stationlist = os.path.join(os.getcwd(), self.station_file)
            if not os.path.isfile(a_stationlist):
                print("Error (plot_value_map): Unable to locate station file: "
                      "%s!" % self.station_file)
                sys.exit()
        self.station_file = a_stationlist
        print("Using Station File: %s" % (self.station_file))
        # a_statfile = (self.install.A_IN_DATA_DIR +
        #               "/%d/%s"%(self.sim_id,self.station_file))
        slo = StationList(self.station_file)
        site_list = slo.getStationList()
        w_lon = 0.0
        e_lon = 0.0
        n_lat = 0.0
        s_lat = 0.0
        for sites in site_list:
            slon = float(sites.lon)
            slat = float(sites.lat)
            if w_lon == 0.0:
                w_lon = slon
            elif slon < w_lon:
                w_lon = slon
            if e_lon == 0.0:
                e_lon = slon
            elif slon > e_lon:
                e_lon = slon
            if n_lat == 0.0:
                n_lat = slat
            elif slat > n_lat:
                n_lat = slat
            if s_lat == 0.0:
                s_lat = slat
            elif slat < s_lat:
                s_lat = slat
        self.rbounds = [(n_lat + 0.1), (s_lat - 0.1),
                        (e_lon + 0.1), (w_lon - 0.1)]
        print("Region Bounds: ", self.rbounds)

        self.nw = (self.rbounds[3], self.rbounds[0])
        self.sw = (self.rbounds[3], self.rbounds[1])
        self.se = (self.rbounds[2], self.rbounds[1])
        self.ne = (self.rbounds[2], self.rbounds[0])
        self.PLOT_MAP_LOC = [0.10, 0.15, 0.8, 0.8]

        self.origin = self.nw # North - West Corner
        self.x_invert = False
        self.y_invert = True

        rzone = 11
        # if self.region != None:
        #     if self.region.getName() == "Northern California":
        #         rzone = 10
        #         print("Region : %s, UTM Zone: %d" %
        #               (self.region.getName(), rzone))
        # else:
        print("Region : None, UTM Zone: %d" % (rzone))

        pobj = Proj(proj='utm', zone=rzone, ellps='WGS84')
        # Convert the origin to UTM coordinates, rounded to whole meters
        self.offset = [round(val) for val in pobj(self.origin[0],
                                                  self.origin[1])]
        # Calculate region dimensions in grid points (km * 1000 / dx)
        dim_y = math.ceil(GS.get_distance(self.nw, self.sw)) * (1000.0 / self.dx)
        dim_x = math.ceil(GS.get_distance(self.sw, self.se)) * (1000.0 / self.dx)
        # Only the surface is plotted, so use 1 km for Z
        dim_z = 1.0 * (1000.0 / self.dx)
        self.dim = [int(dim_x), int(dim_y), int(dim_z)]
        # print("Self.dx, self.offset, self.dim:",
        #       self.dx, self.offset, self.dim)

        self.projobj = Projection(self.dx, self.dim, self.offset, "utm", rzone)
        self.build_station_list(self.station_file)
        self.boundfile = self.build_coastline(self.coast_file, self.projobj,
                                              self.offset, self.dx, self.dim,
                                              self.x_invert, self.y_invert)

        return
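
The bounding-box loop above seeds each bound with 0.0 and then widens it one station at a time. The same region bounds could be written more compactly with min()/max(); the sketch below is only an illustration and assumes a StationList-style object whose entries expose lon and lat attributes.

def station_bounds(site_list, pad=0.1):
    """Return (north, south, east, west) bounds padded by `pad` degrees."""
    lons = [float(site.lon) for site in site_list]
    lats = [float(site.lat) for site in site_list]
    return (max(lats) + pad, min(lats) - pad,
            max(lons) + pad, min(lons) - pad)
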
Example #52
def load_all_data(comp_label, input_indir, input_obsdir,
                  combined_file, temp_dir, component):
    """
    This function loads all data from each station file
    and creates the structures needed for plotting.
    """
    data = {}
    
    # Get realizations
    realizations = sorted(os.listdir(input_indir))
    one_realization = realizations[0]
    basedir = os.path.join(input_indir, one_realization)

    # Get the GMPE data for the RZZ2015 metrics
    base_outdir = os.path.join(input_obsdir, one_realization,
                               "validations", "rzz2015_gmpe")
    a_rzz2015_gmpe = glob.glob("%s%s%s.rzz2015gmpe.txt" % (base_outdir,
                                                           os.sep,
                                                           one_realization))
    a_rzz2015_gmpe = a_rzz2015_gmpe[0]
    # Get the station list
    a_statfile = glob.glob("%s%s*.stl" % (basedir, os.sep))
    if len(a_statfile) != 1:
        raise bband_utils.ProcessingError("Cannot get station list!")
    a_statfile = a_statfile[0]
    slo = StationList(a_statfile)
    site_list = slo.getStationList()

    # Get source file
    a_srcfile = glob.glob("%s%s*.src" % (basedir, os.sep))
    if len(a_srcfile) != 1:
        raise bband_utils.ProcessingError("Cannot get src file!")
    a_srcfile = a_srcfile[0]

    # Parse it!
    src_keys = bband_utils.parse_src_file(a_srcfile)

    # Go through all stations
    for site in site_list:
        slon = float(site.lon)
        slat = float(site.lat)
        stat = site.scode

        # Calculate Rrup
        origin = (src_keys['lon_top_center'],
                  src_keys['lat_top_center'])
        dims = (src_keys['fault_length'], src_keys['dlen'],
                src_keys['fault_width'], src_keys['dwid'],
                src_keys['depth_to_top'])
        mech = (src_keys['strike'], src_keys['dip'],
                src_keys['rake'])

        site_geom = [float(site.lon), float(site.lat), 0.0]
        (fault_trace1, up_seis_depth,
         low_seis_depth, ave_dip,
         dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)
        _, rrup, _ = putils.DistanceToSimpleFaultSurface(site_geom,
                                                         fault_trace1,
                                                         up_seis_depth,
                                                         low_seis_depth,
                                                         ave_dip)

        # Read data for this station
        data_file = os.path.join(temp_dir, "%s.rzz2015" % (stat))

        data[stat] = {}
        data[stat]["dist"] = rrup
        data[stat]["r1"] = []
        data[stat]["r2"] = []
        data[stat]["r3"] = []
        data[stat]["r4"] = []
        data[stat]["r5"] = []
        data[stat]["r1_obs"] = None
        data[stat]["r2_obs"] = None
        data[stat]["r3_obs"] = None
        data[stat]["r4_obs"] = None
        data[stat]["r5_obs"] = None
        data[stat]["r1_gmpe"] = None
        data[stat]["r2_gmpe"] = None
        data[stat]["r3_gmpe"] = None
        data[stat]["r4_gmpe"] = None
        data[stat]["r5_gmpe"] = None
        
        in_file = open(data_file, 'r')
        for line in in_file:
            line = line.strip()
            if line.startswith("#"):
                # Skip comments
                continue
            pieces = line.split(",")
            comp = pieces[1].strip()
            # Check if we want this component
            if component != "both":
                if comp != component:
                    # Skip
                    continue
            # We want this data point
            pieces = pieces[2:]
            pieces = [float(piece) for piece in pieces]
            # Get observation values
            if data[stat]["r1_obs"] is None:
                data[stat]["r1_obs"] = pieces[6]
            if data[stat]["r2_obs"] is None:
                data[stat]["r2_obs"] = pieces[8]
            if data[stat]["r3_obs"] is None:
                data[stat]["r3_obs"] = pieces[10]
            if data[stat]["r4_obs"] is None:
                data[stat]["r4_obs"] = pieces[12]
            if data[stat]["r5_obs"] is None:
                data[stat]["r5_obs"] = pieces[14]
            # Get simulated data values
            data[stat]["r1"].append(pieces[7])
            data[stat]["r2"].append(pieces[9])
            data[stat]["r3"].append(pieces[11])
            data[stat]["r4"].append(pieces[13])
            data[stat]["r5"].append(pieces[15])
        in_file.close()

    gmpe_file = open(a_rzz2015_gmpe, 'r')
    for line in gmpe_file:
        line = line.strip()
        # Skip comments
        if line.startswith("#"):
            continue
        pieces = line.split(",")
        stat = pieces[0].strip()
        pieces = pieces[1:]
        pieces = [float(piece.strip()) for piece in pieces]
        data[stat]["r1_gmpe"] = pieces[2]
        data[stat]["r2_gmpe"] = pieces[3]
        data[stat]["r3_gmpe"] = pieces[2]/pieces[3]
        data[stat]["r4_gmpe"] = pieces[5]
        data[stat]["r5_gmpe"] = pieces[6]
    gmpe_file.close()

    # Return all data
    return data
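
A hedged usage example for load_all_data() follows; every path and label below is a hypothetical placeholder, and the real directory layout comes from the RZZ2015 validation workflow.

# Hypothetical arguments; adjust to the actual validation layout
data = load_all_data("NR", "Sims/indata", "Sims/outdata",
                     "rzz2015-combined.txt", "temp_dir", "both")
for stat, values in data.items():
    if values["r1"] and values["r1_obs"] is not None:
        mean_r1 = sum(values["r1"]) / len(values["r1"])
        print("%s: Rrup = %.1f km, mean R1 = %.3f (obs R1 = %.3f)" %
              (stat, values["dist"], mean_r1, values["r1_obs"]))
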
Example #53
def create_resid_data_file(comp_label, input_indir, input_obsdir,
                           combined_file, temp_dir):
    """
    This function creates a file containing the combined residuals
    from the simulation data from all stations
    """
    # Copy header for first file, set logfile
    if os.path.isfile(combined_file):
        # But not, if file already exists
        copy_header = 0
    else:
        copy_header = 1
    logfile = os.path.join(temp_dir, "log.txt")

    # Figure out where out binaries are
    if "BBP_DIR" in os.environ:
        install_root = os.path.normpath(os.environ["BBP_DIR"])
    else:
        raise bband_utils.ProcessingError("BBP_DIR is not set!")
    gp_bin_dir = os.path.join(install_root, "src", "gp", "bin")

    # Get realizations
    realizations = sorted(os.listdir(input_indir))
    one_realization = realizations[0]
    basedir = os.path.join(input_indir, one_realization)

    # Get the station list
    a_statfile = glob.glob("%s%s*.stl" % (basedir, os.sep))
    if len(a_statfile) != 1:
        raise bband_utils.ProcessingError("Cannot get station list!")
    a_statfile = a_statfile[0]
    slo = StationList(a_statfile)
    site_list = slo.getStationList()

    # Get source file
    a_srcfile = glob.glob("%s%s*.src" % (basedir, os.sep))
    if len(a_srcfile) != 1:
        raise bband_utils.ProcessingError("Cannot get src file!")
    a_srcfile = a_srcfile[0]

    # Parse it!
    src_keys = bband_utils.parse_src_file(a_srcfile)

    # Get the obsdir
    print(input_obsdir)
    realizations = sorted(os.listdir(input_obsdir))
    one_realization = realizations[0]
    basedir = os.path.join(input_obsdir, one_realization)
    obs_dir = glob.glob("%s%sobs_seis*" % (basedir, os.sep))
    if len(obs_dir) != 1:
        raise bband_utils.ProcessingError("Cannot get observation dir!")
    obs_dir = obs_dir[0]

    # Go through all stations
    for site in site_list:
        slon = float(site.lon)
        slat = float(site.lat)
        stat = site.scode

        # Calculate Rrup
        origin = (src_keys['lon_top_center'],
                  src_keys['lat_top_center'])
        dims = (src_keys['fault_length'], src_keys['dlen'],
                src_keys['fault_width'], src_keys['dwid'],
                src_keys['depth_to_top'])
        mech = (src_keys['strike'], src_keys['dip'],
                src_keys['rake'])

        site_geom = [float(site.lon), float(site.lat), 0.0]
        (fault_trace1, up_seis_depth,
         low_seis_depth, ave_dip,
         dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)
        _, rrup, _ = putils.DistanceToSimpleFaultSurface(site_geom,
                                                         fault_trace1,
                                                         up_seis_depth,
                                                         low_seis_depth,
                                                         ave_dip)

        simfile1 = os.path.join(temp_dir, "%s.rd50" % (stat))
        datafile1 = os.path.join(obs_dir, "%s.rd50" % (stat))

        cmd = ("%s/gen_resid_tbl_3comp bbp_format=1 " % (gp_bin_dir) +
               "datafile1=%s simfile1=%s " % (datafile1, simfile1) + 
               "comp1=psa5n comp2=psa5e comp3=rotd50 " +
               "eqname=%s mag=0.0 stat=%s lon=%.4f lat=%.4f " %
               (comp_label, stat, slon, slat) +
               "vs30=%d cd=%.2f " % (site.vs30, rrup) +
               "flo=%f fhi=%f " % (site.low_freq_corner,
                                   site.high_freq_corner) +
               "print_header=%d >> %s 2>> %s" %
               (copy_header, combined_file, logfile))
        bband_utils.runprog(cmd, abort_on_error=True)

        if copy_header == 1:
            copy_header = 0
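
As with load_all_data(), a hedged usage sketch; the directories and event label are hypothetical placeholders, the per-station .rd50 files are expected to already exist in temp_dir, and BBP_DIR must point at the installation.

import os
import tempfile

# Hypothetical arguments for a single validation event
temp_dir = tempfile.mkdtemp()
combined_file = os.path.join(temp_dir, "NR-combined-rd50-resid.txt")
create_resid_data_file("NR", "Sims/indata", "Sims/outdata",
                       combined_file, temp_dir)
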
Example #54
File: uwsr.py Project: alborzgh/bbp
    def run(self):
        """
        This function prepares the parameters for UW Site response and then calls it
        """
        print("Nonlinear Site Response Analysis".center(80, '-'))

        install = InstallCfg.getInstance()
        sim_id = self.sim_id

        a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
        a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
        a_logdir = os.path.join(install.A_OUT_LOG_DIR, str(sim_id))
        a_indir  = os.path.join(install.A_IN_DATA_DIR, str(sim_id))

        a_statlist = os.path.join(a_indir, self.r_stations)
        slo = StationList(a_statlist)
        site_list = slo.getStationList()

        for idx, site in enumerate(site_list):
            print("==> Running nonlinear site response for station: %s" %
                  (site.scode))

            # The velocity file for this station
            vel_file = os.path.join(a_outdir, "%d.%s.vel.bbp" %
                                    (sim_id, site.scode))

            log_file = os.path.join(a_logdir, "%d.%s.siteresponse.log" %
                                    (sim_id, site.scode))

            progstring = ("%s " %
                        (os.path.join(install.A_UW_BIN_DIR, "siteresponse")) +
                        "%s " % (self.r_locfile[idx]) +
                        "-bbp " +
                        "%s " % (vel_file)+
                        "%s " % (a_tmpdir)+
                        "%s " % (log_file))
            bband_utils.runprog(progstring)
            
            # copy results to the output directory
            tmp_acc_file = os.path.join(a_tmpdir, 'surface.acc')
            out_acc_file = os.path.join(a_outdir, "%d.%s.surf.acc.bbp" %
                                        (sim_id, site.scode))
            out_acc_png = os.path.join(a_outdir, "%d.%s.surf.acc.png" %
                                       (sim_id, site.scode))
            self.convert_srt_to_bbp(tmp_acc_file, out_acc_file, "acc")

            tmp_vel_file = os.path.join(a_tmpdir, 'surface.vel')
            out_vel_file = os.path.join(a_outdir, "%d.%s.surf.vel.bbp" %
                                        (sim_id, site.scode))
            out_vel_png = os.path.join(a_outdir, "%d.%s.surf.vel.png" %
                                       (sim_id, site.scode))
            self.convert_srt_to_bbp(tmp_vel_file, out_vel_file, "vel")

            # tmp_dsp_file = os.path.join(a_tmpdir, 'surface.disp')
            # out_dsp_file = os.path.join(a_outdir, "%d.%s.surf.disp.bbp" %
            #                         (sim_id, site.scode))
            # out_dsp_png  = os.path.join(a_outdir, "%d.%s.surf.disp.png" %
            #                         (sim_id, site.scode))
            # self.convert_srt_to_bbp(tmp_dsp_file, out_dsp_file, "disp")

            # plot seismograms at surface
            plot_seismograms.plot_seis(site.scode, out_acc_file, sim_id,
                                       'acc', out_acc_png)
            plot_seismograms.plot_seis(site.scode, out_vel_file, sim_id,
                                       'vel', out_vel_png)
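
convert_srt_to_bbp() is not shown in this example. Purely as a hedged sketch of what such a converter could look like, assuming the siteresponse output is a plain text file with four columns (time, NS, EW, UD); this is an assumption, since the actual file layout is not part of this listing.

import numpy as np

def convert_srt_to_bbp(srt_file, bbp_file, data_type):
    """Illustrative converter from site-response text output to a BBP file.

    Assumes srt_file holds (time, NS, EW, UD) columns; the real
    siteresponse output format may differ.
    """
    time, comp_ns, comp_ew, comp_ud = np.genfromtxt(srt_file, unpack=True)
    np.savetxt(bbp_file,
               np.column_stack((time, comp_ns, comp_ew, comp_ud)),
               fmt="%13.6e",
               header="%s seismogram (time NS EW UD)" % (data_type),
               comments="# ")
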