Example #1
def create_entry(cmt_filename, param_path):
    # Define parameter directory
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")
    stations_path = os.path.join(param_path,
                                 "RequestParams/STATIONS")

    # Load Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())
    specfemspecs = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())

    # Check whether a STATIONS file exists in the parameter directory
    if os.path.exists(stations_path):
        stations_file = stations_path
    else:
        # If no STATIONS file is found in the parameter directory,
        # the standard STATIONS file will be used.
        stations_file = None

    # Database Setup.
    DB = DataBaseSkeleton(basedir=DB_params["databasedir"],
                          cmt_fn=cmt_filename,
                          specfem_dir=specfemspecs["SPECFEM_DIR"],
                          stations_file=stations_file,
                          overwrite=DB_params['overwrite'])

    # Database Create entry
    DB.create_all()

    # Return new earthquake location.
    cmt_in_database = os.path.join(DB.Cdirs[0], "C" + DB.Cids[0])

    return cmt_in_database
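For orientation, smart_read_yaml is expected to return plain dictionaries here; below is a minimal sketch of the values create_entry relies on, with purely illustrative paths rather than the project's actual parameter files.

# Illustrative only: these dictionaries mirror the keys accessed in
# create_entry; the real values come from the YAML files under params/.
DB_params = {
    "databasedir": "/scratch/gcmt3d/database",    # hypothetical path
    "overwrite": False,
}
specfemspecs = {
    "SPECFEM_DIR": "/home/user/specfem3d_globe",  # hypothetical path
}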
Example #2
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")

    # Load Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())
    specfemspecs = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())

    # Database Setup.
    DB = DataBaseSkeleton(basedir=DB_params["databasedir"],
                          cmt_fn=cmt_filename,
                          specfem_dir=specfemspecs["SPECFEM_DIR"],
                          verbose=DB_params['verbose'],
                          overwrite=DB_params['overwrite'])

    # Database Create entry
    DB.create_all()

    # Return new earthquake location.
    cmt_in_database = os.path.join(DB.eq_dirs[0], "eq_" + DB.eq_ids[0])

    return cmt_in_database
Example #3
def Create_Entry(cmt_filename):
    """ This function creates a database entry for the inversion.
    :param cmt_filename: path to cmt file that an Entry is supposed to be
                         created for
    :return:
        no returns since it is simply a copy and create dirs function

    """

    # Define parameter directory
    param_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        __file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")

    # Load Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())
    specfemspecs = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())

    # Database Setup.
    DB = DataBaseSkeleton(basedir=DB_params["databasedir"],
                          cmt_fn=cmt_filename,
                          specfem_dir=specfemspecs["SPECFEM_DIR"],
                          verbose=DB_params['verbose'],
                          overwrite=DB_params['overwrite'])

    # Database Create entry
    DB.create_all()

    # Return new earthquake location.
    cmt_in_database = os.path.join(DB.Cdirs[0], "C" + DB.Cids[0])

    return cmt_in_database
Example #4
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")
    comp_and_modules_path = os.path.join(
        param_path, "SpecfemParams/CompilersAndModules.yml")

    # Load Parameters
    specfemspecs = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())
    cm_dict = smart_read_yaml(comp_and_modules_path, mpi_mode=is_mpi_env())

    cmt_dir = os.path.dirname(os.path.abspath(cmt_filename))

    RD = RunSimulation(cmt_dir,
                       N=specfemspecs['nodes'],
                       n=specfemspecs['tasks'],
                       npn=specfemspecs['tasks_per_node'],
                       memory_req=specfemspecs['memory_req'],
                       modules=cm_dict['modulelist'],
                       gpu_module=cm_dict['gpu_module'],
                       GPU_MODE=specfemspecs["GPU_MODE"],
                       walltime=specfemspecs['walltime_solver'],
                       verbose=specfemspecs['verbose'])

    # Print run specifications if verbose is True
    if specfemspecs['verbose']:
        print(RD)

    # Run Simulation by calling the class
    RD()
Example #5
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")

    # Load Database Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())

    # Get processing path from cmt_filename in database
    cmt_dir = os.path.dirname(os.path.abspath(cmt_filename))

    # Processed data directory
    processed_dir = os.path.join(cmt_dir, "seismograms",
                                 "processed_seismograms")

    # Inversion dictionary directory
    inv_dict_dir = os.path.join(cmt_dir, "inversion", "inversion_dicts")

    # Get all files to be processed
    processed_files = glob.glob(os.path.join(processed_dir, "*observed*"))

    if DB_params["verbose"]:
        print("\n Creating inversion dictionaries ...\n")

    for _i, processed_file in enumerate(processed_files):

        # Get processing band
        bandstring = str(os.path.basename(processed_file)).split(".")[1]
        band = [float(x) for x in bandstring.split("_")]

        if DB_params["verbose"]:
            print("\nCreating inversion dictionary for period band:")
            print("Low: %d s || High: %d s \n" % tuple(band))
            print("...\n")

        params = create_inversion_dict(processed_dir, bandstring)

        # Print Inversion parameters:
        if DB_params["verbose"]:
            print("Files")
            print("_______________________________________________________\n")
            for key, value in params.items():
                print(key + ":", value)
            print("_______________________________________________________\n")

        # Outputfile:
        outfilename = os.path.join(
            inv_dict_dir, "inversion_file_dict." + bandstring + ".yml")

        # Write yaml file to inversion dictionary directory
        write_yaml_file(params, outfilename)

        if DB_params["verbose"]:
            print("\nDONE writing %s.\n" % outfilename)

    if DB_params["verbose"]:
        print("\nDONE writing all dictionaries!\n")
Example #6
def main(cmt_filename):

    # Set directories of the parameter files
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    request_param_path = os.path.join(param_path,
                                      "RequestParams/RequestParams.yml")

    # Read the parameter file
    rCparams = smart_read_yaml(request_param_path, mpi_mode=is_mpi_env())

    # Earthquake and Station parameters
    cmt_dir = os.path.dirname(cmt_filename)

    # Create Request Object
    Request = DataRequest.from_file(cmt_filename,
                                    duration=rCparams['duration'],
                                    channels=rCparams['channels'],
                                    locations=rCparams['locations'],
                                    starttime_offset=rCparams[
                                        'starttime_offset'],
                                    outputdir=cmt_dir)

    # Print Earthquake Download Info
    print(Request)

    # Request download
    Request.download()
Example #7
def data_request(cmt_filename, param_path):

    # Set directories of the parameter files
    request_param_path = os.path.join(param_path,
                                      "RequestParams/RequestParams.yml")

    # Read the parameter file
    rCparams = smart_read_yaml(request_param_path, mpi_mode=is_mpi_env())

    # Earthquake and Station parameters
    cmt_dir = os.path.dirname(cmt_filename)
    station_dir = os.path.join(cmt_dir, "station_data")

    # Get STATIONS file from CMT directory
    stationsfile = os.path.join(station_dir, "STATIONS")

    # Create Request Object
    Request = DataRequest.from_file(cmt_filename,
                                    stationlistfname=stationsfile,
                                    sfstationlist=True,
                                    duration=rCparams['duration'],
                                    channels=rCparams['channels'],
                                    locations=rCparams['locations'],
                                    starttime_offset=rCparams[
                                        'starttime_offset'],
                                    outputdir=cmt_dir)

    # Print Earthquake Download Info
    for line in str(Request).splitlines():
        logger.info(line)

    # Request download
    Request.download()
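Logging the request summary line by line keeps every line of the multi-line string on its own log record; a self-contained sketch of the same pattern with a stand-in object (RequestSummary is purely hypothetical):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class RequestSummary:
    """Stand-in for an object with a multi-line __str__, e.g. a request."""

    def __str__(self):
        return "duration: 3600 s\nchannels: BHZ,BHN,BHE"


for line in str(RequestSummary()).splitlines():
    logger.info(line)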
Example #8
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")
    inversionparam_path = os.path.join(param_path,
                                       "CMTInversion/InversionParams.yml")

    # Load Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())

    # Inversion Params
    INV_params = smart_read_yaml(inversionparam_path, mpi_mode=is_mpi_env())

    # File and directory
    cmt_dir = os.path.dirname(cmt_filename)
    cmt = CMTSource.from_CMTSOLUTION_file(cmt_filename)
    outdir = os.path.join(cmt_dir, "CMT_SIMs")

    # Basic parameters
    dm = float(INV_params["config"]["dmoment"])  # 10**22 dyne*cm
    dz = float(INV_params["config"]["ddepth"])  # 1000 m
    ddeg = float(INV_params["config"]["dlocation"])  # 0.001 deg

    if DB_params["verbose"]:
        print("\n")
        print("  Perturbation parameters")
        print("  " + 50 * "*")
        print("  𝚫M: %g" % dm)
        print("  𝚫z: %g" % dz)
        print("  𝚫deg: %g" % ddeg)
        print("  " + 50 * "*" + "\n")

    # Create source creation class
    sfsource = SpecfemSources(cmt,
                              npar=DB_params['npar'],
                              dm=dm,
                              dx=dz,
                              ddeg=ddeg,
                              verbose=DB_params['verbose'],
                              outdir=outdir)

    # Write sources
    sfsource.write_sources()
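The perturbation sizes are read as strings from the inversion parameter file and cast to float; a sketch of the structure main() assumes for INV_params, with values matching the magnitudes hinted at in the comments above (not the actual file contents):

# Illustrative only: mirrors the keys read in main(); the real values live
# in CMTInversion/InversionParams.yml.
INV_params = {
    "config": {
        "dmoment": "1.0e22",   # dyne*cm
        "ddepth": "1000.0",    # m
        "dlocation": "0.001",  # deg
    }
}

dm = float(INV_params["config"]["dmoment"])
dz = float(INV_params["config"]["ddepth"])
ddeg = float(INV_params["config"]["dlocation"])
print(dm, dz, ddeg)  # 1e+22 1000.0 0.001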
Example #9
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        __file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")

    # Load Database Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())

    # Get processing path from cmt_filename in database
    cmt_dir = os.path.dirname(os.path.abspath(cmt_filename))
    window_path_dir = os.path.join(cmt_dir, "window_data", "window_paths")

    # Get all files to be processed
    window_pathfiles = glob.glob(os.path.join(window_path_dir, "*"))

    if DB_params["verbose"]:
        print("\nStart windowing all trace pairs ...\n")

    for _i, path_file in enumerate(window_pathfiles):

        if DB_params["verbose"]:
            print("\nWindowing path file:\n")
            print(path_file + "\n")
            print("Start windowing traces from path file ...\n")

        # Load process path file to get parameter file location
        params = smart_read_yaml(
            path_file, mpi_mode=is_mpi_env())["window_param_file"]

        # Create Smart Process class
        proc = WindowASDF(path_file, params, verbose=DB_params["verbose"],
                          debug=False)
        proc.smart_run()

        if DB_params["verbose"]:
            print("\nDONE windowing traces file.\n")

    if DB_params["verbose"]:
        print("\nDONE windowed all data!\n")
Example #10
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")
    comp_and_modules_path = os.path.join(
        param_path, "SpecfemParams/CompilersAndModules.yml")

    # Load Parameters
    specfemspecs = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())
    cm_dict = smart_read_yaml(comp_and_modules_path, mpi_mode=is_mpi_env())

    cmt_dir = os.path.dirname(os.path.abspath(cmt_filename))

    # Set up RunSimulation class with parameters from the files.
    RD = RunSimulation(cmt_dir,
                       N=specfemspecs['nodes'],
                       n=specfemspecs['tasks'],
                       npn=specfemspecs['tasks_per_node'],
                       memory_req=specfemspecs['memory_req'],
                       modules=cm_dict['modulelist'],
                       gpu_module=cm_dict['gpu_module'],
                       GPU_MODE=specfemspecs["GPU_MODE"],
                       walltime=specfemspecs['walltime_solver'],
                       verbose=specfemspecs['verbose'])

    if specfemspecs["verbose"]:
        print("Deleting unnecessary stuff ...")

    # Clean up Simulation directory
    RD.clean_up()

    if specfemspecs["verbose"]:
        print("Deleting DONE.")
Example #11
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        __file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")

    # Load Parameters
    DB_params = smart_read_yaml(databaseparam_path,
                                mpi_mode=is_mpi_env())

    # File and directory
    cmt_dir = os.path.dirname(cmt_filename)
    sim_dir = os.path.join(cmt_dir, "CMT_SIMs")

    attr = ["CMT", "CMT_rr", "CMT_tt", "CMT_pp", "CMT_rt", "CMT_rp",
            "CMT_tp", "CMT_depth", "CMT_lat", "CMT_lon"]

    # Converting the synthetic data
    if DB_params['verbose']:
        print("\nConverting synthetic traces to ASDF ... \n")

    for _i, at in enumerate(attr[:DB_params["npar"]+1]):

        # Path file
        syn_path_file = os.path.join(sim_dir, at, at + ".yml")

        converter = ConvertASDF(syn_path_file, verbose=DB_params["verbose"],
                                status_bar=DB_params["verbose"])
        converter.run()

    # Converting the observed data
    if DB_params['verbose']:
        print("\nConverting observed traces to ASDF ... \n")

    obs_path_file = os.path.join(cmt_dir, "seismograms", "obs", "observed.yml")

    converter = ConvertASDF(obs_path_file, verbose=DB_params["verbose"],
                            status_bar=DB_params["verbose"])
    converter.run()


    if DB_params['verbose']:
        print("\nConversion to ASDF DONE.\n")
Example #12
def data_request(cmt_filename):
    # Set directories of the parameter files
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")

    request_param_path = os.path.join(param_path,
                                      "RequestParams/RequestParams.yml")

    # Read the parameter file
    rCparams = smart_read_yaml(request_param_path, mpi_mode=is_mpi_env())

    # Earthquake and Station parameters
    cmt_dir = os.path.dirname(cmt_filename)
    station_dir = os.path.join(cmt_dir, "station_data")

    # Get STATIONS file from CMT directory
    stationsfile = os.path.join(station_dir, "STATIONS")

    # Observed output dir
    obsd_dir = os.path.join(cmt_dir, "seismograms", "obs")

    # CMT parameter input
    cmt = CMTSource.from_CMTSOLUTION_file(cmt_filename)
    duration = rCparams['duration']
    starttime_offset = rCparams['starttime_offset']

    starttime = cmt.origin_time + starttime_offset
    endtime = starttime + duration

    # Get station_list from station_file in database entry
    stations = read_station_file(stationsfile)
    station_ids = [station[0] + "_" + station[1] for station in stations]

    # Download Station Data
    _, _, filtered_station_ids = \
        download_stationxml(station_ids, starttime, endtime,
                            outputdir=station_dir, client=None,
                            level="response")

    # Download waveform
    download_waveform(filtered_station_ids,
                      starttime,
                      endtime,
                      outputdir=obsd_dir,
                      client=None)
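The download window is anchored on the event origin time; a minimal sketch of that arithmetic, assuming origin_time behaves like an obspy UTCDateTime (seconds can be added directly) and using made-up values:

from obspy import UTCDateTime

origin_time = UTCDateTime("2011-03-11T05:46:24.0")  # hypothetical event
starttime_offset = -60.0  # start one minute before the origin
duration = 3600.0         # request one hour of data

starttime = origin_time + starttime_offset
endtime = starttime + duration
print(starttime, "->", endtime)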
Example #13
    def test__create_syn_path_yaml(self):
        """Testing the creation of the yaml file."""

        with tempfile.TemporaryDirectory() as tmp_dir:

            # Cmtfile path
            cmtfile = os.path.join(DATA_DIR, "CMTSOLUTION")

            # Initialize database skeleton class
            DB = DataBaseSkeleton(basedir=tmp_dir,
                                  cmt_fn=cmtfile,
                                  specfem_dir=self.specfem_dir,
                                  verbose=True)

            # Create database
            DB.create_all()

            # Read the yaml_file which should be created in the CMT directory
            yaml_file = os.path.join(DB.eq_dirs[0], "CMT_SIMs", "CMT_rr",
                                     "CMT_rr.yml")

            # Solution should be:
            waveform_dir = os.path.join(DB.eq_dirs[0], "CMT_SIMs", "CMT_rr",
                                        "OUTPUT_FILES")
            tag = 'syn'
            filetype = 'sac'
            output_file = os.path.join(DB.eq_dirs[0], "seismograms", "syn",
                                       "CMT_rr.h5")
            quakeml_file = os.path.join(DB.eq_dirs[0], "CMT_SIMs", "CMT_rr",
                                        "OUTPUT_FILES", "Quake.xml")

            d = smart_read_yaml(yaml_file, mpi_mode=False)

            # Assessing correctness of yaml file
            self.assertTrue(d["quakeml_file"] == quakeml_file)
            self.assertTrue(d["tag"] == tag)
            self.assertTrue(d["output_file"] == output_file)
            self.assertTrue(d["filetype"] == filetype)
            self.assertTrue(d["waveform_dir"] == waveform_dir)
Example #14
    def test__create_obs_path_yaml(self):
        """Testing the creation of the yaml file."""

        with tempfile.TemporaryDirectory() as tmp_dir:
            # Cmtfile path
            cmtfile = os.path.join(DATA_DIR, "CMTSOLUTION")

            # Initialize database skeleton class
            DB = DataBaseSkeleton(basedir=tmp_dir,
                                  cmt_fn=cmtfile,
                                  specfem_dir=self.specfem_dir,
                                  verbose=True)

            # Create database
            DB.create_all()

            # Read the yaml_file which should be created in the CMT directory
            yaml_file = os.path.join(DB.eq_dirs[0], "seismograms", "obs",
                                     "observed.yml")

            # Solution should be:
            waveform_files = os.path.join(DB.eq_dirs[0], "seismograms", "obs",
                                          DB.eq_ids[0] + ".mseed")
            staxml = os.path.join(DB.eq_dirs[0], "station_data", "station.xml")
            tag = 'obs'
            output_file = os.path.join(DB.eq_dirs[0], "seismograms", "obs",
                                       "raw_observed.h5")
            quakeml_file = os.path.join(DB.eq_dirs[0],
                                        "eq_" + DB.eq_ids[0] + ".xml")

            d = smart_read_yaml(yaml_file, mpi_mode=False)

            # Assessing correctness of yaml file
            self.assertTrue(d["quakeml_file"] == quakeml_file)
            self.assertTrue(d["tag"] == tag)
            self.assertTrue(d["output_file"] == output_file)
            self.assertTrue(d["waveform_files"] == waveform_files)
            self.assertTrue(d["staxml_files"] == staxml)
Example #15
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")

    # Load Parameters
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())

    if DB_params["verbose"]:
        print("Creating processing path files for the observed data...")

    # Create Processing path files observed
    process_obs_dir = os.path.join(param_path, "ProcessObserved")
    create_process_path_obs(cmt_filename, process_obs_dir, verbose=True)

    if DB_params["verbose"]:
        print("Creating processing path files for the synthetic data...")

    # Create Processing path files synthetics
    process_syn_dir = os.path.join(param_path, "ProcessSynthetic")

    create_process_path_syn(cmt_filename,
                            process_syn_dir,
                            DB_params["npar"],
                            verbose=True)

    if DB_params["verbose"]:
        print("Creating processing path files for windowing the data...")

    # Create Window Path Files:
    window_dir = os.path.join(param_path, "CreateWindows")
    create_window_path(cmt_filename,
                       window_dir,
                       figure_mode=DB_params["figure_mode"],
                       verbose=True)
Example #16
def data_request(cmt_filename, param_path):

    # Request config_file
    request_param_path = os.path.join(param_path,
                                      "RequestParams/RequestParams.yml")

    # Read the parameter file
    rCparams = smart_read_yaml(request_param_path, mpi_mode=is_mpi_env())

    # Earthquake and Station parameters
    cmt_dir = os.path.dirname(cmt_filename)
    station_dir = os.path.join(cmt_dir, "station_data")

    # Get STATIONS file from CMT directory
    stationsfile = os.path.join(station_dir, "STATIONS")

    # Observed output dir
    obsd_dir = os.path.join(cmt_dir, "seismograms", "obs")

    # CMT parameter input
    cmt = CMTSource.from_CMTSOLUTION_file(cmt_filename)
    duration = rCparams['duration']
    starttime_offset = rCparams['starttime_offset']

    starttime = cmt.origin_time + starttime_offset
    endtime = starttime + duration

    # Get station_list from station_file in database entry
    stations = read_station_file(stationsfile)

    # Create list of networks to download from
    networks = list(set([station[0] for station in stations]))
    network_string = ",".join(networks)

    # Set a rectangular domain spanning the whole globe, so station
    # selection is not restricted by location.
    domain = RectangularDomain(minlatitude=-90,
                               maxlatitude=90,
                               minlongitude=-180,
                               maxlongitude=180)

    # Set download restrictions
    restrictions = Restrictions(
        starttime=starttime,
        endtime=endtime,
        reject_channels_with_gaps=False,
        minimum_length=float(rCparams['minimum_length']),
        # Trace needs to be almost full length
        network=network_string,  # Only certain networks
        channel=",".join(rCparams['channels']),
        location=",".join(rCparams['locations']))

    # Query only the IRIS data center; if no providers were specified,
    # all known ones would be queried.
    providers = ["IRIS"]
    mdl = MassDownloader(providers=providers)
    # The data will be downloaded to the station and observed-seismogram
    # directories of this database entry with automatically chosen file
    # names.
    stationxml_storage = os.path.join(station_dir)
    waveform_storage = os.path.join(obsd_dir)
    logger.info("MSEEDs: %s" % waveform_storage)
    logger.info("XMLs: %s" % stationxml_storage)

    mdl.download(domain,
                 restrictions,
                 mseed_storage=waveform_storage,
                 stationxml_storage=stationxml_storage)
Example #17
def Fix_Specfem():
    """This Function uses the parameters found in
    ::

        GCMT3D/workflow/params/SpecfemParams/SpecfemParams.yml

    and

    ::

        GCMT3D/workflow/params/SpecfemParams/CompilersAndModules.yml

    to fix and recompile the software which is necessary for the simulation
    and inversion of the data.

    Since all data are taken from the parameter files, there are no inputs
    or outputs.

    """
    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")
    comp_and_modules_path = os.path.join(
        param_path, "SpecfemParams/CompilersAndModules.yml")

    # Load parameters
    sf_dict = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())
    cm_dict = smart_read_yaml(comp_and_modules_path, mpi_mode=is_mpi_env())

    # Define the specfemdatafixer
    df = DATAFixer(
        sf_dict["SPECFEM_DIR"],
        NEX_XI=sf_dict["NEX_XI"],
        NEX_ETA=sf_dict["NEX_ETA"],
        NPROC_XI=sf_dict["NPROC_XI"],
        NPROC_ETA=sf_dict["NPROC_ETA"],
        ROTATE_SEISMOGRAMS_RT=sf_dict["ROTATE_SEISMOGRAMS_RT"],
        RECORD_LENGTH=sf_dict["RECORD_LENGTH"],
        MODEL=sf_dict["MODEL"],
        WRITE_SEISMOGRAMS_BY_MASTER=sf_dict["WRITE_SEISMOGRAMS_BY_MASTER"],
        OUTPUT_SEISMOS_ASCII_TEXT=sf_dict["OUTPUT_SEISMOS_ASCII_TEXT"],
        OUTPUT_SEISMOS_SAC_ALPHANUM=sf_dict["OUTPUT_SEISMOS_SAC_ALPHANUM"],
        OUTPUT_SEISMOS_SAC_BINARY=sf_dict["OUTPUT_SEISMOS_SAC_BINARY"],
        OUTPUT_SEISMOS_ASDF=sf_dict["OUTPUT_SEISMOS_ASDF"],
        MOVIE_SURFACE=sf_dict["MOVIE_SURFACE"],
        MOVIE_VOLUME=sf_dict["MOVIE_VOLUME"],
        MOVIE_COARSE=sf_dict["MOVIE_COARSE"],
        GPU_MODE=sf_dict["GPU_MODE"],
        GPU_RUNTIME=sf_dict["GPU_RUNTIME"],
        GPU_PLATFORM=sf_dict["GPU_PLATFORM"],
        GPU_DEVICE=sf_dict["GPU_DEVICE"],
        ADIOS_ENABLED=sf_dict["ADIOS_ENABLED"],
        ADIOS_FOR_FORWARD_ARRAYS=sf_dict["ADIOS_FOR_FORWARD_ARRAYS"],
        ADIOS_FOR_MPI_ARRAYS=sf_dict["ADIOS_FOR_MPI_ARRAYS"],
        ADIOS_FOR_ARRAYS_SOLVER=sf_dict["ADIOS_FOR_ARRAYS_SOLVER"],
        ADIOS_FOR_SOLVER_MESHFILES=sf_dict["ADIOS_FOR_SOLVER_MESHFILES"],
        ADIOS_FOR_AVS_DX=sf_dict["ADIOS_FOR_AVS_DX"],
        ADIOS_FOR_KERNELS=sf_dict["ADIOS_FOR_KERNELS"],
        ADIOS_FOR_MODELS=sf_dict["ADIOS_FOR_MODELS"],
        ADIOS_FOR_UNDO_ATTENUATION=sf_dict["ADIOS_FOR_UNDO_ATTENUATION"],
        modules=cm_dict["modulelist"],
        gpu_module=cm_dict["gpu_module"],
        gpu_version=cm_dict["gpu_version"],
        cc=cm_dict["cc"],
        cpp=cm_dict["cpp"],
        mpicc=cm_dict["mpicc"],
        f90=cm_dict["f90"],
        mpif90=cm_dict["mpif90"],
        nodes=sf_dict["nodes"],
        tasks=sf_dict["tasks"],
        tasks_per_node=sf_dict["tasks_per_node"],
        walltime=sf_dict["walltime"],
        walltime_solver=sf_dict["walltime"],
        memory_req=sf_dict["memory_req"],
        verbose=sf_dict["verbose"])

    # Run `Par_file` fixer.
    df.fix_parfiles()

    # Configure and compile
    df.configure_and_make()

    # Submit the mesher run to the slurm scheduler.
    df.run_mesher()
Example #18
def main():
    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")
    comp_and_modules_path = os.path.join(
        param_path, "SpecfemParams/CompilersAndModules.yml")

    # Load parameters
    sf_dict = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())
    cm_dict = smart_read_yaml(comp_and_modules_path, mpi_mode=is_mpi_env())

    # Define the specfemdatafixer
    DF = DATAFixer(
        sf_dict["SPECFEM_DIR"],
        NEX_XI=sf_dict["NEX_XI"],
        NEX_ETA=sf_dict["NEX_ETA"],
        NPROC_XI=sf_dict["NPROC_XI"],
        NPROC_ETA=sf_dict["NPROC_ETA"],
        ROTATE_SEISMOGRAMS_RT=sf_dict["ROTATE_SEISMOGRAMS_RT"],
        RECORD_LENGTH=sf_dict["RECORD_LENGTH"],
        MODEL=sf_dict["MODEL"],
        WRITE_SEISMOGRAMS_BY_MASTER=sf_dict["WRITE_SEISMOGRAMS_BY_MASTER"],
        OUTPUT_SEISMOS_ASCII_TEXT=sf_dict["OUTPUT_SEISMOS_ASCII_TEXT"],
        OUTPUT_SEISMOS_SAC_ALPHANUM=sf_dict["OUTPUT_SEISMOS_SAC_ALPHANUM"],
        OUTPUT_SEISMOS_SAC_BINARY=sf_dict["OUTPUT_SEISMOS_SAC_BINARY"],
        OUTPUT_SEISMOS_ASDF=sf_dict["OUTPUT_SEISMOS_ASDF"],
        MOVIE_SURFACE=sf_dict["MOVIE_SURFACE"],
        MOVIE_VOLUME=sf_dict["MOVIE_VOLUME"],
        MOVIE_COARSE=sf_dict["MOVIE_COARSE"],
        GPU_MODE=sf_dict["GPU_MODE"],
        GPU_RUNTIME=sf_dict["GPU_RUNTIME"],
        GPU_PLATFORM=sf_dict["GPU_PLATFORM"],
        GPU_DEVICE=sf_dict["GPU_DEVICE"],
        ADIOS_ENABLED=sf_dict["ADIOS_ENABLED"],
        ADIOS_FOR_FORWARD_ARRAYS=sf_dict["ADIOS_FOR_FORWARD_ARRAYS"],
        ADIOS_FOR_MPI_ARRAYS=sf_dict["ADIOS_FOR_MPI_ARRAYS"],
        ADIOS_FOR_ARRAYS_SOLVER=sf_dict["ADIOS_FOR_ARRAYS_SOLVER"],
        ADIOS_FOR_SOLVER_MESHFILES=sf_dict["ADIOS_FOR_SOLVER_MESHFILES"],
        ADIOS_FOR_AVS_DX=sf_dict["ADIOS_FOR_AVS_DX"],
        ADIOS_FOR_KERNELS=sf_dict["ADIOS_FOR_KERNELS"],
        ADIOS_FOR_MODELS=sf_dict["ADIOS_FOR_MODELS"],
        ADIOS_FOR_UNDO_ATTENUATION=sf_dict["ADIOS_FOR_UNDO_ATTENUATION"],
        modules=cm_dict["modulelist"],
        gpu_module=cm_dict["gpu_module"],
        gpu_version=cm_dict["gpu_version"],
        cc=cm_dict["cc"],
        cpp=cm_dict["cpp"],
        mpicc=cm_dict["mpicc"],
        f90=cm_dict["f90"],
        mpif90=cm_dict["mpif90"],
        nodes=sf_dict["nodes"],
        tasks=sf_dict["tasks"],
        tasks_per_node=sf_dict["tasks_per_node"],
        walltime=sf_dict["walltime"],
        walltime_solver=sf_dict["walltime_solver"],
        memory_req=sf_dict["memory_req"],
        verbose=sf_dict["verbose"])

    # Submit the mesher run to the slurm scheduler.
    DF.run_mesher()
Example #19
def main(cmt_filename):

    # Define parameter directory
    param_path = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "params")

    # Load Database Parameters
    databaseparam_path = os.path.join(param_path,
                                      "Database/DatabaseParameters.yml")
    DB_params = smart_read_yaml(databaseparam_path, mpi_mode=is_mpi_env())

    # Inversion Params
    inversionparam_path = os.path.join(param_path,
                                       "CMTInversion/InversionParams.yml")
    INV_params = smart_read_yaml(inversionparam_path, mpi_mode=is_mpi_env())

    # Get processing path from cmt_filename in database
    cmt_dir = os.path.dirname(os.path.abspath(cmt_filename))

    # Create cmt source:
    cmtsource = CMTSource.from_CMTSOLUTION_file(cmt_filename)

    # Window directory
    window_dir = os.path.join(cmt_dir, "window_data")

    # Inversion dictionary directory
    inv_dict_dir = os.path.join(cmt_dir, "inversion", "inversion_dicts")

    # Inversion dictionaries
    inv_dicts = glob.glob(os.path.join(inv_dict_dir, "*"))

    # Inversion output directory
    inv_out_dir = os.path.join(cmt_dir, "inversion", "inversion_output")

    if DB_params["verbose"]:
        print("\n#######################################################")
        print("#                                                     #")
        print("#      Starting inversion ...                         #")
        print("#                                                     #")
        print("#######################################################\n")

    # Creating Data container
    dcon = DataContainer(parlist=PARLIST[:DB_params["npar"]])

    for _i, inv_dict in enumerate(inv_dicts):

        # Get processing band
        bandstring = str(os.path.basename(inv_dict)).split(".")[1]
        band = [float(x) for x in bandstring.split("_")]

        if DB_params["verbose"]:
            print("\n")
            print("  " + 54 * "*")
            print("  Getting data for inversion from period band:")
            print("  Low: %d s || High: %d s" % tuple(band))
            print("  " + 54 * "*" + "\n")

        # Load inversion file dictionary
        asdf_dict = smart_read_yaml(inv_dict, mpi_mode=is_mpi_env())
        window_files = glob.glob(
            os.path.join(window_dir,
                         "windows." + bandstring + "*[!stats].json"))

        # Adding measurements

        for _j, window_file in enumerate(window_files):
            # Print Inversion parameters:
            if DB_params["verbose"]:
                print("  Adding measurements to data container:")
                print(
                    "  _____________________________________________________\n"
                )

            # Add measurements from ASDF file and windowfile
            # if _j == 0:
            if DB_params["verbose"]:
                print("  Window file:\n", "  ", window_file)
                print("\n  ASDF files:")
                for key, value in asdf_dict.items():
                    print("    ", key + ":", value)
            dcon.add_measurements_from_asdf(window_file, asdf_dict)
            # else:
            #     if DB_params["verbose"]:
            #         print("  Window file:\n", "  ", window_file)
            #     dcon.add_measurements_from_sac(window_file)

            if DB_params["verbose"]:
                print(
                    "  _____________________________________________________\n"
                )
                print("   ... \n\n")

    if DB_params["verbose"]:
        print("  Inverting for a new moment tensor .... ")
        print("  " + 54 * "*" + "\n\n")

    # Setting up weight config
    weight_config = DefaultWeightConfig(normalize_by_energy=False,
                                        normalize_by_category=False,
                                        comp_weight={
                                            "Z": 1.0,
                                            "R": 1.0,
                                            "T": 1.0
                                        },
                                        love_dist_weight=1.0,
                                        pnl_dist_weight=1.0,
                                        rayleigh_dist_weight=1.0,
                                        azi_exp_idx=0.5)

    # Setting up general inversion config
    config = Config(DB_params["npar"],
                    dlocation=float(INV_params["config"]["dlocation"]),
                    ddepth=float(INV_params["config"]["ddepth"]),
                    dmoment=float(INV_params["config"]["dmoment"]),
                    weight_data=True,
                    station_correction=True,
                    zero_trace=True,
                    double_couple=False,
                    bootstrap=True,
                    bootstrap_repeat=100,
                    weight_config=weight_config)

    srcinv = Cmt3D(cmtsource, dcon, config)
    srcinv.source_inversion()

    # plot result
    srcinv.plot_summary(inv_out_dir, figure_format="pdf")

    if DB_params["verbose"]:
        print("  DONE inversion for period band: %d - %d s.\n" % tuple(band))

    if DB_params["verbose"]:
        print("\n#######################################################")
        print("#                                                     #")
        print("#      Inversion DONE.                                #")
        print("#                                                     #")
        print("#######################################################\n")
Example #20
def main():
    """This script controlls the compilation and setting up of
    specfem3d_globe.

    Usage:
        In the command line
        ``00_Fix_Specfem_And_Recompile.py``

    It uses the parameter files located in
    ``../params/SpecfemParams/SpecfemParams.yml``
    and ``../params/SpecfemParams/CompAndModules.yaml`` to get all necessary
    parameters.

    """

    # Define parameter directory
    param_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(
        __file__))), "params")
    specfemspec_path = os.path.join(param_path,
                                    "SpecfemParams/SpecfemParams.yml")
    comp_and_modules_path = os.path.join(
        param_path, "SpecfemParams/CompilersAndModules.yml")

    # Load parameters
    sf_dict = smart_read_yaml(specfemspec_path, mpi_mode=is_mpi_env())
    cm_dict = smart_read_yaml(comp_and_modules_path, mpi_mode=is_mpi_env())

    # Define the specfemdatafixer
    DF = DATAFixer(sf_dict["SPECFEM_DIR"],
                   NEX_XI=sf_dict["NEX_XI"], NEX_ETA=sf_dict["NEX_ETA"],
                   NPROC_XI=sf_dict["NPROC_XI"], NPROC_ETA=sf_dict["NPROC_ETA"],
                   ROTATE_SEISMOGRAMS_RT=sf_dict["ROTATE_SEISMOGRAMS_RT"],
                   RECORD_LENGTH=sf_dict["RECORD_LENGTH"],
                   MODEL=sf_dict["MODEL"],
                   WRITE_SEISMOGRAMS_BY_MASTER=sf_dict[
                       "WRITE_SEISMOGRAMS_BY_MASTER"],
                   OUTPUT_SEISMOS_ASCII_TEXT=sf_dict[
                       "OUTPUT_SEISMOS_ASCII_TEXT"],
                   OUTPUT_SEISMOS_SAC_ALPHANUM=sf_dict[
                       "OUTPUT_SEISMOS_SAC_ALPHANUM"],
                   OUTPUT_SEISMOS_SAC_BINARY=sf_dict[
                       "OUTPUT_SEISMOS_SAC_BINARY"],
                   OUTPUT_SEISMOS_ASDF=sf_dict["OUTPUT_SEISMOS_ASDF"],
                   MOVIE_SURFACE=sf_dict["MOVIE_SURFACE"],
                   MOVIE_VOLUME=sf_dict["MOVIE_VOLUME"],
                   MOVIE_COARSE=sf_dict["MOVIE_COARSE"],
                   GPU_MODE=sf_dict["GPU_MODE"],
                   GPU_RUNTIME=sf_dict["GPU_RUNTIME"],
                   GPU_PLATFORM=sf_dict["GPU_PLATFORM"],
                   GPU_DEVICE=sf_dict["GPU_DEVICE"],
                   ADIOS_ENABLED=sf_dict["ADIOS_ENABLED"],
                   ADIOS_FOR_FORWARD_ARRAYS=sf_dict["ADIOS_FOR_FORWARD_ARRAYS"],
                   ADIOS_FOR_MPI_ARRAYS=sf_dict["ADIOS_FOR_MPI_ARRAYS"],
                   ADIOS_FOR_ARRAYS_SOLVER=sf_dict["ADIOS_FOR_ARRAYS_SOLVER"],
                   ADIOS_FOR_SOLVER_MESHFILES=sf_dict[
                       "ADIOS_FOR_SOLVER_MESHFILES"],
                   ADIOS_FOR_AVS_DX=sf_dict["ADIOS_FOR_AVS_DX"],
                   ADIOS_FOR_KERNELS=sf_dict["ADIOS_FOR_KERNELS"],
                   ADIOS_FOR_MODELS=sf_dict["ADIOS_FOR_MODELS"],
                   ADIOS_FOR_UNDO_ATTENUATION=sf_dict[
                       "ADIOS_FOR_UNDO_ATTENUATION"],
                   modules=cm_dict["modulelist"],
                   gpu_module=cm_dict["gpu_module"],
                   gpu_version=cm_dict["gpu_version"],
                   cc=cm_dict["cc"],
                   cpp=cm_dict["cpp"],
                   mpicc=cm_dict["mpicc"],
                   f90=cm_dict["f90"],
                   mpif90=cm_dict["mpif90"],
                   nodes=sf_dict["nodes"],
                   tasks=sf_dict["tasks"],
                   tasks_per_node=sf_dict["tasks_per_node"],
                   walltime=sf_dict["walltime"],
                   walltime_solver=sf_dict["walltime_solver"],
                   memory_req=sf_dict["memory_req"],
                   verbose=sf_dict["verbose"])

    # Run `Par_file` fixer.
    DF.fix_parfiles()

    # Configure and compile
    DF.configure_and_make()

    # Submit the mesher run to the slurm scheduler.
    DF.run_mesher()