def test_id_lat_lon_ele_are_necessary(self):
        """
        Tests that some station fields need to be set.
        """
        # Station with missing id.
        station_1 = {"latitude": 47.737167,
             "longitude": 11.2752,
             "elevation_in_m": 565.0}
        # Station with missing latitude.
        station_2 = {"id": "BW.FURT",
             "longitude": 11.2752,
             "elevation_in_m": 565.0}
        # Station with missing longitude.
        station_3 = {"id": "BW.FURT",
             "latitude": 47.737167,
             "elevation_in_m": 565.0}
        # Station with missing elevation.
        station_4 = {"id": "BW.FURT",
             "latitude": 47.737167,
             "longitude": 11.2752}
        # Station with everything necessary
        station_5 = {"id": "BW.FURT",
             "latitude": 47.737167,
             "longitude": 11.2752,
             "elevation_in_m": 565.0}

        gen = InputFileGenerator()
        # The first 4 should raise a ValueError
        self.assertRaises(ValueError, gen.add_stations, station_1)
        self.assertRaises(ValueError, gen.add_stations, station_2)
        self.assertRaises(ValueError, gen.add_stations, station_3)
        self.assertRaises(ValueError, gen.add_stations, station_4)
        # The last one not.
        gen.add_stations(station_5)
def test_adding_invalid_file_to_station_raises():
    """
    Adding some invalid things should of course raise.
    """
    gen = InputFileGenerator()
    with pytest.raises(IOError):
        gen.add_stations("some_nonesense")
def test_adding_invalid_file_to_station_raises():
    """
    Adding some invalid things should of course raise.
    """
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_stations("some_nonesense")
def test_local_depth_will_be_set_to_zero():
    """
    Tests that the local depth will be set to zero if not given.
    """
    stations = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0}]
    json_stations = json.dumps(stations)
    gen = InputFileGenerator()
    gen.add_stations(stations)
    # Now add the local depth again.
    stations[0]["local_depth_in_m"] = 0.0
    stations[1]["local_depth_in_m"] = 0.0
    assert sorted(stations) == sorted(gen._stations)

    # Repeat with the JSON variant.
    gen = InputFileGenerator()
    gen.add_stations(json_stations)
    assert sorted(stations) == sorted(gen._stations)
def test_adding_sac_file_without_coordinates():
    """
    This sac file has no coordinates, thus no station should actually be added.
    """
    sac_file = os.path.join(DATA, "example_without_coordinates.sac")
    gen = InputFileGenerator()
    gen.add_stations(sac_file)
    assert gen._stations == []
def main():
    gen = InputFileGenerator()

    # SES3D 4.0 can only simulate one event at a time.
    gen.add_events("../tests/data/event1.xml")
    gen.add_stations([
        "../tests/data/dataless.seed.BW_FURT",
        "../tests/data/dataless.seed.BW_RJOB"
    ])

    # Just perform a standard forward simulation.
    gen.config.simulation_type = "normal simulation"

    gen.config.output_folder = "../OUTPUT"

    # Time configuration.
    gen.config.number_of_time_steps = 700
    gen.config.time_increment_in_s = 0.75

    # SES3D specific configuration
    gen.config.output_directory = "../DATA/OUTPUT/1.8s"
    # SES3D specific discretization
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4

    # Specify some source time function.
    gen.config.source_time_function = np.sin(np.linspace(0, 10, 700))

    # Configure the mesh.
    gen.config.mesh_min_latitude = -50.0
    gen.config.mesh_max_latitude = 50.0
    gen.config.mesh_min_longitude = -50.0
    gen.config.mesh_max_longitude = 50.0
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 200.0

    # Define the rotation. Take care this is defined as the rotation of the
    # mesh.  The data will be rotated in the opposite direction! The following
    # example will rotate the mesh 5 degrees southwards around the x-axis. For
    # a definition of the coordinate system refer to the rotations.py file. The
    # rotation is entirely optional.
    gen.config.rotation_angle_in_degree = 5.0
    gen.config.rotation_axis = [1.0, 0.0, 0.0]

    # Define Q
    gen.config.is_dissipative = True
    gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
    gen.config.Q_model_weights_of_relaxation_mechanisms = \
        [2.5100, 2.4354, 0.0879]

    # Finally write the files to a folder. If no output directory is given, a
    # dictionary containing all the files will be returned.
    gen.write(format="ses3d_4_0", output_dir="output")
    print "Written files to 'output' folder."
def test_adding_stations_as_URLs():
    """
    StationXML should be downloaded if necessary.

    Mock the actual downloading.
    """
    stations = [
        {"id": "HT.HORT",
         "latitude": 40.5978,
         "longitude": 23.0995,
         "elevation_in_m": 925.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.LIT",
         "latitude": 40.1003,
         "longitude": 22.489,
         "elevation_in_m": 568.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.PAIG",
         "latitude": 39.9363,
         "longitude": 23.6768,
         "elevation_in_m": 213.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.SOH",
         "latitude": 40.8206,
         "longitude": 23.3556,
         "elevation_in_m": 728.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.THE",
         "latitude": 40.6319,
         "longitude": 22.9628,
         "elevation_in_m": 124.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.XOR",
         "latitude": 39.366,
         "longitude": 23.192,
         "elevation_in_m": 500.0,
         "local_depth_in_m": 0.0}]

    station_xml_file = os.path.join(DATA, "station.xml")
    with open(station_xml_file, "rb") as fh:
        data = fh.read()

    gen = InputFileGenerator()

    # Mock the URL
    with mock.patch("urllib2.urlopen") as patch:
        class Dummy(object):
            def read(self):
                return data
        patch.return_value = Dummy()
        gen.add_stations("http://some_url.com")

    patch.assert_called_once_with("http://some_url.com")
    assert sorted(stations) == sorted(gen._stations)
def test_wrong_stf_header_format():
    """
    Simple test asserting that the correct exceptions get raised when
    attempting to write invalid STF headers.
    """
    station = {
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10}
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(station)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False

    gen.config.stf_header = "simple string"
    # Write the input files to a dictionary.
    with pytest.raises(ValueError):
        gen.write(format="ses3d_4_1")

    gen.config.stf_header = ["1", "2", "3", "4", "5", "6"]
    # Write the input files to a dictionary.
    with pytest.raises(ValueError):
        gen.write(format="ses3d_4_1")
def test_station_filter():
    """
    Tests the filtering of the stations.
    """
    stations = [
        {"id": "HT.HORT",
         "latitude": 40.5978,
         "longitude": 23.0995,
         "elevation_in_m": 925.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.LIT",
         "latitude": 40.1003,
         "longitude": 22.489,
         "elevation_in_m": 568.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.PAIG",
         "latitude": 39.9363,
         "longitude": 23.6768,
         "elevation_in_m": 213.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.SOH",
         "latitude": 40.8206,
         "longitude": 23.3556,
         "elevation_in_m": 728.0,
         "local_depth_in_m": 0.0},
        {"id": "AA.THE",
         "latitude": 40.6319,
         "longitude": 22.9628,
         "elevation_in_m": 124.0,
         "local_depth_in_m": 0.0},
        {"id": "BL.XOR",
         "latitude": 39.366,
         "longitude": 23.192,
         "elevation_in_m": 500.0,
         "local_depth_in_m": 0.0}]

    gen = InputFileGenerator()
    gen.add_stations(stations)

    # No applied filter should just result in the same stations being available
    # everywhere.
    assert sorted(gen._filtered_stations) == sorted(gen._stations)

    # Wildcards are ok.
    gen.station_filter = ["HT.*", "AA.*"]
    # Only the last station should not be available.
    assert sorted(gen._filtered_stations) == sorted(stations[:-1])

    # Removing the filter should make the missing stations reappear.
    gen.station_filter = None
    assert sorted(gen._filtered_stations) == sorted(gen._stations)
    gen.station_filter = []
    assert sorted(gen._filtered_stations) == sorted(gen._stations)
    def test_real_world_example(self):
        """
        Test that compares the created input files to those from a real world
        example.

        The only artificial thing is the source-time function but that is
        trivial to verify.

        This is a fairly comprehensive test but should be used in comparison
        with other unit tests.
        """

        gen = InputFileGenerator()

        axisem_example_path = os.path.join(self.data_dir, "axisem_example")
        gen.add_stations([
            os.path.join(self.data_dir, "dataless.seed.BW_FURT"),
            os.path.join(self.data_dir, "dataless.seed.BW_RJOB")])
        gen.add_events(os.path.join(self.data_dir, "event1.xml"))

        # Configure it.
        gen.config.dominant_period = 10.0
        gen.config.seismogram_length = 1000.0
        gen.config.number_of_processors = 12
        gen.config.background_model = 'prem'
        # Write the input files to a dictionary.
        input_files = gen.write(format='axisem')

        # The rest is only for asserting the produced files.
        for filename in glob.glob(os.path.join(axisem_example_path,
                                               "*_example")):
            with open(filename, "rt") as open_file:
                real_file = open_file.read()
            filename = os.path.basename(filename[:-8])

            if filename not in input_files:
                msg = "File '%s' has not been generated" % filename
                raise AssertionError(msg)

            lines = real_file.splitlines()
            new_lines = input_files[filename].splitlines()

            if len(lines) != len(new_lines):
                msg = ("File '%s' does not have the same number of lines "
                    "for the real (%i lines) and generated (%i lines) "
                    "input file") % (filename, len(lines), len(new_lines))
                raise AssertionError(msg)

            for line, new_line in zip(lines, new_lines):
                if line != new_line:
                    msg = "Line differs in file '%s'.\n" % filename
                    msg += "Expected: \"%s\"\n" % line
                    msg += "Got:      \"%s\"\n" % new_line
                    raise AssertionError(msg)
def test_adding_a_single_station_dictionary():
    """
    Tests adding a single station dictionary.
    """
    station = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0}
    gen = InputFileGenerator()
    gen.add_stations(station)
    assert [station] == gen._stations
def test_adding_stations_as_SAC_files():
    """
    Tests adding stations as SAC files.
    """
    sac_file = os.path.join(DATA, "example.sac")
    gen = InputFileGenerator()
    gen.add_stations(sac_file)

    assert gen._stations[0]["id"] == "IU.ANMO"
    assert round(gen._stations[0]["latitude"] - 34.94598, 5) == 0
    assert round(gen._stations[0]["longitude"] - -106.45713, 5) == 0
    assert round(gen._stations[0]["elevation_in_m"] - 1671.0, 5) == 0
    assert round(gen._stations[0]["local_depth_in_m"] - 145.0, 5) == 0
def test_station_dicts_with_invalid_information_raise():
    """
    Station dicts that have invalid types that cannot be converted should
    raise!
    """
    # All the coordinate values should be converted to floats.
    station = {"id": "BW.FURT",
               "latitude": "A",
               "longitude": 2,
               "elevation_in_m": 3,
               "local_depth_in_m": 4}
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_stations(station)
def test_adding_sac_file_without_local_depth():
    """
    This file has no local depth. This should be ok.
    """
    sac_file = os.path.join(DATA, "example_without_local_depth.sac")
    gen = InputFileGenerator()
    gen.add_stations(sac_file)

    assert gen._stations[0]["id"] == "IU.ANMO"
    assert round(gen._stations[0]["latitude"] - 34.94598, 5) == 0
    assert round(gen._stations[0]["longitude"] - -106.45713, 5) == 0
    assert round(gen._stations[0]["elevation_in_m"] - 1671.0, 5) == 0
    # Local depth will be set to 0 in case it is not available.
    assert gen._stations[0]["local_depth_in_m"] == 0
def test_adding_a_single_station_as_JSON():
    """
    Asserts that a single station can be added as JSON.
    """
    station = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0}
    json_station = json.dumps(station)
    gen = InputFileGenerator()
    gen.add_stations(json_station)
    assert [station] == gen._stations
    def test_adding_single_and_multiple_items(self):
        """
        Reading all files at once or separately should make no difference.
        """
        seed_file_1 = os.path.join(self.data_dir, "dataless.seed.BW_FURT")
        seed_file_2 = os.path.join(self.data_dir, "dataless.seed.BW_RJOB")
        station_1 = {"id": "BW.FURT",
             "latitude": 48.162899,
             "longitude": 11.2752,
             "elevation_in_m": 565.0,
             "local_depth_in_m": 0.0}
        station_2 = {"id": "BW.RJOB",
             "latitude": 47.737167,
             "longitude": 12.795714,
             "elevation_in_m": 860.0,
             "local_depth_in_m": 0.0}

        # Try with SEED files first.
        gen1 = InputFileGenerator()
        gen2 = InputFileGenerator()
        gen1.add_stations([seed_file_1, seed_file_2])
        gen2.add_stations(seed_file_1)
        gen2.add_stations(seed_file_2)
        self.assertEqual(sorted(gen1._stations), sorted(gen2._stations))

        # Now try with the dictionaries.
        gen1 = InputFileGenerator()
        gen2 = InputFileGenerator()
        gen1.add_stations([station_1, station_2])
        gen2.add_stations(station_1)
        gen2.add_stations(station_2)
        self.assertEqual(sorted(gen1._stations), sorted(gen2._stations))
def test_test_all_files_have_an_empty_last_line():
    """
    Tests that all files have an empty last line.
    """
    station = {
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10
    }
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0
    }

    gen = InputFileGenerator()
    gen.add_stations(station)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False

    # Write the input files to a dictionary.
    input_files = gen.write(format="ses3d_4_0")
    for input_file in input_files.itervalues():
        assert input_file.endswith("\n\n") is True
    def test_automatic_type_converstion_for_station_dict(self):
        """
        Fields should undergo automatic type conversion.
        """
        # All the coordinate values should be converted to floats.
        station = {"id": "BW.FURT",
                   "latitude": 1,
                   "longitude": 2,
                   "elevation_in_m": "3",
                   "local_depth_in_m": "4"}
        gen = InputFileGenerator()
        gen.add_stations(station)
        self.assertEqual(type(gen._stations[0]["latitude"]), float)
        self.assertEqual(type(gen._stations[0]["longitude"]), float)
        self.assertEqual(type(gen._stations[0]["elevation_in_m"]), float)
        self.assertEqual(type(gen._stations[0]["local_depth_in_m"]), float)

    def test_passing_station_dictionaries(self):
        """
        Checks that stations can also be passed as dictionaries.
        """
        stations = [{"id": "BW.FURT",
                     "latitude": 48.162899,
                     "longitude": 11.2752,
                     "elevation_in_m": 565.0,
                     "local_depth_in_m": 10.0},
                    {"id": "BW.RJOB",
                     "latitude": 47.737167,
                     "longitude": 12.795714,
                     "elevation_in_m": 860.0,
                     "local_depth_in_m": 2.0}]
        gen = InputFileGenerator()
        gen.add_stations(stations)
        self.assertEqual(sorted(stations), sorted(gen._stations))

    def test_local_depth_will_be_set_to_zero(self):
        """
        Tests that the local depth will be set to zero if not given.
        """
        stations = [{"id": "BW.FURT",
                     "latitude": 48.162899,
                     "longitude": 11.2752,
                     "elevation_in_m": 565.0},
                    {"id": "BW.RJOB",
                     "latitude": 47.737167,
                     "longitude": 12.795714,
                     "elevation_in_m": 860.0}]
        gen = InputFileGenerator()
        gen.add_stations(stations)
        # Now add the local depth again.
        stations[0]["local_depth_in_m"] = 0.0
        stations[1]["local_depth_in_m"] = 0.0
        self.assertEqual(sorted(stations), sorted(gen._stations))
def test_adding_station_as_list_of_dictionaries():
    """
    Checks that stations can also be passed as dictionaries.
    """
    stations = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0,
         "local_depth_in_m": 2.0}]
    gen = InputFileGenerator()
    gen.add_stations(stations)
    assert sorted(stations) == sorted(gen._stations)
def test_adding_stations_as_SEED_files_via_BytesIO():
    """
    Tests adding stations as SEED files read from a BytesIO object.
    """
    seed_file = os.path.join(DATA, "dataless.seed.BW_FURT")

    with open(seed_file, "rb") as fh:
        seed_file_mem_file = io.BytesIO(fh.read())

    gen = InputFileGenerator()
    gen.add_stations(seed_file_mem_file)

    # Only a single station is contained, so a direct comparison works.
    assert gen._stations == \
        [{"id": "BW.FURT",
          "latitude": 48.162899,
          "longitude": 11.2752,
          "elevation_in_m": 565.0,
          "local_depth_in_m": 0.0}]
def test_adding_multiple_stations_as_JSON():
    """
    Tests that stations can be added as a JSON list.
    """
    stations = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0,
         "local_depth_in_m": 2.0}]
    json_stations = json.dumps(stations)
    gen = InputFileGenerator()
    gen.add_stations(json_stations)
    assert sorted(stations) == sorted(gen._stations)
def test_adding_stations_as_StationXML_BytesIO():
    """
    StationXML uploading via a memory file.
    """
    stations = [
        {"id": "HT.HORT",
         "latitude": 40.5978,
         "longitude": 23.0995,
         "elevation_in_m": 925.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.LIT",
         "latitude": 40.1003,
         "longitude": 22.489,
         "elevation_in_m": 568.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.PAIG",
         "latitude": 39.9363,
         "longitude": 23.6768,
         "elevation_in_m": 213.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.SOH",
         "latitude": 40.8206,
         "longitude": 23.3556,
         "elevation_in_m": 728.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.THE",
         "latitude": 40.6319,
         "longitude": 22.9628,
         "elevation_in_m": 124.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.XOR",
         "latitude": 39.366,
         "longitude": 23.192,
         "elevation_in_m": 500.0,
         "local_depth_in_m": 0.0}]
    station_xml_file = os.path.join(DATA, "station.xml")
    with open(station_xml_file, "rb") as fh:
        station_mem_file = io.BytesIO(fh.read())
    gen = InputFileGenerator()
    gen.add_stations(station_mem_file)
    assert sorted(stations) == sorted(gen._stations)
    def test_other_fields_in_station_dict_are_eliminated(self):
        """
        Any additional items in a station dict should be eliminated.
        """
        # Station with everything necessary
        station = {"id": "BW.FURT",
             "latitude": 47.737167,
             "longitude": 11.2752,
             "some_random_key": "also_has_a_field",
             "elevation_in_m": 565.0,
             "local_depth_in_m": 324.0,
             "yes!": "no"}

        gen = InputFileGenerator()
        gen.add_stations(station)
        self.assertEqual(sorted(gen._stations),
            sorted([{"id": "BW.FURT",
             "latitude": 47.737167,
             "longitude": 11.2752,
             "elevation_in_m": 565.0,
             "local_depth_in_m": 324.0}]))
def test_adding_stations_as_SEED_files():
    """
    Tests adding stations as SEED files.
    """
    seed_file_1 = os.path.join(DATA, "dataless.seed.BW_FURT")
    seed_file_2 = os.path.join(DATA, "dataless.seed.BW_RJOB")

    gen = InputFileGenerator()
    gen.add_stations([seed_file_1, seed_file_2])

    # Sort to be able to compare.
    assert sorted(gen._stations) == \
        [{"id": "BW.FURT",
          "latitude": 48.162899,
          "longitude": 11.2752,
          "elevation_in_m": 565.0,
          "local_depth_in_m": 0.0},
         {"id": "BW.RJOB",
          "latitude": 47.737167,
          "longitude": 12.795714,
          "elevation_in_m": 860.0,
          "local_depth_in_m": 0.0}]
def test_adding_stations_as_StationXML():
    """
    Tests adding stations as StationXML.
    """
    stations = [
        {"id": "HT.HORT",
         "latitude": 40.5978,
         "longitude": 23.0995,
         "elevation_in_m": 925.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.LIT",
         "latitude": 40.1003,
         "longitude": 22.489,
         "elevation_in_m": 568.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.PAIG",
         "latitude": 39.9363,
         "longitude": 23.6768,
         "elevation_in_m": 213.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.SOH",
         "latitude": 40.8206,
         "longitude": 23.3556,
         "elevation_in_m": 728.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.THE",
         "latitude": 40.6319,
         "longitude": 22.9628,
         "elevation_in_m": 124.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.XOR",
         "latitude": 39.366,
         "longitude": 23.192,
         "elevation_in_m": 500.0,
         "local_depth_in_m": 0.0}]
    station_xml_file = os.path.join(DATA, "station.xml")
    gen = InputFileGenerator()
    gen.add_stations(station_xml_file)
    assert sorted(stations) == sorted(gen._stations)
def test_other_fields_in_station_dict_are_eliminated():
    """
    Any additional items in a station dict should be eliminated.
    """
    # Station with everything necessary
    station = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752,
        "some_random_key": "also_has_a_field",
        "elevation_in_m": 565.0,
        "local_depth_in_m": 324.0,
        "yes!": "no"}

    gen = InputFileGenerator()
    gen.add_stations(station)
    assert gen._stations == \
        [{"id": "BW.FURT",
          "latitude": 47.737167,
          "longitude": 11.2752,
          "elevation_in_m": 565.0,
          "local_depth_in_m": 324.0}]
    def test_reading_SEED_files(self):
        """
        Tests the reading of SEED files.
        """
        seed_file_1 = os.path.join(self.data_dir, "dataless.seed.BW_FURT")
        seed_file_2 = os.path.join(self.data_dir, "dataless.seed.BW_RJOB")

        gen = InputFileGenerator()
        gen.add_stations([seed_file_1, seed_file_2])

        # Sort to be able to compare.
        stations = sorted(gen._stations)
        self.assertEqual([
            {"id": "BW.FURT",
             "latitude": 48.162899,
             "longitude": 11.2752,
             "elevation_in_m": 565.0,
             "local_depth_in_m": 0.0},
            {"id": "BW.RJOB",
             "latitude": 47.737167,
             "longitude": 12.795714,
             "elevation_in_m": 860.0,
             "local_depth_in_m": 0.0}], stations)
def test_automatic_type_converstion_for_station_dict():
    """
    Fields should undergo automatic type conversion.
    """
    # All the coordinate values should be converted to floats.
    station = {"id": "BW.FURT",
               "latitude": "1",
               "longitude": "2",
               "elevation_in_m": "3",
               "local_depth_in_m": "4"}
    gen = InputFileGenerator()
    gen.add_stations(station)
    assert type(gen._stations[0]["latitude"]) == float
    assert type(gen._stations[0]["longitude"]) == float
    assert type(gen._stations[0]["elevation_in_m"]) == float
    assert type(gen._stations[0]["local_depth_in_m"]) == float

    assert gen._stations == [{
        "id": "BW.FURT",
        "latitude": 1.0,
        "longitude": 2.0,
        "elevation_in_m": 3.0,
        "local_depth_in_m": 4.0}]
    def compute(self):
        gen = InputFileGenerator()
        userconf = json.load(open(self.parameters["solver_conf_file"]))

        fields = userconf["fields"]

        for x in fields:
            gen.add_configuration({x["name"]: self.strToBool(x["value"])})

        with open(self.parameters["quakeml"], "r") as events:
            quakeml = events.read()

        #unicode_qml=quakeml.decode('utf-8')
        #data = unicode_qml.encode('ascii','ignore')


##
        cat = readQuakeML(quakeml)
        events = []
        #cat = obspy.readEvents(data)
        #Remove all events with no moment tensor.
        for event in cat:
            for fm in event.focal_mechanisms:
                if fm.moment_tensor and fm.moment_tensor.tensor:
                    events.append(event)
                    break
        cat.events = events

        gen.add_events(cat)

        evn = 0
        outputdir = ""
        for x in userconf["events"]:
            gen.event_filter = [x]

            if self.parameters["station_format"] == "stationXML":
                gen.add_stations(self.parameters["stations_file"])

            if self.parameters["station_format"] == "points":
                stlist = []
                with open(self.parameters["stations_file"]) as f:
                    k = False
                    for line in f:

                        if (k == False):
                            k = True
                        else:
                            station = {}
                            l = line.strip().split(" ")
                            station.update({"id": l[1] + "." + l[0]})
                            station.update({"latitude": float(l[3])})
                            station.update({"longitude": float(l[2])})
                            station.update({"elevation_in_m": float(l[4])})
                            station.update({"local_depth_in_m": float(l[5])})
                            stlist.append(station)

                gen.add_stations(stlist)

            gen.station_filter = userconf["stations"]

            outputdir = self.outputdest + userconf["runId"] + "/" + userconf[
                "runId"] + "_" + str(evn) + "/DATA"
            output_files = gen.write(format=userconf["solver"],
                                     output_dir=outputdir)

            locations = []
            for x in output_files.keys():
                locations.append("file://" + socket.gethostname() + outputdir +
                                 "/" + x)

            self.addOutput(gen._filtered_events,
                           location=locations,
                           metadata=self.extractEventMetadata(
                               outputdir, gen._filtered_events),
                           control={"con:immediateAccess": "true"})

            evn += 1

        self.addOutput(outputdir,
                       location=locations,
                       metadata={"to_xdecompose": str(outputdir)},
                       control={"con:immediateAccess": "true"})
def test_real_world_example():
    """
    Test that compares the created input files to those from a real world
    example.

    The only artificial thing is the source-time function but that is
    trivial to verify.

    This is a fairly comprehensive test but should be used in comparison
    with other unit tests.
    """
    stations = [{
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10
    }, {
        "id": "KO.AFSR",
        "latitude": 40.000,
        "longitude": 33.2345,
        "elevation_in_m": 220
    }]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0
    }

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False
    gen.config.adjoint_forward_wavefield_output_folder = \
        "/tmp/some_folder/"
    gen.config.displacement_snapshot_sampling = 15000
    gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
    gen.config.Q_model_weights_of_relaxation_mechanisms = \
        [2.5100, 2.4354, 0.0879]

    # Write the input files to a dictionary.
    input_files = gen.write(format="ses3d_4_0")

    # The rest is only for asserting the produced files.
    path = os.path.join(DATA, "ses3d_4_0_real_world_example")
    for filename in glob.glob(os.path.join(path, "*")):
        with open(filename, "rt") as open_file:
            real_file = open_file.read()
        filename = os.path.basename(filename)

        if filename not in input_files:
            msg = "File '%s' has not been generated" % filename
            raise AssertionError(msg)

        lines = real_file.splitlines()
        new_lines = input_files[filename].splitlines()

        if len(lines) != len(new_lines):
            msg = ("File '%s' does not have the same number of lines "
                   "for the real (%i lines) and generated (%i lines) "
                   "input file") % (filename, len(lines), len(new_lines))
            raise AssertionError(msg)

        for line, new_line in zip(lines, new_lines):
            if line != new_line:
                msg = "Line differs in file '%s'.\n" % filename
                msg += "Expected: \"%s\"\n" % line
                msg += "Got:      \"%s\"\n" % new_line
                raise AssertionError(msg)
def test_simple():
    """
    Test a very simple SPECFEM file.
    """
    stations = [
        {
            "id": "KO.ADVT",
            "latitude": 41.0,
            "longitude": 33.1234,
            "elevation_in_m": 10
        }, {
            "id": "KO.AFSR",
            "latitude": 40.000,
            "longitude": 33.2345,
            "elevation_in_m": 220
        }
    ]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it.
    gen.config.NPROC = 5
    gen.config.NSTEP = 10
    gen.config.DT = 15
    gen.config.SIMULATION_TYPE = 1

    # Write the input files to a dictionary.
    input_files = gen.write(format="SPECFEM3D_CARTESIAN")

    assert bool(input_files)

    assert sorted(input_files.keys()) == \
        sorted(["Par_file", "CMTSOLUTION", "STATIONS"])

    # Assert the STATIONS file.
    assert input_files["STATIONS"].splitlines() == [
        "ADVT KO 41.00000 33.12340 10.0 0.0",
        "AFSR KO 40.00000 33.23450 220.0 0.0"]

    # Assert the CMTSOLUTION file.
    assert input_files["CMTSOLUTION"].splitlines() == [
        "PDE 2012 4 12 7 15 48.50 39.26000 41.04000 5.00000 4.7 4.7 "
        "2012-04-12T07:15:48.500000Z_4.7",
        "event name:      0000000",
        "time shift:       0.0000",
        "half duration:    0.0000",
        "latitude:       39.26000",
        "longitude:      41.04000",
        "depth:          5.00000",
        "Mrr:         1e+23",
        "Mtt:         1e+23",
        "Mpp:         1e+23",
        "Mrt:         0",
        "Mrp:         0",
        "Mtp:         0"]

    # XXX: Extend test.
    par_file = input_files["Par_file"]
    assert "SIMULATION_TYPE" in par_file
    assert "NSTEP" in par_file
def test_id_lat_lon_ele_are_necessary():
    """
    Tests that some station fields need to be set.
    """
    # Station with missing id.
    station_1 = {
        "latitude": 47.737167,
        "longitude": 11.2752,
        "elevation_in_m": 565.0}
    # Station with missing latitude.
    station_2 = {
        "id": "BW.FURT",
        "longitude": 11.2752,
        "elevation_in_m": 565.0}
    # Station with missing longitude.
    station_3 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "elevation_in_m": 565.0}
    # Station with missing elevation.
    station_4 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752}
    # Station with everything necessary
    station_5 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752,
        "elevation_in_m": 565.0}

    gen = InputFileGenerator()
    # The first 4 should raise a ValueError
    with pytest.raises(ValueError):
        gen.add_stations(station_1)
    with pytest.raises(ValueError):
        gen.add_stations(station_2)
    with pytest.raises(ValueError):
        gen.add_stations(station_3)
    with pytest.raises(ValueError):
        gen.add_stations(station_4)
    # The last one not.
    gen.add_stations(station_5)

    # Do exactly the same with JSON variants.
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_1))
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_2))
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_3))
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_4))
    gen.add_stations(station_5)
def test_adding_single_and_multiple_station():
    """
    Reading all files at once or separately should make no difference.
    """
    seed_file_1 = os.path.join(DATA, "dataless.seed.BW_FURT")
    seed_file_2 = os.path.join(DATA, "dataless.seed.BW_RJOB")
    station_1 = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 0.0}
    station_2 = {
        "id": "BW.RJOB",
        "latitude": 47.737167,
        "longitude": 12.795714,
        "elevation_in_m": 860.0,
        "local_depth_in_m": 0.0}

    # Try with SEED files first.
    gen1 = InputFileGenerator()
    gen2 = InputFileGenerator()
    gen1.add_stations([seed_file_1, seed_file_2])
    gen2.add_stations(seed_file_1)
    gen2.add_stations(seed_file_2)
    assert sorted(gen1._stations) == sorted(gen2._stations)

    # Now try with the dictionaries.
    gen1 = InputFileGenerator()
    gen2 = InputFileGenerator()
    gen1.add_stations([station_1, station_2])
    gen2.add_stations(station_1)
    gen2.add_stations(station_2)
    assert sorted(gen1._stations) == sorted(gen2._stations)

    # Now with JSON.
    gen1 = InputFileGenerator()
    gen2 = InputFileGenerator()
    gen1.add_stations(json.dumps([station_1, station_2]))
    gen2.add_stations(json.dumps(station_1))
    gen2.add_stations(json.dumps(station_2))
    assert sorted(gen1._stations) == sorted(gen2._stations)
import os

from wfs_input_generator import InputFileGenerator


gen = InputFileGenerator()
gen.add_events("wfs_input_generator/tests/data/event1.xml")
gen.add_stations(["wfs_input_generator/tests/data/dataless.seed.BW_FURT",
                  "wfs_input_generator/tests/data/dataless.seed.BW_RJOB"])
gen.config.mesh = {"mesh": "eucrust_small_new"}
gen.config.model = {"model": "model_eucrust_small_new.dat"}

gen.config.parameter = {
    "version": 18, "dimension": 3, "advection": 0,
    "advection_velocity": (1.0, 1.0, 1.0), "anisotropy": 0,
    "anelasticity": 0, "poroelasticity": 0, "adjoint": 0,
    "material_reference_values": (3600, 9.0e10, 1.11e11),
    "randomfield": 0, "sourcetype": 50, "source_file": "source.dat",
    "sponge": 0, "meshgenerator": "Gambit3D-Tetra", "fine_output": 0,
    "restartfile": 0, "DGMethod": 1, "CK": 0, "fluxmethod": 0, "DGCycle": 1,
    "basisfunction_degree": 0, "reconstructed_basisfunction_degree": 0,
    "stencil_security_factor": 0, "reconstruction_type": 0, "exponent_r": 0,
    "coefficient_epsilon": 0, "linear_weight": 0, "limiter_security_factor": 0,
    "minspace_order": 5, "maxspace_order": 5,
    "pAdaptivity_file_name": "pAdaptivity_file_name",
    "material_basis_function_order": 1, "courant_number": 0.5,
    "min_time_step": 10000, "rotational_output": 0,
    "rotation_components": (1, 1, 1),
    "variable_output": (0, 0, 0, 0, 0, 0, 1, 1, 1),
    "material_parameters_output": (1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0),
    "output_character": 0, "output_format": 1, "timestep_output": 50,
    "time_output": 25, "output_index": 1, "sampling": 1, "max_time": 2000,
    "max_iteration": 1000, "max_wallclocktime": 1e20, "delay": 0}

test_dir = "/home/msimon/svn/repos/verce/All/JRA/JRA1/python/test_ressources/inputfiles/"
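
# The snippet above stops before any files are written. The final step would
# be a call like the one sketched below; the solver format key for this
# configuration is not shown in the snippet, so "SOLVER_FORMAT" is only a
# placeholder and not a real format name.
#
#     gen.write(format="SOLVER_FORMAT", output_dir=test_dir)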
    def generate_input_files(self, event_name, template_name, simulation_type,
                             source_time_function):
        """
        Generate the input files for one event.

        :param event_name: The name of the event for which to generate the
            input files.
        :param template_name: The name of the input file template
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        :param source_time_function: A function source_time_function(npts,
            delta), taking the requested number of samples and the time spacing
            and returning an appropriate source time function as numpy array.
        """
        from lasif import utils
        from wfs_input_generator import InputFileGenerator

        # Get the events
        all_events = self.get_event_dict()
        if event_name not in all_events:
            msg = "Event '%s' not found in project." % event_name
            raise ValueError(msg)

        event = self.get_event(event_name)

        # Get the input file templates.
        template_filename = os.path.join(self.paths["templates"],
                                         template_name + ".xml")
        if not os.path.exists(template_filename):
            msg = "Template '%s' does not exists." % template_name
            raise ValueError(msg)
        input_file = utils.read_ses3d_4_0_template(template_filename)

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.get_stations_for_event(event_name)
        stations = [{
            "id": key,
            "latitude": value["latitude"],
            "longitude": value["longitude"],
            "elevation_in_m": value["elevation"],
            "local_depth_in_m": value["local_depth"]
        } for key, value in stations.items()]

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event)
        gen.add_stations(stations)

        npts = input_file["simulation_parameters"]["number_of_time_steps"]
        delta = input_file["simulation_parameters"]["time_increment"]
        # Time configuration.
        gen.config.number_of_time_steps = npts
        gen.config.time_increment_in_s = delta

        # SES3D specific configuration
        gen.config.output_folder = input_file["output_directory"]
        gen.config.simulation_type = simulation_type

        gen.config.adjoint_forward_wavefield_output_folder = \
            input_file["adjoint_output_parameters"][
                "forward_field_output_directory"]
        gen.config.adjoint_forward_sampling_rate = \
            input_file["adjoint_output_parameters"][
                "sampling_rate_of_forward_field"]
        gen.config.is_dissipative = \
            input_file["simulation_parameters"]["is_dissipative"]

        # Discretization
        disc = input_file["computational_setup"]
        gen.config.nx_global = disc["nx_global"]
        gen.config.ny_global = disc["ny_global"]
        gen.config.nz_global = disc["nz_global"]
        gen.config.px = disc["px_processors_in_theta_direction"]
        gen.config.py = disc["py_processors_in_phi_direction"]
        gen.config.pz = disc["pz_processors_in_r_direction"]
        gen.config.lagrange_polynomial_degree = \
            disc["lagrange_polynomial_degree"]

        # Configure the mesh.
        gen.config.mesh_min_latitude = \
            self.domain["bounds"]["minimum_latitude"]
        gen.config.mesh_max_latitude = \
            self.domain["bounds"]["maximum_latitude"]
        gen.config.mesh_min_longitude = \
            self.domain["bounds"]["minimum_longitude"]
        gen.config.mesh_max_longitude = \
            self.domain["bounds"]["maximum_longitude"]
        gen.config.mesh_min_depth_in_km = \
            self.domain["bounds"]["minimum_depth_in_km"]
        gen.config.mesh_max_depth_in_km = \
            self.domain["bounds"]["maximum_depth_in_km"]

        gen.config.rotation_angle_in_degree = self.domain["rotation_angle"]
        gen.config.rotation_axis = self.domain["rotation_axis"]

        gen.config.source_time_function = source_time_function(
            int(npts), float(delta))

        output_dir = self.get_output_folder("input_files___%s" % template_name)

        gen.write(format="ses3d_4_0", output_dir=output_dir)
        print "Written files to '%s'." % output_dir
    def test_real_world_example(self):
        """
        Test that compares the created input files to those from a real world
        example.

        The only artificial thing is the source-time function but that is
        trivial to verify.

        This is a fairly comprehensive test but should be used in combination
        with the other unit tests.
        """
        stations = [
            {
                "id": "KO.ADVT",
                "latitude": 41.0,
                "longitude": 33.1234,
                "elevation_in_m": 10
            }, {
                "id": "KO.AFSR",
                "latitude": 40.000,
                "longitude": 33.2345,
                "elevation_in_m": 220
            }
        ]
        event = {
            "latitude": 39.260,
            "longitude": 41.040,
            "depth_in_km": 5.0,
            "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
            "m_rr": 1.0e16,
            "m_tt": 1.0e16,
            "m_pp": 1.0e16,
            "m_rt": 0.0,
            "m_rp": 0.0,
            "m_tp": 0.0}

        gen = InputFileGenerator()
        gen.add_stations(stations)
        gen.add_events(event)

        # Configure it.
        gen.config.number_of_time_steps = 4000
        gen.config.time_increment_in_s = 0.13
        gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
        gen.config.mesh_min_latitude = 34.1
        gen.config.mesh_max_latitude = 42.9
        gen.config.mesh_min_longitude = 23.1
        gen.config.mesh_max_longitude = 42.9
        gen.config.mesh_min_depth_in_km = 0.0
        gen.config.mesh_max_depth_in_km = 471.0
        gen.config.nx_global = 66
        gen.config.ny_global = 108
        gen.config.nz_global = 28
        gen.config.px = 3
        gen.config.py = 4
        gen.config.pz = 4
        gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
        gen.config.is_dissipative = False
        gen.config.adjoint_forward_wavefield_output_folder = \
            "/tmp/some_folder/"
        gen.config.displacement_snapshot_sampling = 15000
        gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
        gen.config.Q_model_weights_of_relaxation_mechanisms = \
            [2.5100, 2.4354, 0.0879]

        # Write the input files to a dictionary.
        input_files = gen.write(format="ses3d_4_0")

        # The rest is only for asserting the produced files.
        path = os.path.join(self.data_dir, "ses3d_4_0_real_world_example")
        for filename in glob.glob(os.path.join(path, "*")):
            with open(filename, "rt") as open_file:
                real_file = open_file.read().strip()
            filename = os.path.basename(filename)

            if filename not in input_files:
                msg = "File '%s' has not been generated" % filename
                raise AssertionError(msg)

            lines = real_file.splitlines()
            new_lines = input_files[filename].splitlines()

            if len(lines) != len(new_lines):
                msg = ("File '%s' does not have the same number of lines "
                    "for the real (%i lines) and generated (%i lines) "
                    "input file") % (filename, len(lines), len(new_lines))
                raise AssertionError(msg)

            for line, new_line in zip(lines, new_lines):
                if line != new_line:
                    msg = "Line differs in file '%s'.\n" % filename
                    msg += "Expected: \"%s\"\n" % line
                    msg += "Got:      \"%s\"\n" % new_line
                    raise AssertionError(msg)
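
        # Note: an equivalent, more compact way to report a mismatch would be
        # difflib (sketch only, not used by the test above):
        #
        #     import difflib
        #     diff = "\n".join(difflib.unified_diff(
        #         lines, new_lines,
        #         fromfile="expected/" + filename,
        #         tofile="generated/" + filename, lineterm=""))
        #     assert not diff, diff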
    def generate_input_files(self, iteration_name, event_name,
                             simulation_type):
        """
        Generate the input files for one event.

        :param iteration_name: The name of the iteration.
        :param event_name: The name of the event for which to generate the
            input files.
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        """
        from wfs_input_generator import InputFileGenerator

        # =====================================================================
        # read iteration xml file, get event and list of stations
        # =====================================================================

        iteration = self.comm.iterations.get(iteration_name)

        # Check that the event is part of the iterations.
        if event_name not in iteration.events:
            msg = ("Event '%s' not part of iteration '%s'.\nEvents available "
                   "in iteration:\n\t%s" %
                   (event_name, iteration_name, "\n\t".join(
                       sorted(iteration.events.keys()))))
            raise ValueError(msg)

        event = self.comm.events.get(event_name)
        stations_for_event = list(
            iteration.events[event_name]["stations"].keys())

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.comm.query.get_all_stations_for_event(event_name)
        stations = [{
            "id": key,
            "latitude": value["latitude"],
            "longitude": value["longitude"],
            "elevation_in_m": value["elevation_in_m"],
            "local_depth_in_m": value["local_depth_in_m"]
        } for key, value in stations.items() if key in stations_for_event]

        # =====================================================================
        # set solver options
        # =====================================================================

        solver = iteration.solver_settings

        # Check that the solver format is one of the supported ones.
        solver_format = solver["solver"].lower()
        if solver_format not in [
                "ses3d 4.1", "ses3d 2.0", "specfem3d cartesian",
                "specfem3d globe cem"
        ]:
            msg = ("Currently only SES3D 4.1, SES3D 2.0, SPECFEM3D "
                   "CARTESIAN, and SPECFEM3D GLOBE CEM are supported.")
            raise ValueError(msg)
        solver_format = solver_format.replace(' ', '_')
        solver_format = solver_format.replace('.', '_')

        solver = solver["solver_settings"]

        # =====================================================================
        # create the input file generator, add event and stations,
        # populate the configuration items
        # =====================================================================

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event["filename"])
        gen.add_stations(stations)

        if solver_format in ["ses3d_4_1", "ses3d_2_0"]:
            # event tag
            gen.config.event_tag = event_name

            # Time configuration.
            npts = solver["simulation_parameters"]["number_of_time_steps"]
            delta = solver["simulation_parameters"]["time_increment"]
            gen.config.number_of_time_steps = npts
            gen.config.time_increment_in_s = delta

            # SES3D specific configuration
            gen.config.output_folder = solver["output_directory"].replace(
                "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.simulation_type = simulation_type

            gen.config.adjoint_forward_wavefield_output_folder = \
                solver["adjoint_output_parameters"][
                    "forward_field_output_directory"].replace(
                    "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.adjoint_forward_sampling_rate = \
                solver["adjoint_output_parameters"][
                    "sampling_rate_of_forward_field"]

            # Visco-elastic dissipation
            diss = solver["simulation_parameters"]["is_dissipative"]
            gen.config.is_dissipative = diss

            # Only SES3D 4.1 has the relaxation parameters.
            if solver_format == "ses3d_4_1":
                gen.config.Q_model_relaxation_times = \
                    solver["relaxation_parameter_list"]["tau"]
                gen.config.Q_model_weights_of_relaxation_mechanisms = \
                    solver["relaxation_parameter_list"]["w"]

            # Discretization
            disc = solver["computational_setup"]
            gen.config.nx_global = disc["nx_global"]
            gen.config.ny_global = disc["ny_global"]
            gen.config.nz_global = disc["nz_global"]
            gen.config.px = disc["px_processors_in_theta_direction"]
            gen.config.py = disc["py_processors_in_phi_direction"]
            gen.config.pz = disc["pz_processors_in_r_direction"]
            gen.config.lagrange_polynomial_degree = \
                disc["lagrange_polynomial_degree"]

            # Configure the mesh.
            domain = self.comm.project.domain
            gen.config.mesh_min_latitude = domain.min_latitude
            gen.config.mesh_max_latitude = domain.max_latitude
            gen.config.mesh_min_longitude = domain.min_longitude
            gen.config.mesh_max_longitude = domain.max_longitude
            gen.config.mesh_min_depth_in_km = domain.min_depth_in_km
            gen.config.mesh_max_depth_in_km = domain.max_depth_in_km

            # Set the rotation parameters.
            gen.config.rotation_angle_in_degree = \
                domain.rotation_angle_in_degree
            gen.config.rotation_axis = domain.rotation_axis

            # Make source time function
            gen.config.source_time_function = \
                iteration.get_source_time_function()["data"]
        elif solver_format == "specfem3d_cartesian":
            gen.config.NSTEP = \
                solver["simulation_parameters"]["number_of_time_steps"]
            gen.config.DT = \
                solver["simulation_parameters"]["time_increment"]
            gen.config.NPROC = \
                solver["computational_setup"]["number_of_processors"]
            if simulation_type == "normal simulation":
                msg = ("'normal_simulate' not supported for SPECFEM3D "
                       "Cartesian. Please choose either 'adjoint_forward' or "
                       "'adjoint_reverse'.")
                raise NotImplementedError(msg)
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
            else:
                raise NotImplementedError
            solver_format = solver_format.upper()

        elif solver_format == "specfem3d_globe_cem":
            cs = solver["computational_setup"]
            gen.config.NPROC_XI = cs["number_of_processors_xi"]
            gen.config.NPROC_ETA = cs["number_of_processors_eta"]
            gen.config.NCHUNKS = cs["number_of_chunks"]
            gen.config.NEX_XI = cs["elements_per_chunk_xi"]
            gen.config.NEX_ETA = cs["elements_per_chunk_eta"]
            gen.config.OCEANS = cs["simulate_oceans"]
            gen.config.ELLIPTICITY = cs["simulate_ellipticity"]
            gen.config.TOPOGRAPHY = cs["simulate_topography"]
            gen.config.GRAVITY = cs["simulate_gravity"]
            gen.config.ROTATION = cs["simulate_rotation"]
            gen.config.ATTENUATION = cs["simulate_attenuation"]
            gen.config.ABSORBING_CONDITIONS = True
            if cs["fast_undo_attenuation"]:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = True
                gen.config.UNDO_ATTENUATION = False
            else:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = False
                gen.config.UNDO_ATTENUATION = True
            gen.config.GPU_MODE = cs["use_gpu"]
            gen.config.SOURCE_TIME_FUNCTION = \
                iteration.get_source_time_function()["data"]

            if simulation_type == "normal simulation":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = False
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = True
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
                gen.config.SAVE_FORWARD = True
            else:
                raise NotImplementedError

            # Use the current domain setting to derive the bounds in the way
            # SPECFEM specifies them.
            domain = self.comm.project.domain

            lat_range = domain.max_latitude - \
                domain.min_latitude
            lng_range = domain.max_longitude - \
                domain.min_longitude

            c_lat = \
                domain.min_latitude + lat_range / 2.0
            c_lng = \
                domain.min_longitude + lng_range / 2.0

            # Rotate the point.
            c_lat_1, c_lng_1 = rotations.rotate_lat_lon(
                c_lat, c_lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)

            # SES3D rotation.
            A = rotations._get_rotation_matrix(domain.rotation_axis,
                                               domain.rotation_angle_in_degree)

            latitude_rotation = -(c_lat_1 - c_lat)
            longitude_rotation = c_lng_1 - c_lng

            # Rotate the latitude. The rotation axis is latitude 0 and
            # the center longitude + 90 degree
            B = rotations._get_rotation_matrix(
                rotations.lat_lon_radius_to_xyz(0.0, c_lng + 90, 1.0),
                latitude_rotation)
            # Rotate around the North pole.
            C = rotations._get_rotation_matrix([0.0, 0.0, 1.0],
                                               longitude_rotation)

            D = A * np.linalg.inv(C * B)

            axis, angle = rotations._get_axis_and_angle_from_rotation_matrix(D)
            rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            # Consistency check
            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                axis *= -1.0
                angle *= -1.0
                rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                msg = "Failed to describe the domain in terms that SPECFEM " \
                      "understands. The domain definition in the output " \
                      "files will NOT BE CORRECT!"
                warnings.warn(msg, LASIFWarning)

            gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = lng_range
            gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = lat_range
            gen.config.CENTER_LATITUDE_IN_DEGREES = c_lat_1
            gen.config.CENTER_LONGITUDE_IN_DEGREES = c_lng_1
            gen.config.GAMMA_ROTATION_AZIMUTH = angle

            gen.config.MODEL = cs["model"]

            pp = iteration.get_process_params()
            gen.config.RECORD_LENGTH_IN_MINUTES = \
                (pp["npts"] * pp["dt"]) / 60.0
            solver_format = solver_format.upper()

        else:
            msg = "Unknown solver '%s'." % solver_format
            raise NotImplementedError(msg)

        # =================================================================
        # output
        # =================================================================
        output_dir = self.comm.project.get_output_folder(
            type="input_files",
            tag="ITERATION_%s__%s__EVENT_%s" %
            (iteration_name, simulation_type.replace(" ", "_"), event_name))

        gen.write(format=solver_format, output_dir=output_dir)
        print("Written files to '%s'." % output_dir)