def test_adding_invalid_file_to_event_raises():
    """
    Adding some invalid things should of course raise.
    """
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_events("some_nonesense")
    def test_reading_events_from_dictionary(self):
        """
        Tests that events can also be passed as dictionaries.
        """
        events = [{
            "latitude": 45.0,
            "longitude": 12.1,
            "depth_in_km": 13.0,
            "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
            "m_rr": -2.11e+18,
            "m_tt": -4.22e+19,
            "m_pp": 4.43e+19,
            "m_rt": -9.35e+18,
            "m_rp": -8.38e+18,
            "m_tp": -6.44e+18
        }, {
            "latitude": 13.93,
            "longitude": -92.47,
            "depth_in_km": 28.7,
            "origin_time": UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
            "m_rr": 1.02e+20,
            "m_tt": -7.96e+19,
            "m_pp": -2.19e+19,
            "m_rt": 6.94e+19,
            "m_rp": -4.08e+19,
            "m_tp": 4.09e+19}]
        gen = InputFileGenerator()
        gen.add_events(events)
        self.assertEqual(sorted(gen._events), sorted(events))
    def test_reading_QuakeML_files(self):
        """
        Tests the reading of QuakeML Files.
        """
        event_file_1 = os.path.join(self.data_dir, "event1.xml")
        event_file_2 = os.path.join(self.data_dir, "event2.xml")

        gen = InputFileGenerator()
        gen.add_events([event_file_1, event_file_2])

        # Sort to be able to compare.
        events = sorted(gen._events)
        self.assertEqual([{
            "latitude": 45.0,
            "longitude": 12.1,
            "depth_in_km": 13.0,
            "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
            "m_rr": -2.11e+18,
            "m_tt": -4.22e+19,
            "m_pp": 4.43e+19,
            "m_rt": -9.35e+18,
            "m_rp": -8.38e+18,
            "m_tp": -6.44e+18
        }, {
            "latitude": 13.93,
            "longitude": -92.47,
            "depth_in_km": 28.7,
            "origin_time": UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
            "m_rr": 1.02e+20,
            "m_tt": -7.96e+19,
            "m_pp": -2.19e+19,
            "m_rt": 6.94e+19,
            "m_rp": -4.08e+19,
            "m_tp": 4.09e+19}],
            events)
def test_reading_events_from_dictionary():
    """
    Tests that events can also be passed as dictionaries.
    """
    events = [{
        "description": "Event at a sunny place.",
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "description": "Event at a rainy place.",
        "latitude": 13.93,
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]
    gen = InputFileGenerator()
    gen.add_events(events)
    assert sorted(gen._events) == sorted(events)
def test_adding_events_as_URL():
    """
    QuakeMLs should be downloaded if necessary.

    Mock the actual downloading.
    """
    event = {"description": "FICTIONAL EVENT IN BAVARIA",
             "latitude": 45.0,
             "longitude": 12.1,
             "depth_in_km": 13.0,
             "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
             "m_rr": -2.11e+18,
             "m_tt": -4.22e+19,
             "m_pp": 4.43e+19,
             "m_rt": -9.35e+18,
             "m_rp": -8.38e+18,
             "m_tp": -6.44e+18,
             "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"}

    quake_ml_file = os.path.join(DATA, "event1.xml")
    with open(quake_ml_file, "rb") as fh:
        data = fh.read()

    gen = InputFileGenerator()

    # Mock the URL
    with mock.patch("urllib2.urlopen") as patch:
        class Dummy(object):
            def read(self):
                return data
        patch.return_value = Dummy()
        gen.add_events("http://some_url.com")

    patch.assert_called_once_with("http://some_url.com")
    assert [event] == gen._events
def test_adding_multiple_events_JSON():
    """
    Tests adding multiple events as JSON.
    """
    events = [{
        "latitude": 45.0,
        "description": "Some description",
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": str(obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000)),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "latitude": 13.93,
        "description": "Some other description",
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": str(obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000)),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]
    gen = InputFileGenerator()
    gen.add_events(json.dumps(events))

    events[0]["origin_time"] = obspy.UTCDateTime(events[0]["origin_time"])
    events[1]["origin_time"] = obspy.UTCDateTime(events[1]["origin_time"])
    assert sorted(gen._events) == sorted(events)
def main():
    gen = InputFileGenerator()

    # SES3D 4.0 can only simulate one event at a time.
    gen.add_events("../tests/data/event1.xml")
    gen.add_stations([
        "../tests/data/dataless.seed.BW_FURT",
        "../tests/data/dataless.seed.BW_RJOB"
    ])

    # Just perform a standard forward simulation.
    gen.config.simulation_type = "normal simulation"

    gen.config.output_folder = "../OUTPUT"

    # Time configuration.
    gen.config.number_of_time_steps = 700
    gen.config.time_increment_in_s = 0.75

    # SES3D specific configuration
    gen.config.output_directory = "../DATA/OUTPUT/1.8s"
    # SES3D specific discretization
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4

    # Specify some source time function.
    gen.config.source_time_function = np.sin(np.linspace(0, 10, 700))

    # Configure the mesh.
    gen.config.mesh_min_latitude = -50.0
    gen.config.mesh_max_latitude = 50.0
    gen.config.mesh_min_longitude = -50.0
    gen.config.mesh_max_longitude = 50.0
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 200.0

    # Define the rotation. Take care this is defined as the rotation of the
    # mesh.  The data will be rotated in the opposite direction! The following
    # example will rotate the mesh 5 degrees southwards around the x-axis. For
    # a definition of the coordinate system refer to the rotations.py file. The
    # rotation is entirely optional.
    gen.config.rotation_angle_in_degree = 5.0
    gen.config.rotation_axis = [1.0, 0.0, 0.0]

    # Define Q
    gen.config.is_dissipative = True
    gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
    gen.config.Q_model_weights_of_relaxation_mechanisms = \
        [2.5100, 2.4354, 0.0879]

    # Finally write the files to a folder. If no output directory is given, a
    # dictionary containing all the files will be returned instead.
    gen.write(format="ses3d_4_0", output_dir="output")
    print "Written files to 'output' folder."
def test_event_filter():
    """
    Tests the filtering of the events.

    This is funky. If any filter is given, it will remove all events that do
    not have an event_id.
    """
    events = \
        [{"latitude": 45.0,
          "longitude": 12.1,
          "depth_in_km": 13.0,
          "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
          "m_rr": -2.11e+18,
          "m_tt": -4.22e+19,
          "m_pp": 4.43e+19,
          "m_rt": -9.35e+18,
          "m_rp": -8.38e+18,
          "m_tp": -6.44e+18,
          "description": "FICTIONAL EVENT IN BAVARIA",
          "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
         {"latitude": 13.93,
          "longitude": -92.47,
          "depth_in_km": 28.7,
          "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
          "m_rr": 1.02e+20,
          "m_tt": -7.96e+19,
          "m_pp": -2.19e+19,
          "m_rt": 6.94e+19,
          "m_rp": -4.08e+19,
          "m_tp": 4.09e+19,
          "description": "GUATEMALA",
          "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]

    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")
    gen = InputFileGenerator()
    gen.add_events([event_file_1, event_file_2])

    assert sorted(gen._events) == sorted(events)

    # With no filter applied, all events should remain available.
    assert sorted(gen._filtered_events) == sorted(gen._events)

    # Event filters are a simple list of URLs.
    gen.event_filter = ["smi:local/Event/2013-01-07T13:58:41.209477"]
    # Only the last event should now be available.
    assert sorted(gen._filtered_events) == sorted(events[1:])

    # Removing the filter should make the missing events reappear.
    gen.event_filter = None
    assert sorted(gen._filtered_events) == sorted(gen._events)
    gen.station_filter = []
    assert sorted(gen._filtered_events) == sorted(gen._events)
def test_wrong_stf_header_format():
    """
    Simple test asserting that the correct exceptions get raised when
    attempting to write invalid STF headers.
    """
    station = {
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10}
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(station)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False

    gen.config.stf_header = "simple string"
    # Write the input files to a dictionary.
    with pytest.raises(ValueError):
        gen.write(format="ses3d_4_1")

    gen.config.stf_header = ["1", "2", "3", "4", "5", "6"]
    # Write the input files to a dictionary.
    with pytest.raises(ValueError):
        gen.write(format="ses3d_4_1")
    def test_real_world_example(self):
        """
        Test that compares the created input files to those from a real world
        example.

        The only artificial thing is the source-time function but that is
        trivial to verify.

        This is a fairly comprehensive test but should be used in comparison
        with other unit tests.
        """
        gen = InputFileGenerator()

        axisem_example_path = os.path.join(self.data_dir, "axisem_example")
        gen.add_stations([
            os.path.join(self.data_dir, "dataless.seed.BW_FURT"),
            os.path.join(self.data_dir, "dataless.seed.BW_RJOB")])
        gen.add_events(os.path.join(self.data_dir, "event1.xml"))

        # Configure it.
        gen.config.dominant_period = 10.0
        gen.config.seismogram_length = 1000.0
        gen.config.number_of_processors = 12
        gen.config.background_model = 'prem'
        # Write the input files to a dictionary.
        input_files = gen.write(format='axisem')

        # The rest is only for asserting the produced files.
        for filename in glob.glob(os.path.join(axisem_example_path,
                                               "*_example")):
            with open(filename, "rt") as open_file:
                real_file = open_file.read()
            filename = os.path.basename(filename[:-8])

            if filename not in input_files:
                msg = "File '%s' has not been generated" % filename
                raise AssertionError(msg)

            lines = real_file.splitlines()
            new_lines = input_files[filename].splitlines()

            if len(lines) != len(new_lines):
                msg = ("File '%s' does not have the same number of lines "
                    "for the real (%i lines) and generated (%i lines) "
                    "input file") % (filename, len(lines), len(new_lines))
                raise AssertionError(msg)

            for line, new_line in zip(lines, new_lines):
                if line != new_line:
                    msg = "Line differs in file '%s'.\n" % filename
                    msg += "Expected: \"%s\"\n" % line
                    msg += "Got:      \"%s\"\n" % new_line
                    raise AssertionError(msg)
def test_test_all_files_have_an_empty_last_line():
    """
    Tests that all files have an empty last line.
    """
    station = {
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10
    }
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0
    }

    gen = InputFileGenerator()
    gen.add_stations(station)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False

    # Write the input files to a dictionary.
    input_files = gen.write(format="ses3d_4_0")
    for input_file in input_files.itervalues():
        assert input_file.endswith("\n\n") is True
def test_adding_dict_with_missing_keys():
    """
    Tests the adding of a dictionary with missing keys.
    """
    event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_events(event)
def test_adding_single_event_dictionary():
    """
    Adding a single event dictionary.
    """
    event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "description": "Some description",
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}
    gen = InputFileGenerator()
    gen.add_events(event)
    assert gen._events == [event]
def test_reading_QuakeML_from_BytesIO():
    """
    Tests the reading of QuakeML from BytesIO.
    """
    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")

    with open(event_file_1, "rb") as fh:
        event_file_1_mem = io.BytesIO(fh.read())

    with open(event_file_2, "rb") as fh:
        event_file_2_mem = io.BytesIO(fh.read())

    gen = InputFileGenerator()
    gen.add_events([event_file_1_mem, event_file_2_mem])

    # Sort to be able to compare.
    assert sorted(gen._events) == \
        [{"description": "FICTIONAL EVENT IN BAVARIA",
          "latitude": 45.0,
          "longitude": 12.1,
          "depth_in_km": 13.0,
          "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
          "m_rr": -2.11e+18,
          "m_tt": -4.22e+19,
          "m_pp": 4.43e+19,
          "m_rt": -9.35e+18,
          "m_rp": -8.38e+18,
          "m_tp": -6.44e+18,
          "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
         {"description": "GUATEMALA",
          "latitude": 13.93,
          "longitude": -92.47,
          "depth_in_km": 28.7,
          "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
          "m_rr": 1.02e+20,
          "m_tt": -7.96e+19,
          "m_pp": -2.19e+19,
          "m_rt": 6.94e+19,
          "m_rp": -4.08e+19,
          "m_tp": 4.09e+19,
          "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]
def test_event_filter_removed_everything_without_an_id():
    """
    An applied event filter will remove all events without an id.
    """
    events = [{
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18,
        "description": "Some description"
    }, {
        "latitude": 13.93,
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19,
        "description": None}]
    gen = InputFileGenerator()
    gen.add_events(events)

    assert sorted(gen._filtered_events) == sorted(events)

    # Applying a filter will remove everything.
    gen.event_filter = ["smi://some/url"]
    assert sorted(gen._filtered_events) == []

    # Removing the filter should make the missing events reappear.
    gen.event_filter = None
    assert sorted(gen._filtered_events) == sorted(gen._events)
    gen.station_filter = []
    assert sorted(gen._filtered_events) == sorted(gen._events)
def test_event_dictionary_automatic_type_conversion():
    """
    The types for the event dictionary should also undergo automatic type
    conversion.
    """
    event = {
        "description": 1,
        "latitude": "1",
        "longitude": "2",
        "depth_in_km": "3",
        "origin_time": "2012-01-01T00:00:00.000000Z",
        "m_rr": "-2.11e+18",
        "m_tt": "-4.22e+19",
        "m_pp": "4.43e+19",
        "m_rt": "-9.35e+18",
        "m_rp": "-8.38e+18",
        "m_tp": "-6.44e+18"}
    gen = InputFileGenerator()
    gen.add_events(event)
    assert type(gen._events[0]["latitude"]) == float
    assert type(gen._events[0]["longitude"]) == float
    assert type(gen._events[0]["depth_in_km"]) == float
    assert type(gen._events[0]["origin_time"]) == obspy.UTCDateTime
    assert type(gen._events[0]["m_rr"]) == float
    assert type(gen._events[0]["m_tt"]) == float
    assert type(gen._events[0]["m_pp"]) == float
    assert type(gen._events[0]["m_rt"]) == float
    assert type(gen._events[0]["m_rp"]) == float
    assert type(gen._events[0]["m_tp"]) == float

    assert gen._events == [{
        "description": "1",
        "latitude": 1.0,
        "longitude": 2.0,
        "depth_in_km": 3.0,
        "origin_time": obspy.UTCDateTime(2012, 1, 1),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}]
def test_adding_single_event_JSON():
    """
    Adding a single event as a JSON file.
    """
    event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": str(obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000)),
        "description": None,
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}
    gen = InputFileGenerator()
    gen.add_events(json.dumps(event))

    event["origin_time"] = obspy.UTCDateTime(event["origin_time"])
    assert gen._events == [event]
def test_additional_attributes_from_event_dicts_are_removed():
    """
    No need to carry around additional attributes.
    """
    event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "description": "Random description",
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18,
        "extra": "blub"}
    gen = InputFileGenerator()
    gen.add_events(event)

    del event["extra"]
    assert gen._events == [event]
def test_adding_a_catalog_object():
    """
    Tests adding an obspy.core.event.Catalog object.
    """
    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")

    cat = obspy.read_events(event_file_1)
    cat += obspy.read_events(event_file_2)

    gen = InputFileGenerator()
    gen.add_events(cat)

    assert sorted(gen._events) == \
        [{"description": "FICTIONAL EVENT IN BAVARIA",
          "latitude": 45.0,
          "longitude": 12.1,
          "depth_in_km": 13.0,
          "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
          "m_rr": -2.11e+18,
          "m_tt": -4.22e+19,
          "m_pp": 4.43e+19,
          "m_rt": -9.35e+18,
          "m_rp": -8.38e+18,
          "m_tp": -6.44e+18,
          "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
         {"description": "GUATEMALA",
          "latitude": 13.93,
          "longitude": -92.47,
          "depth_in_km": 28.7,
          "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
          "m_rr": 1.02e+20,
          "m_tt": -7.96e+19,
          "m_pp": -2.19e+19,
          "m_rt": 6.94e+19,
          "m_rp": -4.08e+19,
          "m_tp": 4.09e+19,
          "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]
def test_adding_a_event_object():
    """
    Tests adding an already existing event object.
    """
    event_file = os.path.join(DATA, "event1.xml")

    event = obspy.read_events(event_file)[0]

    gen = InputFileGenerator()
    gen.add_events([event])

    assert gen._events == [
        {"description": "FICTIONAL EVENT IN BAVARIA",
         "latitude": 45.0,
         "longitude": 12.1,
         "depth_in_km": 13.0,
         "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
         "m_rr": -2.11e+18,
         "m_tt": -4.22e+19,
         "m_pp": 4.43e+19,
         "m_rt": -9.35e+18,
         "m_rp": -8.38e+18,
         "m_tp": -6.44e+18,
         "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"}]
import os

from wfs_input_generator import InputFileGenerator



gen = InputFileGenerator()
gen.add_events("wfs_input_generator/tests/data/event1.xml")
gen.add_stations(["wfs_input_generator/tests/data/dataless.seed.BW_FURT", "wfs_input_generator/tests/data/dataless.seed.BW_RJOB"])
gen.config.mesh={'mesh':"eucrust_small_new"}
gen.config.model={'model':"model_eucrust_small_new.dat"}

gen.config.parameter={'version':18, 'dimension':3, 'advection':0, 'advection_velocity':(1.0, 1.0, 1.0),\
        'anisotropy':0, 'anelasticity':0, 'poroelasticity':0, 
        'adjoint':0, 'material_reference_values':(3600, 9.0e10, 1.11e11),\
        'randomfield':0, 'sourcetype':50, 'source_file':'source.dat', 'sponge':0,\
        'meshgenerator':"Gambit3D-Tetra",'fine_output':0, 'restartfile':0,\
        'DGMethod':1, 'CK':0,'fluxmethod':0, 'DGCycle':1, 'basisfunction_degree':0,\
        'reconstructed_basisfunction_degree':0,'stencil_security_factor':0,\
        'reconstruction_type':0, 'exponent_r':0, 'coefficient_epsilon':0,\
        'linear_weight':0, 'limiter_security_factor':0, 'minspace_order':5,\
        'maxspace_order':5, 'pAdaptivity_file_name':'pAdaptivity_file_name',\
        'material_basis_function_order':1, 'courant_number':0.5, 'min_time_step':10000,\
        'rotational_output':0, 'rotation_components':(1, 1, 1),\
        'variable_output':(0, 0, 0, 0, 0, 0, 1, 1, 1),\
        'material_parameters_output':(1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0),\
        'output_character':0 , 'output_format':1, 'timestep_output':50,\
        'time_output':25, 'output_index':1, 'sampling':1, 'max_time':2000,\
        'max_iteration':1000,'max_wallclocktime':1e20, 'delay':0}

test_dir="/home/msimon/svn/repos/verce/All/JRA/JRA1/python/test_ressources/inputfiles/"
def test_simple():
    """
    Test a very simple SPECFEM file.
    """
    stations = [
        {
            "id": "KO.ADVT",
            "latitude": 41.0,
            "longitude": 33.1234,
            "elevation_in_m": 10
        }, {
            "id": "KO.AFSR",
            "latitude": 40.000,
            "longitude": 33.2345,
            "elevation_in_m": 220
        }
    ]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it.
    gen.config.NPROC = 5
    gen.config.NSTEP = 10
    gen.config.DT = 15
    gen.config.SIMULATION_TYPE = 1

    # Write the input files to a dictionary.
    input_files = gen.write(format="SPECFEM3D_CARTESIAN")

    assert bool(input_files)

    assert sorted(input_files.keys()) == \
        sorted(["Par_file", "CMTSOLUTION", "STATIONS"])

    # Assert the STATIONS file.
    assert input_files["STATIONS"].splitlines() == [
        "ADVT KO 41.00000 33.12340 10.0 0.0",
        "AFSR KO 40.00000 33.23450 220.0 0.0"]

    # Assert the CMTSOLUTION file.
    assert input_files["CMTSOLUTION"].splitlines() == [
        "PDE 2012 4 12 7 15 48.50 39.26000 41.04000 5.00000 4.7 4.7 "
        "2012-04-12T07:15:48.500000Z_4.7",
        "event name:      0000000",
        "time shift:       0.0000",
        "half duration:    0.0000",
        "latitude:       39.26000",
        "longitude:      41.04000",
        "depth:          5.00000",
        "Mrr:         1e+23",
        "Mtt:         1e+23",
        "Mpp:         1e+23",
        "Mrt:         0",
        "Mrp:         0",
        "Mtp:         0"]

    # XXX: Extend test.
    par_file = input_files["Par_file"]
    assert "SIMULATION_TYPE" in par_file
    assert "NSTEP" in par_file
    def generate_input_files(self, event_name, template_name, simulation_type,
                             source_time_function):
        """
        Generate the input files for one event.

        :param event_name: The name of the event for which to generate the
            input files.
        :param template_name: The name of the input file template
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        :param source_time_function: A function source_time_function(npts,
            delta), taking the requested number of samples and the time spacing
            and returning an appropriate source time function as numpy array.
        """
        from lasif import utils
        from wfs_input_generator import InputFileGenerator

        # Get the events
        all_events = self.get_event_dict()
        if event_name not in all_events:
            msg = "Event '%s' not found in project." % event_name
            raise ValueError(msg)

        event = self.get_event(event_name)

        # Get the input file templates.
        template_filename = os.path.join(self.paths["templates"],
                                         template_name + ".xml")
        if not os.path.exists(template_filename):
            msg = "Template '%s' does not exists." % template_name
            raise ValueError(msg)
        input_file = utils.read_ses3d_4_0_template(template_filename)

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.get_stations_for_event(event_name)
        stations = [{
            "id": key,
            "latitude": value["latitude"],
            "longitude": value["longitude"],
            "elevation_in_m": value["elevation"],
            "local_depth_in_m": value["local_depth"]
        } for key, value in stations.iteritems()]

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event)
        gen.add_stations(stations)

        npts = input_file["simulation_parameters"]["number_of_time_steps"]
        delta = input_file["simulation_parameters"]["time_increment"]
        # Time configuration.
        gen.config.number_of_time_steps = npts
        gen.config.time_increment_in_s = delta

        # SES3D specific configuration
        gen.config.output_folder = input_file["output_directory"]
        gen.config.simulation_type = simulation_type

        gen.config.adjoint_forward_wavefield_output_folder = \
            input_file["adjoint_output_parameters"][
                "forward_field_output_directory"]
        gen.config.adjoint_forward_sampling_rate = \
            input_file["adjoint_output_parameters"][
                "sampling_rate_of_forward_field"]
        gen.config.is_dissipative = \
            input_file["simulation_parameters"]["is_dissipative"]

        # Discretization
        disc = input_file["computational_setup"]
        gen.config.nx_global = disc["nx_global"]
        gen.config.ny_global = disc["ny_global"]
        gen.config.nz_global = disc["nz_global"]
        gen.config.px = disc["px_processors_in_theta_direction"]
        gen.config.py = disc["py_processors_in_phi_direction"]
        gen.config.pz = disc["pz_processors_in_r_direction"]
        gen.config.lagrange_polynomial_degree = \
            disc["lagrange_polynomial_degree"]

        # Configure the mesh.
        gen.config.mesh_min_latitude = \
            self.domain["bounds"]["minimum_latitude"]
        gen.config.mesh_max_latitude = \
            self.domain["bounds"]["maximum_latitude"]
        gen.config.mesh_min_longitude = \
            self.domain["bounds"]["minimum_longitude"]
        gen.config.mesh_max_longitude = \
            self.domain["bounds"]["maximum_longitude"]
        gen.config.mesh_min_depth_in_km = \
            self.domain["bounds"]["minimum_depth_in_km"]
        gen.config.mesh_max_depth_in_km = \
            self.domain["bounds"]["maximum_depth_in_km"]

        gen.config.rotation_angle_in_degree = self.domain["rotation_angle"]
        gen.config.rotation_axis = self.domain["rotation_axis"]

        gen.config.source_time_function = source_time_function(
            int(npts), float(delta))

        output_dir = self.get_output_folder("input_files___%s" % template_name)

        gen.write(format="ses3d_4_0", output_dir=output_dir)
        print "Written files to '%s'." % output_dir
    def compute(self):
        gen = InputFileGenerator()
        userconf = json.load(open(self.parameters["solver_conf_file"]))

        fields = userconf["fields"]

        for x in fields:
            gen.add_configuration({x["name"]: self.strToBool(x["value"])})

        with open(self.parameters["quakeml"], "r") as events:
            quakeml = events.read()

        # unicode_qml = quakeml.decode('utf-8')
        # data = unicode_qml.encode('ascii', 'ignore')

        cat = readQuakeML(quakeml)
        events = []
        # cat = obspy.readEvents(data)
        # Remove all events with no moment tensor.
        for event in cat:
            for fm in event.focal_mechanisms:
                if fm.moment_tensor and fm.moment_tensor.tensor:
                    events.append(event)
                    break
        cat.events = events

        gen.add_events(cat)

        evn = 0
        outputdir = ""
        for x in userconf["events"]:
            gen.event_filter = [x]

            if self.parameters["station_format"] == "stationXML":
                gen.add_stations(self.parameters["stations_file"])

            if self.parameters["station_format"] == "points":
                stlist = []
                with open(self.parameters["stations_file"]) as f:
                    k = False
                    for line in f:

                        if (k == False):
                            k = True
                        else:
                            station = {}
                            l = line.strip().split(" ")
                            station.update({"id": l[1] + "." + l[0]})
                            station.update({"latitude": float(l[3])})
                            station.update({"longitude": float(l[2])})
                            station.update({"elevation_in_m": float(l[4])})
                            station.update({"local_depth_in_m": float(l[5])})
                            stlist.append(station)

                gen.add_stations(stlist)

            gen.station_filter = userconf["stations"]

            outputdir = self.outputdest + userconf["runId"] + "/" + userconf[
                "runId"] + "_" + str(evn) + "/DATA"
            output_files = gen.write(format=userconf["solver"],
                                     output_dir=outputdir)

            locations = []
            for x in output_files.keys():
                locations.append("file://" + socket.gethostname() + outputdir +
                                 "/" + x)

            self.addOutput(gen._filtered_events,
                           location=locations,
                           metadata=self.extractEventMetadata(
                               outputdir, gen._filtered_events),
                           control={"con:immediateAccess": "true"})

            evn += 1

        self.addOutput(outputdir,
                       location=locations,
                       metadata={"to_xdecompose": str(outputdir)},
                       control={"con:immediateAccess": "true"})
import glob

from wfs_input_generator import InputFileGenerator
from obspy.core import UTCDateTime

gen = InputFileGenerator()
seissol_example_path = 'wfs_input_generator/tests/data/seissol_example/'
data_dir = 'wfs_input_generator/tests/data/'
gen.add_stations(glob.glob(data_dir + '*dataless*'))
event = {
    "latitude": 48.9,
    "longitude": -2.3,
    "depth_in_km": 200.0,
    "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
    "m_rr": -2.11e+18,
    "m_tt": -4.22e+19,
    "m_pp": 4.43e+19,
    "m_rt": -9.35e+18,
    "m_rp": -8.38e+18,
    "m_tp": -6.44e+18}
gen.add_events(event)
gen.config.mesh = 'most_simple_tet'
gen.config.model = 'PREM'
gen.config.working_directory = seissol_example_path
gen.config.max_time = 1000.0
gen.config.number_of_processors = 16
gen.write(format='seissol_1_0', output_dir=seissol_example_path)
This also (at least right now) serves as the documentation of the specific
writer.

:copyright:
    Lion Krischer ([email protected]), 2013
:license:
    GNU General Public License, Version 3
    (http://www.gnu.org/copyleft/gpl.html)
"""
import numpy as np
from wfs_input_generator import InputFileGenerator

gen = InputFileGenerator()

# SES3D 4.0 can only simulate one event at a time.
gen.add_events("../tests/data/event1.xml")
gen.add_stations(["../tests/data/dataless.seed.BW_FURT",
    "../tests/data/dataless.seed.BW_RJOB"])

# Just perform a standard forward simulation.
gen.config.simulation_type = "normal simulation"

gen.config.output_folder = "../OUTPUT"

# Time configuration.
gen.config.number_of_time_steps = 700
gen.config.time_increment_in_s = 0.75

# SES3D specific configuration
gen.config.output_directory = "../DATA/OUTPUT/1.8s"
# SES3D specific discretization
def test_against_example_file():
    """
    Tests against a known example file.
    """
    stations = [
        {
            "id": "KO.ADVT",
            "latitude": 41.0,
            "longitude": 33.1234,
            "elevation_in_m": 10
        }, {
            "id": "KO.AFSR",
            "latitude": 40.000,
            "longitude": 33.2345,
            "elevation_in_m": 220
        }
    ]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it. Emulate an example in the SPECFEM directory.
    gen.config.NPROC_XI = 2
    gen.config.NPROC_ETA = 2
    gen.config.RECORD_LENGTH_IN_MINUTES = 10.0
    gen.config.SIMULATION_TYPE = 1
    gen.config.NCHUNKS = 6
    gen.config.NEX_XI = 64
    gen.config.NEX_ETA = 64
    gen.config.MODEL = "1D_isotropic_prem"

    # Non-standard values in the example file.
    gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = 20.0
    gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = 20.0
    gen.config.CENTER_LATITUDE_IN_DEGREES = 40.0
    gen.config.CENTER_LONGITUDE_IN_DEGREES = 25.0
    gen.config.WRITE_SEISMOGRAMS_BY_MASTER = False
    gen.config.NTSTEP_BETWEEN_OUTPUT_INFO = 500
    gen.config.NTSTEP_BETWEEN_FRAMES = 50
    gen.config.MOVIE_COARSE = True

    # Write the input files to a dictionary.
    input_files = gen.write(format="SPECFEM3D_GLOBE")

    assert bool(input_files)

    assert sorted(input_files.keys()) == \
        sorted(["Par_file", "CMTSOLUTION", "STATIONS"])

    # Assert the STATIONS file.
    assert input_files["STATIONS"].splitlines() == [
        "ADVT KO 41.00000 33.12340 10.0 0.0",
        "AFSR KO 40.00000 33.23450 220.0 0.0"]

    # Assert the CMTSOLUTION file.
    assert input_files["CMTSOLUTION"].splitlines() == [
        "PDE 2012 4 12 7 15 48.50 39.26000 41.04000 5.00000 4.7 4.7 "
        "2012-04-12T07:15:48.500000Z_4.7",
        "event name:      0000000",
        "time shift:       0.0000",
        "half duration:    0.0000",
        "latitude:       39.26000",
        "longitude:      41.04000",
        "depth:          5.00000",
        "Mrr:         1e+23",
        "Mtt:         1e+23",
        "Mpp:         1e+23",
        "Mrt:         0",
        "Mrp:         0",
        "Mtp:         0"]

    par_file = input_files["Par_file"]
    assert "SIMULATION_TYPE" in par_file
    assert "NSTEP" in par_file

    # Example Par_file from the repository.
    original_par_file = os.path.join(DATA, "specfem_globe",
                                     "Par_file")
    with open(original_par_file, "rt") as fh:
        original_par_file = fh.read().strip()
    assert original_par_file == par_file
    def generate_input_files(self, event_name, template_name, simulation_type,
            source_time_function):
        """
        Generate the input files for one event.

        :param event_name: The name of the event for which to generate the
            input files.
        :param template_name: The name of the input file template
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        :param source_time_function: A function source_time_function(npts,
            delta), taking the requested number of samples and the time spacing
            and returning an appropriate source time function as a NumPy array.
        """
        from lasif import utils
        from wfs_input_generator import InputFileGenerator

        # Get the events
        all_events = self.get_event_dict()
        if event_name not in all_events:
            msg = "Event '%s' not found in project." % event_name
            raise ValueError(msg)

        event = self.get_event(event_name)

        # Get the input file templates.
        template_filename = os.path.join(self.paths["templates"],
            template_name + ".xml")
        if not os.path.exists(template_filename):
            msg = "Template '%s' does not exists." % template_name
            raise ValueError(msg)
        input_file = utils.read_ses3d_4_0_template(template_filename)

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.get_stations_for_event(event_name)
        stations = [{"id": key, "latitude": value["latitude"],
            "longitude": value["longitude"],
            "elevation_in_m": value["elevation"],
            "local_depth_in_m": value["local_depth"]} for key, value in
            stations.iteritems()]

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event)
        gen.add_stations(stations)

        npts = input_file["simulation_parameters"]["number_of_time_steps"]
        delta = input_file["simulation_parameters"]["time_increment"]
        # Time configuration.
        gen.config.number_of_time_steps = npts
        gen.config.time_increment_in_s = delta

        # SES3D specific configuration
        gen.config.output_folder = input_file["output_directory"]
        gen.config.simulation_type = simulation_type

        gen.config.adjoint_forward_wavefield_output_folder = \
            input_file["adjoint_output_parameters"][
                "forward_field_output_directory"]
        gen.config.adjoint_forward_sampling_rate = \
            input_file["adjoint_output_parameters"][
                "sampling_rate_of_forward_field"]
        gen.config.is_dissipative = \
            input_file["simulation_parameters"]["is_dissipative"]

        # Discretization
        disc = input_file["computational_setup"]
        gen.config.nx_global = disc["nx_global"]
        gen.config.ny_global = disc["ny_global"]
        gen.config.nz_global = disc["nz_global"]
        gen.config.px = disc["px_processors_in_theta_direction"]
        gen.config.py = disc["py_processors_in_phi_direction"]
        gen.config.pz = disc["pz_processors_in_r_direction"]
        gen.config.lagrange_polynomial_degree = \
            disc["lagrange_polynomial_degree"]

        # Configure the mesh.
        gen.config.mesh_min_latitude = \
            self.domain["bounds"]["minimum_latitude"]
        gen.config.mesh_max_latitude = \
            self.domain["bounds"]["maximum_latitude"]
        gen.config.mesh_min_longitude = \
            self.domain["bounds"]["minimum_longitude"]
        gen.config.mesh_max_longitude = \
            self.domain["bounds"]["maximum_longitude"]
        gen.config.mesh_min_depth_in_km = \
            self.domain["bounds"]["minimum_depth_in_km"]
        gen.config.mesh_max_depth_in_km = \
            self.domain["bounds"]["maximum_depth_in_km"]

        gen.config.rotation_angle_in_degree = self.domain["rotation_angle"]
        gen.config.rotation_axis = self.domain["rotation_axis"]

        gen.config.source_time_function = source_time_function(int(npts),
            float(delta))

        output_dir = self.get_output_folder("input_files___%s" % template_name)

        gen.write(format="ses3d_4_0", output_dir=output_dir)
        print "Written files to '%s'." % output_dir
Example #44
"""
:copyright:
    Lion Krischer ([email protected]), 2013
:license:
    GNU Lesser General Public License, Version 3
    (http://www.gnu.org/copyleft/lesser.html)
"""
from wfs_input_generator import InputFileGenerator

import os

gen = InputFileGenerator()

# Paths to the event and station data used in this example.
data_path = os.path.join(os.pardir, "tests", "data")
gen.add_events(os.path.join(data_path, "event1.xml"))
gen.add_stations([os.path.join(data_path, "dataless.seed.BW_FURT"),
    os.path.join(data_path, "dataless.seed.BW_RJOB")])

# Time configuration
gen.config.time_config.time_steps = 700
gen.config.time_config.time_delta = 0.75

# Configure the mesh. Make it have ten elements in each direction.
gen.config.mesh.n_north_south = 10
gen.config.mesh.n_west_east = 10
gen.config.mesh.n_down_up = 10
gen.config.mesh.min_latitude = -10.0
gen.config.mesh.max_latitude = 10.0
gen.config.mesh.min_longitude = 0.0
gen.config.mesh.max_longitude = 20.0
Example #45
    def generate_input_files(self, iteration_name, event_name,
                             simulation_type):
        """
        Generate the input files for one event.

        :param iteration_name: The name of the iteration.
        :param event_name: The name of the event for which to generate the
            input files.
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        """
        from wfs_input_generator import InputFileGenerator

        # =====================================================================
        # read iteration xml file, get event and list of stations
        # =====================================================================

        iteration = self.comm.iterations.get(iteration_name)

        # Check that the event is part of the iterations.
        if event_name not in iteration.events:
            msg = ("Event '%s' not part of iteration '%s'.\nEvents available "
                   "in iteration:\n\t%s" %
                   (event_name, iteration_name, "\n\t".join(
                       sorted(iteration.events.keys()))))
            raise ValueError(msg)

        event = self.comm.events.get(event_name)
        stations_for_event = iteration.events[event_name]["stations"].keys()

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.comm.query.get_all_stations_for_event(event_name)
        stations = [{"id": key, "latitude": value["latitude"],
                     "longitude": value["longitude"],
                     "elevation_in_m": value["elevation_in_m"],
                     "local_depth_in_m": value["local_depth_in_m"]}
                    for key, value in stations.items()
                    if key in stations_for_event]

        # =====================================================================
        # set solver options
        # =====================================================================

        solver = iteration.solver_settings

        # Check that the solver format is one of the supported formats.
        solver_format = solver["solver"].lower()
        if solver_format not in ["ses3d 4.1", "ses3d 2.0",
                                 "specfem3d cartesian", "specfem3d globe cem"]:
            msg = ("Currently only SES3D 4.1, SES3D 2.0, SPECFEM3D "
                   "CARTESIAN, and SPECFEM3D GLOBE CEM are supported.")
            raise ValueError(msg)
        solver_format = solver_format.replace(' ', '_')
        solver_format = solver_format.replace('.', '_')

        solver = solver["solver_settings"]

        # =====================================================================
        # create the input file generator, add event and stations,
        # populate the configuration items
        # =====================================================================

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event["filename"])
        gen.add_stations(stations)

        if solver_format in ["ses3d_4_1", "ses3d_2_0"]:
            # event tag
            gen.config.event_tag = event_name

            # Time configuration.
            npts = solver["simulation_parameters"]["number_of_time_steps"]
            delta = solver["simulation_parameters"]["time_increment"]
            gen.config.number_of_time_steps = npts
            gen.config.time_increment_in_s = delta

            # SES3D specific configuration
            gen.config.output_folder = solver["output_directory"].replace(
                "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.simulation_type = simulation_type

            gen.config.adjoint_forward_wavefield_output_folder = \
                solver["adjoint_output_parameters"][
                    "forward_field_output_directory"].replace(
                    "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.adjoint_forward_sampling_rate = \
                solver["adjoint_output_parameters"][
                    "sampling_rate_of_forward_field"]

            # Visco-elastic dissipation
            diss = solver["simulation_parameters"]["is_dissipative"]
            gen.config.is_dissipative = diss

            # Only SES3D 4.1 has the relaxation parameters.
            if solver_format == "ses3d_4_1":
                gen.config.Q_model_relaxation_times = \
                    solver["relaxation_parameter_list"]["tau"]
                gen.config.Q_model_weights_of_relaxation_mechanisms = \
                    solver["relaxation_parameter_list"]["w"]

            # Discretization
            disc = solver["computational_setup"]
            gen.config.nx_global = disc["nx_global"]
            gen.config.ny_global = disc["ny_global"]
            gen.config.nz_global = disc["nz_global"]
            gen.config.px = disc["px_processors_in_theta_direction"]
            gen.config.py = disc["py_processors_in_phi_direction"]
            gen.config.pz = disc["pz_processors_in_r_direction"]
            gen.config.lagrange_polynomial_degree = \
                disc["lagrange_polynomial_degree"]

            # Configure the mesh.
            domain = self.comm.project.domain
            gen.config.mesh_min_latitude = \
                domain["bounds"]["minimum_latitude"]
            gen.config.mesh_max_latitude = \
                domain["bounds"]["maximum_latitude"]
            gen.config.mesh_min_longitude = \
                domain["bounds"]["minimum_longitude"]
            gen.config.mesh_max_longitude = \
                domain["bounds"]["maximum_longitude"]
            gen.config.mesh_min_depth_in_km = \
                domain["bounds"]["minimum_depth_in_km"]
            gen.config.mesh_max_depth_in_km = \
                domain["bounds"]["maximum_depth_in_km"]

            # Set the rotation parameters.
            gen.config.rotation_angle_in_degree = domain["rotation_angle"]
            gen.config.rotation_axis = domain["rotation_axis"]

            # Make source time function
            gen.config.source_time_function = \
                iteration.get_source_time_function()["data"]
        elif solver_format == "specfem3d_cartesian":
            gen.config.NSTEP = \
                solver["simulation_parameters"]["number_of_time_steps"]
            gen.config.DT = \
                solver["simulation_parameters"]["time_increment"]
            gen.config.NPROC = \
                solver["computational_setup"]["number_of_processors"]
            if simulation_type == "normal simulation":
                msg = ("'normal_simulate' not supported for SPECFEM3D "
                       "Cartesian. Please choose either 'adjoint_forward' or "
                       "'adjoint_reverse'.")
                raise NotImplementedError(msg)
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
            else:
                raise NotImplementedError
            solver_format = solver_format.upper()

        elif solver_format == "specfem3d_globe_cem":
            cs = solver["computational_setup"]
            gen.config.NPROC_XI = cs["number_of_processors_xi"]
            gen.config.NPROC_ETA = cs["number_of_processors_eta"]
            gen.config.NCHUNKS = cs["number_of_chunks"]
            gen.config.NEX_XI = cs["elements_per_chunk_xi"]
            gen.config.NEX_ETA = cs["elements_per_chunk_eta"]
            gen.config.OCEANS = cs["simulate_oceans"]
            gen.config.ELLIPTICITY = cs["simulate_ellipticity"]
            gen.config.TOPOGRAPHY = cs["simulate_topography"]
            gen.config.GRAVITY = cs["simulate_gravity"]
            gen.config.ROTATION = cs["simulate_rotation"]
            gen.config.ATTENUATION = cs["simulate_attenuation"]
            gen.config.ABSORBING_CONDITIONS = True
            if cs["fast_undo_attenuation"]:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = True
                gen.config.UNDO_ATTENUATION = False
            else:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = False
                gen.config.UNDO_ATTENUATION = True
            gen.config.GPU_MODE = cs["use_gpu"]
            gen.config.SOURCE_TIME_FUNCTION = \
                iteration.get_source_time_function()["data"]

            if simulation_type == "normal simulation":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = False
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = True
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
                gen.config.SAVE_FORWARD = True
            else:
                raise NotImplementedError

            # Use the current domain setting to derive the bounds in the way
            # SPECFEM specifies them.
            domain = self.comm.project.domain

            lat_range = domain["bounds"]["maximum_latitude"] - \
                domain["bounds"]["minimum_latitude"]
            lng_range = domain["bounds"]["maximum_longitude"] - \
                        domain["bounds"]["minimum_longitude"]

            c_lat = \
                domain["bounds"]["minimum_latitude"] + lat_range / 2.0
            c_lng = \
                domain["bounds"]["minimum_longitude"] + lng_range / 2.0

            # Rotate the point.
            c_lat_1, c_lng_1 = rotations.rotate_lat_lon(
                c_lat, c_lng, domain["rotation_axis"],
                domain["rotation_angle"])

            # SES3D rotation.
            A = rotations._get_rotation_matrix(
                domain["rotation_axis"], domain["rotation_angle"])

            latitude_rotation = -(c_lat_1 - c_lat)
            longitude_rotation = c_lng_1 - c_lng

            # Rotate the latitude. The rotation axis is latitude 0 and
            # the center longitude + 90 degree
            B = rotations._get_rotation_matrix(
                rotations.lat_lon_radius_to_xyz(0.0, c_lng + 90, 1.0),
                latitude_rotation)
            # Rotate around the North pole.
            C = rotations._get_rotation_matrix(
                [0.0, 0.0, 1.0], longitude_rotation)

            D = A * np.linalg.inv(C * B)

            axis, angle = rotations._get_axis_and_angle_from_rotation_matrix(D)
            rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            # Consistency check
            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                axis *= -1.0
                angle *= -1.0
                rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                        abs(rotated_axis[1] - c_lng_1) >= 0.01:
                msg = "Failed to describe the domain in terms that SPECFEM " \
                      "understands"
                raise LASIFError(msg)

            gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = lng_range
            gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = lat_range
            gen.config.CENTER_LATITUDE_IN_DEGREES = c_lat_1
            gen.config.CENTER_LONGITUDE_IN_DEGREES = c_lng_1
            gen.config.GAMMA_ROTATION_AZIMUTH = angle

            gen.config.MODEL = cs["model"]

            pp = iteration.get_process_params()
            gen.config.RECORD_LENGTH_IN_MINUTES = \
                (pp["npts"] * pp["dt"]) / 60.0
            solver_format = solver_format.upper()

        else:
            msg = "Unknown solver '%s'." % solver_format
            raise NotImplementedError(msg)

        # =================================================================
        # output
        # =================================================================
        output_dir = self.comm.project.get_output_folder(
            "input_files___ITERATION_%s__%s__EVENT_%s" % (
                iteration_name, simulation_type.replace(" ", "_"),
                event_name))

        gen.write(format=solver_format, output_dir=output_dir)
        print "Written files to '%s'." % output_dir
def test_real_world_example():
    """
    Test that compares the created input files to those from a real world
    example.

    The only artificial thing is the source-time function but that is
    trivial to verify.

    This is a fairly comprehensive test but should be used in conjunction
    with other unit tests.
    """
    stations = [{
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10
    }, {
        "id": "KO.AFSR",
        "latitude": 40.000,
        "longitude": 33.2345,
        "elevation_in_m": 220
    }]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0
    }

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False
    gen.config.adjoint_forward_wavefield_output_folder = \
        "/tmp/some_folder/"
    gen.config.displacement_snapshot_sampling = 15000
    gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
    gen.config.Q_model_weights_of_relaxation_mechanisms = \
        [2.5100, 2.4354, 0.0879]

    # Write the input files to a dictionary.
    input_files = gen.write(format="ses3d_4_0")

    # The rest is only for asserting the produced files.
    path = os.path.join(DATA, "ses3d_4_0_real_world_example")
    for filename in glob.glob(os.path.join(path, "*")):
        with open(filename, "rt") as open_file:
            real_file = open_file.read()
        filename = os.path.basename(filename)

        if filename not in input_files:
            msg = "File '%s' has not been generated" % filename
            raise AssertionError(msg)

        lines = real_file.splitlines()
        new_lines = input_files[filename].splitlines()

        if len(lines) != len(new_lines):
            msg = ("File '%s' does not have the same number of lines "
                   "for the real (%i lines) and generated (%i lines) "
                   "input file") % (filename, len(lines), len(new_lines))
            raise AssertionError(msg)

        for line, new_line in zip(lines, new_lines):
            if line != new_line:
                msg = "Line differs in file '%s'.\n" % filename
                msg += "Expected: \"%s\"\n" % line
                msg += "Got:      \"%s\"\n" % new_line
                raise AssertionError(msg)
Example #47
    def compute(self):
        gen = InputFileGenerator()
        with open(self.parameters["solver_conf_file"]) as fh:
            userconf = json.load(fh)

        # Add all configuration items from the user configuration.
        for field in userconf["fields"]:
            gen.add_configuration(
                {field["name"]: self.strToBool(field["value"])})

        with open(self.parameters["quakeml"], "r") as fh:
            quakeml = fh.read()

        cat = readQuakeML(quakeml)

        # Remove all events without a moment tensor.
        events = []
        for event in cat:
            for fm in event.focal_mechanisms:
                if fm.moment_tensor and fm.moment_tensor.tensor:
                    events.append(event)
                    break
        cat.events = events

        gen.add_events(cat)

        outputdir = ""
        locations = []
        for evn, event_id in enumerate(userconf["events"]):
            gen.event_filter = [event_id]

            if self.parameters["station_format"] == "stationXML":
                gen.add_stations(self.parameters["stations_file"])

            if self.parameters["station_format"] == "points":
                # Parse a whitespace separated station file. The first line
                # is a header and is skipped.
                stlist = []
                with open(self.parameters["stations_file"]) as f:
                    for i, line in enumerate(f):
                        if i == 0:
                            continue
                        l = line.strip().split(" ")
                        stlist.append({
                            "id": l[1] + "." + l[0],
                            "latitude": float(l[3]),
                            "longitude": float(l[2]),
                            "elevation_in_m": float(l[4]),
                            "local_depth_in_m": float(l[5])})
                gen.add_stations(stlist)

            gen.station_filter = userconf["stations"]

            outputdir = (self.outputdest + userconf["runId"] + "/" +
                         userconf["runId"] + "_" + str(evn) + "/DATA")
            output_files = gen.write(format=userconf["solver"],
                                     output_dir=outputdir)

            locations = ["file://" + socket.gethostname() + outputdir +
                         "/" + name for name in output_files.keys()]

            self.addOutput(
                gen._filtered_events, location=locations,
                metadata=self.extractEventMetadata(
                    outputdir, gen._filtered_events),
                control={"con:immediateAccess": "true"})

        self.addOutput(
            outputdir, location=locations,
            metadata={"to_xdecompose": str(outputdir)},
            control={"con:immediateAccess": "true"})
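# For illustration: the "points" station file parsed above is assumed to be a
# plain text file with one header line followed by single-space separated
# columns in the order
#   station  network  longitude  latitude  elevation_in_m  local_depth_in_m
# (inferred from the index-based parsing; the exact header wording is not
# specified anywhere in this example). A hypothetical file could look like:
example_points_file = """STA NET LON LAT ELEV DEPTH
ADVT KO 33.1234 41.0000 10.0 0.0
AFSR KO 33.2345 40.0000 220.0 0.0
"""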
def test_simple():
    """
    Test a very simple SPECFEM file.
    """
    stations = [
        {
            "id": "KO.ADVT",
            "latitude": 41.0,
            "longitude": 33.1234,
            "elevation_in_m": 10
        }, {
            "id": "KO.AFSR",
            "latitude": 40.000,
            "longitude": 33.2345,
            "elevation_in_m": 220
        }
    ]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it.
    gen.config.NPROC_XI = 5
    gen.config.NPROC_ETA = 5
    gen.config.RECORD_LENGTH_IN_MINUTES = 15
    gen.config.SIMULATION_TYPE = 1
    gen.config.NCHUNKS = 1
    gen.config.NEX_XI = 240
    gen.config.NEX_ETA = 240
    gen.config.NPROC_XI = 5
    gen.config.NPROC_ETA = 5
    gen.config.MODEL = "CEM_REQUEST"

    # Write the input files to a dictionary.
    input_files = gen.write(format="SPECFEM3D_GLOBE_CEM")

    assert bool(input_files)

    assert sorted(input_files.keys()) == \
        sorted(["Par_file", "CMTSOLUTION", "STATIONS"])

    # Assert the STATIONS file.
    assert input_files["STATIONS"].splitlines() == [
        "ADVT KO 41.00000 33.12340 10.0 0.0",
        "AFSR KO 40.00000 33.23450 220.0 0.0"]

    # Assert the CMTSOLUTION file.
    assert input_files["CMTSOLUTION"].splitlines() == [
        "PDE 2012 4 12 7 15 48.50 39.26000 41.04000 5.00000 4.7 4.7 "
        "2012-04-12T07:15:48.500000Z_4.7",
        "event name:      0000000",
        "time shift:       0.0000",
        "half duration:    0.0000",
        "latitude:       39.26000",
        "longitude:      41.04000",
        "depth:          5.00000",
        "Mrr:         1e+23",
        "Mtt:         1e+23",
        "Mpp:         1e+23",
        "Mrt:         0",
        "Mrp:         0",
        "Mtp:         0"]

    # Compare the Par_file to a working one.
    par_file = input_files["Par_file"]

    with open(os.path.join(DATA, "specfem_globe_cem", "Par_file")) as fh:
        expected_par_file = fh.read()

    actual_par_file_lines = par_file.splitlines()
    expected_par_file_lines = expected_par_file.splitlines()

    for actual, expected in zip(actual_par_file_lines,
                                expected_par_file_lines):
        assert actual == expected
def test_external_source_time_function():
    """
    Tests the handling of an external source time function.
    """
    stations = [
        {
            "id": "KO.ADVT",
            "latitude": 41.0,
            "longitude": 33.1234,
            "elevation_in_m": 10
        }, {
            "id": "KO.AFSR",
            "latitude": 40.000,
            "longitude": 33.2345,
            "elevation_in_m": 220
        }
    ]
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}

    gen = InputFileGenerator()
    gen.add_stations(stations)
    gen.add_events(event)

    # Configure it.
    gen.config.NPROC_XI = 5
    gen.config.NPROC_ETA = 5
    gen.config.RECORD_LENGTH_IN_MINUTES = 15
    gen.config.SIMULATION_TYPE = 1
    gen.config.NCHUNKS = 1
    gen.config.NEX_XI = 240
    gen.config.NEX_ETA = 240
    gen.config.NPROC_XI = 5
    gen.config.NPROC_ETA = 5
    gen.config.MODEL = "CEM_REQUEST"

    # Write the input files to a dictionary.
    input_files = gen.write(format="SPECFEM3D_GLOBE_CEM")

    # If no source time function is specified, the external source time
    # function flag must be false.
    assert sorted(input_files.keys()) == \
        ["CMTSOLUTION", "Par_file", "STATIONS"]
    assert "EXTERNAL_SOURCE_TIME_FUNCTION   =  .false." in \
        input_files["Par_file"]

    # Now if one is specified it should also be written.
    gen.config.SOURCE_TIME_FUNCTION = np.arange(10)

    input_files = gen.write(format="SPECFEM3D_GLOBE_CEM")

    assert sorted(input_files.keys()) == \
        ["CMTSOLUTION", "Par_file", "STATIONS", "STF"]
    assert "EXTERNAL_SOURCE_TIME_FUNCTION   =  .true." in \
        input_files["Par_file"]

    data = []
    for line in input_files["STF"].splitlines():
        if line.startswith("#"):
            continue
        data.append(float(line.strip()))
    assert data == list(map(float, range(10)))
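# Based on the parsing above, the "STF" file is plain text: comment lines
# start with "#" and every remaining line holds one floating point sample.
# An illustrative fragment (the header comments actually written by the
# generator are not shown in this example):
illustrative_stf = """# source time function
0.0
1.0
2.0
"""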
    def test_real_world_example(self):
        """
        Test that compares the created input files to those from a real world
        example.

        The only artificial thing is the source-time function but that is
        trivial to verify.

        This is a fairly comprehensive test but should be used in conjunction
        with other unit tests.
        """
        stations = [
            {
                "id": "KO.ADVT",
                "latitude": 41.0,
                "longitude": 33.1234,
                "elevation_in_m": 10
            }, {
                "id": "KO.AFSR",
                "latitude": 40.000,
                "longitude": 33.2345,
                "elevation_in_m": 220
            }
        ]
        event = {
            "latitude": 39.260,
            "longitude": 41.040,
            "depth_in_km": 5.0,
            "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
            "m_rr": 1.0e16,
            "m_tt": 1.0e16,
            "m_pp": 1.0e16,
            "m_rt": 0.0,
            "m_rp": 0.0,
            "m_tp": 0.0}

        gen = InputFileGenerator()
        gen.add_stations(stations)
        gen.add_events(event)

        # Configure it.
        gen.config.number_of_time_steps = 4000
        gen.config.time_increment_in_s = 0.13
        gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
        gen.config.mesh_min_latitude = 34.1
        gen.config.mesh_max_latitude = 42.9
        gen.config.mesh_min_longitude = 23.1
        gen.config.mesh_max_longitude = 42.9
        gen.config.mesh_min_depth_in_km = 0.0
        gen.config.mesh_max_depth_in_km = 471.0
        gen.config.nx_global = 66
        gen.config.ny_global = 108
        gen.config.nz_global = 28
        gen.config.px = 3
        gen.config.py = 4
        gen.config.pz = 4
        gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
        gen.config.is_dissipative = False
        gen.config.adjoint_forward_wavefield_output_folder = \
            "/tmp/some_folder/"
        gen.config.displacement_snapshot_sampling = 15000
        gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
        gen.config.Q_model_weights_of_relaxation_mechanisms = \
            [2.5100, 2.4354, 0.0879]

        # Write the input files to a dictionary.
        input_files = gen.write(format="ses3d_4_0")

        # The rest is only for asserting the produced files.
        path = os.path.join(self.data_dir, "ses3d_4_0_real_world_example")
        for filename in glob.glob(os.path.join(path, "*")):
            with open(filename, "rt") as open_file:
                real_file = open_file.read().strip()
            filename = os.path.basename(filename)

            if filename not in input_files:
                msg = "File '%s' has not been generated" % filename
                raise AssertionError(msg)

            lines = real_file.splitlines()
            new_lines = input_files[filename].splitlines()

            if len(lines) != len(new_lines):
                msg = ("File '%s' does not have the same number of lines "
                    "for the real (%i lines) and generated (%i lines) "
                    "input file") % (filename, len(lines), len(new_lines))
                raise AssertionError(msg)

            for line, new_line in zip(lines, new_lines):
                if line != new_line:
                    msg = "Line differs in file '%s'.\n" % filename
                    msg += "Expected: \"%s\"\n" % line
                    msg += "Got:      \"%s\"\n" % new_line
                    raise AssertionError(msg)
Example #51
    def generate_input_files(self, iteration_name, event_name,
                             simulation_type):
        """
        Generate the input files for one event.

        :param iteration_name: The name of the iteration.
        :param event_name: The name of the event for which to generate the
            input files.
        :param simulation_type: The type of simulation to perform. Possible
            values are: 'normal simulation', 'adjoint forward', 'adjoint
            reverse'
        """
        from wfs_input_generator import InputFileGenerator

        # =====================================================================
        # read iteration xml file, get event and list of stations
        # =====================================================================

        iteration = self.comm.iterations.get(iteration_name)

        # Check that the event is part of the iterations.
        if event_name not in iteration.events:
            msg = ("Event '%s' not part of iteration '%s'.\nEvents available "
                   "in iteration:\n\t%s" %
                   (event_name, iteration_name, "\n\t".join(
                       sorted(iteration.events.keys()))))
            raise ValueError(msg)

        event = self.comm.events.get(event_name)
        stations_for_event = list(
            iteration.events[event_name]["stations"].keys())

        # Get all stations and create a dictionary for the input file
        # generator.
        stations = self.comm.query.get_all_stations_for_event(event_name)
        stations = [{
            "id": key,
            "latitude": value["latitude"],
            "longitude": value["longitude"],
            "elevation_in_m": value["elevation_in_m"],
            "local_depth_in_m": value["local_depth_in_m"]
        } for key, value in stations.items() if key in stations_for_event]

        # =====================================================================
        # set solver options
        # =====================================================================

        solver = iteration.solver_settings

        # Check that the solver format is one of the supported formats.
        solver_format = solver["solver"].lower()
        if solver_format not in [
                "ses3d 4.1", "ses3d 2.0", "specfem3d cartesian",
                "specfem3d globe cem"
        ]:
            msg = ("Currently only SES3D 4.1, SES3D 2.0, SPECFEM3D "
                   "CARTESIAN, and SPECFEM3D GLOBE CEM are supported.")
            raise ValueError(msg)
        solver_format = solver_format.replace(' ', '_')
        solver_format = solver_format.replace('.', '_')

        solver = solver["solver_settings"]

        # =====================================================================
        # create the input file generator, add event and stations,
        # populate the configuration items
        # =====================================================================

        # Add the event and the stations to the input file generator.
        gen = InputFileGenerator()
        gen.add_events(event["filename"])
        gen.add_stations(stations)

        if solver_format in ["ses3d_4_1", "ses3d_2_0"]:
            # event tag
            gen.config.event_tag = event_name

            # Time configuration.
            npts = solver["simulation_parameters"]["number_of_time_steps"]
            delta = solver["simulation_parameters"]["time_increment"]
            gen.config.number_of_time_steps = npts
            gen.config.time_increment_in_s = delta

            # SES3D specific configuration
            gen.config.output_folder = solver["output_directory"].replace(
                "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.simulation_type = simulation_type

            gen.config.adjoint_forward_wavefield_output_folder = \
                solver["adjoint_output_parameters"][
                    "forward_field_output_directory"].replace(
                    "{{EVENT_NAME}}", event_name.replace(" ", "_"))
            gen.config.adjoint_forward_sampling_rate = \
                solver["adjoint_output_parameters"][
                    "sampling_rate_of_forward_field"]

            # Visco-elastic dissipation
            diss = solver["simulation_parameters"]["is_dissipative"]
            gen.config.is_dissipative = diss

            # Only SES3D 4.1 has the relaxation parameters.
            if solver_format == "ses3d_4_1":
                gen.config.Q_model_relaxation_times = \
                    solver["relaxation_parameter_list"]["tau"]
                gen.config.Q_model_weights_of_relaxation_mechanisms = \
                    solver["relaxation_parameter_list"]["w"]

            # Discretization
            disc = solver["computational_setup"]
            gen.config.nx_global = disc["nx_global"]
            gen.config.ny_global = disc["ny_global"]
            gen.config.nz_global = disc["nz_global"]
            gen.config.px = disc["px_processors_in_theta_direction"]
            gen.config.py = disc["py_processors_in_phi_direction"]
            gen.config.pz = disc["pz_processors_in_r_direction"]
            gen.config.lagrange_polynomial_degree = \
                disc["lagrange_polynomial_degree"]

            # Configure the mesh.
            domain = self.comm.project.domain
            gen.config.mesh_min_latitude = domain.min_latitude
            gen.config.mesh_max_latitude = domain.max_latitude
            gen.config.mesh_min_longitude = domain.min_longitude
            gen.config.mesh_max_longitude = domain.max_longitude
            gen.config.mesh_min_depth_in_km = domain.min_depth_in_km
            gen.config.mesh_max_depth_in_km = domain.max_depth_in_km

            # Set the rotation parameters.
            gen.config.rotation_angle_in_degree = \
                domain.rotation_angle_in_degree
            gen.config.rotation_axis = domain.rotation_axis

            # Make source time function
            gen.config.source_time_function = \
                iteration.get_source_time_function()["data"]
        elif solver_format == "specfem3d_cartesian":
            gen.config.NSTEP = \
                solver["simulation_parameters"]["number_of_time_steps"]
            gen.config.DT = \
                solver["simulation_parameters"]["time_increment"]
            gen.config.NPROC = \
                solver["computational_setup"]["number_of_processors"]
            if simulation_type == "normal simulation":
                msg = ("'normal_simulate' not supported for SPECFEM3D "
                       "Cartesian. Please choose either 'adjoint_forward' or "
                       "'adjoint_reverse'.")
                raise NotImplementedError(msg)
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
            else:
                raise NotImplementedError
            solver_format = solver_format.upper()

        elif solver_format == "specfem3d_globe_cem":
            cs = solver["computational_setup"]
            gen.config.NPROC_XI = cs["number_of_processors_xi"]
            gen.config.NPROC_ETA = cs["number_of_processors_eta"]
            gen.config.NCHUNKS = cs["number_of_chunks"]
            gen.config.NEX_XI = cs["elements_per_chunk_xi"]
            gen.config.NEX_ETA = cs["elements_per_chunk_eta"]
            gen.config.OCEANS = cs["simulate_oceans"]
            gen.config.ELLIPTICITY = cs["simulate_ellipticity"]
            gen.config.TOPOGRAPHY = cs["simulate_topography"]
            gen.config.GRAVITY = cs["simulate_gravity"]
            gen.config.ROTATION = cs["simulate_rotation"]
            gen.config.ATTENUATION = cs["simulate_attenuation"]
            gen.config.ABSORBING_CONDITIONS = True
            if cs["fast_undo_attenuation"]:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = True
                gen.config.UNDO_ATTENUATION = False
            else:
                gen.config.PARTIAL_PHYS_DISPERSION_ONLY = False
                gen.config.UNDO_ATTENUATION = True
            gen.config.GPU_MODE = cs["use_gpu"]
            gen.config.SOURCE_TIME_FUNCTION = \
                iteration.get_source_time_function()["data"]

            if simulation_type == "normal simulation":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = False
            elif simulation_type == "adjoint forward":
                gen.config.SIMULATION_TYPE = 1
                gen.config.SAVE_FORWARD = True
            elif simulation_type == "adjoint reverse":
                gen.config.SIMULATION_TYPE = 2
                gen.config.SAVE_FORWARD = True
            else:
                raise NotImplementedError

            # Use the current domain setting to derive the bounds in the way
            # SPECFEM specifies them.
            domain = self.comm.project.domain

            lat_range = domain.max_latitude - \
                domain.min_latitude
            lng_range = domain.max_longitude - \
                domain.min_longitude

            c_lat = \
                domain.min_latitude + lat_range / 2.0
            c_lng = \
                domain.min_longitude + lng_range / 2.0

            # Rotate the point.
            c_lat_1, c_lng_1 = rotations.rotate_lat_lon(
                c_lat, c_lng, domain.rotation_axis,
                domain.rotation_angle_in_degree)

            # SES3D rotation.
            A = rotations._get_rotation_matrix(domain.rotation_axis,
                                               domain.rotation_angle_in_degree)

            latitude_rotation = -(c_lat_1 - c_lat)
            longitude_rotation = c_lng_1 - c_lng

            # Rotate the latitude. The rotation axis is latitude 0 and
            # the center longitude + 90 degree
            B = rotations._get_rotation_matrix(
                rotations.lat_lon_radius_to_xyz(0.0, c_lng + 90, 1.0),
                latitude_rotation)
            # Rotate around the North pole.
            C = rotations._get_rotation_matrix([0.0, 0.0, 1.0],
                                               longitude_rotation)

            D = A * np.linalg.inv(C * B)

            axis, angle = rotations._get_axis_and_angle_from_rotation_matrix(D)
            rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            # Consistency check
            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                axis *= -1.0
                angle *= -1.0
                rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

            if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                    abs(rotated_axis[1] - c_lng_1) >= 0.01:
                msg = "Failed to describe the domain in terms that SPECFEM " \
                      "understands. The domain definition in the output " \
                      "files will NOT BE CORRECT!"
                warnings.warn(msg, LASIFWarning)

            gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = lng_range
            gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = lat_range
            gen.config.CENTER_LATITUDE_IN_DEGREES = c_lat_1
            gen.config.CENTER_LONGITUDE_IN_DEGREES = c_lng_1
            gen.config.GAMMA_ROTATION_AZIMUTH = angle

            gen.config.MODEL = cs["model"]

            pp = iteration.get_process_params()
            gen.config.RECORD_LENGTH_IN_MINUTES = \
                (pp["npts"] * pp["dt"]) / 60.0
            solver_format = solver_format.upper()

        else:
            msg = "Unknown solver '%s'." % solver_format
            raise NotImplementedError(msg)

        # =================================================================
        # output
        # =================================================================
        output_dir = self.comm.project.get_output_folder(
            type="input_files",
            tag="ITERATION_%s__%s__EVENT_%s" %
            (iteration_name, simulation_type.replace(" ", "_"), event_name))

        gen.write(format=solver_format, output_dir=output_dir)
        print("Written files to '%s'." % output_dir)