def test_adding_multiple_events_JSON():
    """
    A JSON document containing several events must be accepted and the
    origin times converted back to UTCDateTime objects.
    """
    event_list = [{
        "latitude": 45.0,
        "description": "Some description",
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": str(obspy.UTCDateTime(2012, 4, 12, 7, 15, 48,
                                             500000)),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "latitude": 13.93,
        "description": "Some other description",
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": str(obspy.UTCDateTime(2012, 11, 7, 16, 35, 55,
                                             200000)),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]

    generator = InputFileGenerator()
    generator.add_events(json.dumps(event_list))

    # The generator parses the time strings, so convert the expectation.
    for ev in event_list:
        ev["origin_time"] = obspy.UTCDateTime(ev["origin_time"])

    assert sorted(generator._events) == sorted(event_list)
def test_adding_invalid_file_to_station_raises():
    """
    Passing a nonexistent path to add_stations must raise an IOError.
    """
    generator = InputFileGenerator()
    with pytest.raises(IOError):
        generator.add_stations("some_nonesense")
def test_configuration_via_a_dictionary():
    """
    Updating the configuration with plain dictionaries must merge new keys
    and overwrite existing ones.
    """
    generator = InputFileGenerator()
    generator.config.test = "1"
    assert generator.config == {"test": "1"}

    # New keys are merged into the existing configuration.
    generator.add_configuration({
        "something_else": 2,
        "and_more": 3.0})
    assert generator.config == {
        "test": "1",
        "something_else": 2,
        "and_more": 3.0}

    # Adding the something that already exists overwrites.
    generator.add_configuration({"test": "4"})
    assert generator.config == {
        "test": "4",
        "something_else": 2,
        "and_more": 3.0}
def test_adding_invalid_file_to_event_raises():
    """
    Passing an unparseable string to add_events must raise a ValueError.
    """
    generator = InputFileGenerator()
    with pytest.raises(ValueError):
        generator.add_events("some_nonesense")
def test_configuration_via_JSON():
    """
    The configuration can also be updated via JSON documents; existing keys
    get overwritten, new ones merged.
    """
    generator = InputFileGenerator()
    generator.config.test = "1"
    assert generator.config == {"test": "1"}

    # Merge a JSON object with new keys.
    generator.add_configuration(json.dumps({
        "something_else": 2,
        "and_more": 3.0}))
    assert generator.config == {
        "test": "1",
        "something_else": 2,
        "and_more": 3.0}

    # Adding the something that already exists overwrites.
    generator.add_configuration(json.dumps({"test": "4"}))
    assert generator.config == {
        "test": "4",
        "something_else": 2,
        "and_more": 3.0}
def test_id_lat_lon_ele_are_necessary(self):
    """
    Station dictionaries missing any of id, latitude, longitude or
    elevation must be rejected with a ValueError.
    """
    # Station with missing id.
    station_1 = {"latitude": 47.737167,
                 "longitude": 11.2752,
                 "elevation_in_m": 565.0}
    # Station with missing latitude.
    station_2 = {"id": "BW.FURT",
                 "longitude": 11.2752,
                 "elevation_in_m": 565.0}
    # Station with missing longitude.
    station_3 = {"id": "BW.FURT",
                 "latitude": 47.737167,
                 "elevation_in_m": 565.0}
    # Station with missing elevation.
    station_4 = {"id": "BW.FURT",
                 "latitude": 47.737167,
                 "longitude": 11.2752}
    # Station with everything necessary
    station_5 = {"id": "BW.FURT",
                 "latitude": 47.737167,
                 "longitude": 11.2752,
                 "elevation_in_m": 565.0}

    gen = InputFileGenerator()

    # The first 4 should raise a ValueError.
    for incomplete in (station_1, station_2, station_3, station_4):
        self.assertRaises(ValueError, gen.add_stations, incomplete)

    # The complete one must be accepted.
    gen.add_stations(station_5)
def test_reading_QuakeML_files(self):
    """
    QuakeML files on disk must be parsed into event dictionaries.
    """
    file_a = os.path.join(self.data_dir, "event1.xml")
    file_b = os.path.join(self.data_dir, "event2.xml")

    gen = InputFileGenerator()
    gen.add_events([file_a, file_b])

    expected = [{
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "latitude": 13.93,
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]

    # Sort to be able to compare.
    self.assertEqual(expected, sorted(gen._events))
def test_adding_events_as_URL():
    """
    Events given as URLs must be downloaded; the actual download is
    mocked out here.
    """
    expected_event = {
        "description": "FICTIONAL EVENT IN BAVARIA",
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18,
        "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"}

    with open(os.path.join(DATA, "event1.xml"), "rb") as fh:
        payload = fh.read()

    generator = InputFileGenerator()

    # Replace urlopen with a stub returning the local QuakeML payload.
    with mock.patch("urllib2.urlopen") as patched_urlopen:
        class FakeResponse(object):
            def read(self):
                return payload
        patched_urlopen.return_value = FakeResponse()
        generator.add_events("http://some_url.com")
        patched_urlopen.assert_called_once_with("http://some_url.com")

    assert [expected_event] == generator._events
def test_reading_events_from_dictionary(self):
    """
    Events given as plain dictionaries must be accepted unchanged.
    """
    event_dicts = [{
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "latitude": 13.93,
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]

    gen = InputFileGenerator()
    gen.add_events(event_dicts)
    self.assertEqual(sorted(gen._events), sorted(event_dicts))
def test_adding_sac_file_without_coordinates():
    """
    A SAC file carrying no coordinate headers must result in no station
    being added at all.
    """
    path = os.path.join(DATA, "example_without_coordinates.sac")
    generator = InputFileGenerator()
    generator.add_stations(path)
    assert generator._stations == []
def test_station_filter_JSON():
    """
    The station filter attribute must also accept a JSON list.
    """
    wildcard_filters = ["BW.HH*", "NE.*"]
    generator = InputFileGenerator()
    generator.station_filter = json.dumps(wildcard_filters)
    assert generator.station_filter == wildcard_filters
def test_event_filter_JSON():
    """
    The event filter attribute must also accept a JSON list.
    """
    event_id_filters = ["smi:some/url", "smi:some/other/url"]
    generator = InputFileGenerator()
    generator.event_filter = json.dumps(event_id_filters)
    assert generator.event_filter == event_id_filters
def test_adding_stations_as_URLs():
    """
    StationXML given as a URL must be downloaded; the download is mocked.
    """
    expected_stations = [
        {"id": "HT.HORT", "latitude": 40.5978, "longitude": 23.0995,
         "elevation_in_m": 925.0, "local_depth_in_m": 0.0},
        {"id": "HT.LIT", "latitude": 40.1003, "longitude": 22.489,
         "elevation_in_m": 568.0, "local_depth_in_m": 0.0},
        {"id": "HT.PAIG", "latitude": 39.9363, "longitude": 23.6768,
         "elevation_in_m": 213.0, "local_depth_in_m": 0.0},
        {"id": "HT.SOH", "latitude": 40.8206, "longitude": 23.3556,
         "elevation_in_m": 728.0, "local_depth_in_m": 0.0},
        {"id": "HT.THE", "latitude": 40.6319, "longitude": 22.9628,
         "elevation_in_m": 124.0, "local_depth_in_m": 0.0},
        {"id": "HT.XOR", "latitude": 39.366, "longitude": 23.192,
         "elevation_in_m": 500.0, "local_depth_in_m": 0.0}]

    with open(os.path.join(DATA, "station.xml"), "rb") as fh:
        payload = fh.read()

    generator = InputFileGenerator()

    # Replace urlopen with a stub serving the local StationXML payload.
    with mock.patch("urllib2.urlopen") as patched_urlopen:
        class FakeResponse(object):
            def read(self):
                return payload
        patched_urlopen.return_value = FakeResponse()
        generator.add_stations("http://some_url.com")
        patched_urlopen.assert_called_once_with("http://some_url.com")

    assert sorted(expected_stations) == sorted(generator._stations)
def test_config_raises_error_if_wrong_type():
    """
    add_configuration must reject anything that is not a mapping — both
    a plain string and a JSON document whose top level is a list.
    """
    generator = InputFileGenerator()

    with pytest.raises(ValueError):
        generator.add_configuration("something")

    # A JSON list is valid JSON but not a configuration object.
    with pytest.raises(ValueError):
        generator.add_configuration(json.dumps([{"something": "new"}]))
def test_adding_a_single_station_dictionary():
    """
    A single station dictionary must be stored as-is.
    """
    station_dict = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0}

    generator = InputFileGenerator()
    generator.add_stations(station_dict)
    assert [station_dict] == generator._stations
def test_adding_stations_as_SAC_files():
    """
    Station coordinates must be extracted from SAC file headers.
    """
    path = os.path.join(DATA, "example.sac")
    generator = InputFileGenerator()
    generator.add_stations(path)

    parsed = generator._stations[0]
    assert parsed["id"] == "IU.ANMO"
    # Floating point headers are compared to five decimal places.
    assert round(parsed["latitude"] - 34.94598, 5) == 0
    assert round(parsed["longitude"] - -106.45713, 5) == 0
    assert round(parsed["elevation_in_m"] - 1671.0, 5) == 0
    assert round(parsed["local_depth_in_m"] - 145.0, 5) == 0
def test_station_dicts_with_invalid_information_raise():
    """
    A station whose coordinate fields cannot be coerced to float must be
    rejected with a ValueError.
    """
    # "A" is not convertible to float, so adding must fail.
    bad_station = {"id": "BW.FURT",
                   "latitude": "A",
                   "longitude": 2,
                   "elevation_in_m": 3,
                   "local_depth_in_m": 4}

    generator = InputFileGenerator()
    with pytest.raises(ValueError):
        generator.add_stations(bad_station)
def test_adding_sac_file_without_local_depth():
    """
    A SAC file lacking the local depth header is still accepted; the
    missing value defaults to zero.
    """
    path = os.path.join(DATA, "example_without_local_depth.sac")
    generator = InputFileGenerator()
    generator.add_stations(path)

    parsed = generator._stations[0]
    assert parsed["id"] == "IU.ANMO"
    assert round(parsed["latitude"] - 34.94598, 5) == 0
    assert round(parsed["longitude"] - -106.45713, 5) == 0
    assert round(parsed["elevation_in_m"] - 1671.0, 5) == 0
    # Local depth will be set to 0 in case it is not available.
    assert parsed["local_depth_in_m"] == 0
def test_adding_a_single_station_as_JSON():
    """
    A single station serialized as JSON must round-trip into the
    generator's station list.
    """
    station_dict = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0}

    generator = InputFileGenerator()
    generator.add_stations(json.dumps(station_dict))
    assert [station_dict] == generator._stations
def test_wrong_stf_header_format():
    """
    Simple test asserting that the correct exceptions get raised when
    attempting to write invalid STF headers.
    """
    # Minimal valid station and event so that writing gets as far as the
    # STF header validation.
    station = {
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10}
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0}
    gen = InputFileGenerator()
    gen.add_stations(station)
    gen.add_events(event)

    # Configure it.  A complete, otherwise-valid SES3D 4.1 configuration;
    # only the stf_header set below is deliberately malformed.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False

    # A bare string is not a valid header.
    gen.config.stf_header = "simple string"
    # Write the input files to a dictionary.
    with pytest.raises(ValueError):
        gen.write(format="ses3d_4_1")

    # A list of the wrong length is not valid either.
    # NOTE(review): presumably the expected header length differs from 6 —
    # confirm against the ses3d_4_1 writer.
    gen.config.stf_header = ["1", "2", "3", "4", "5", "6"]
    # Write the input files to a dictionary.
    with pytest.raises(ValueError):
        gen.write(format="ses3d_4_1")
def test_automatic_type_converstion_for_station_dict(self):
    """
    Numeric station fields must be coerced to float regardless of whether
    they arrive as ints or strings.
    """
    # All the coordinate values should be converted to floats.
    station = {"id": "BW.FURT",
               "latitude": 1,
               "longitude": 2,
               "elevation_in_m": "3",
               "local_depth_in_m": "4"}

    gen = InputFileGenerator()
    gen.add_stations(station)

    stored = gen._stations[0]
    for field in ("latitude", "longitude", "elevation_in_m",
                  "local_depth_in_m"):
        self.assertEqual(type(stored[field]), float)
def test_adding_dict_with_missing_keys():
    """
    An event dictionary lacking a required key (here m_tt) must be
    rejected with a ValueError.
    """
    incomplete_event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}

    generator = InputFileGenerator()
    with pytest.raises(ValueError):
        generator.add_events(incomplete_event)
def test_passing_station_dictionaries(self):
    """
    A list of station dictionaries must be accepted unchanged.
    """
    station_dicts = [
        {"id": "BW.FURT",
         "latitude": 48.162899,
         "longitude": 11.2752,
         "elevation_in_m": 565.0,
         "local_depth_in_m": 10.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0,
         "local_depth_in_m": 2.0}]

    gen = InputFileGenerator()
    gen.add_stations(station_dicts)
    self.assertEqual(sorted(station_dicts), sorted(gen._stations))
def test_local_depth_will_be_set_to_zero(self):
    """
    Station dictionaries without a local depth get a default of 0.0.
    """
    station_dicts = [
        {"id": "BW.FURT",
         "latitude": 48.162899,
         "longitude": 11.2752,
         "elevation_in_m": 565.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0}]

    gen = InputFileGenerator()
    gen.add_stations(station_dicts)

    # Add the defaulted local depth to the expectation.
    for st in station_dicts:
        st["local_depth_in_m"] = 0.0
    self.assertEqual(sorted(station_dicts), sorted(gen._stations))
def test_adding_multiple_stations_as_JSON():
    """
    A JSON list of stations must round-trip into the generator.
    """
    station_dicts = [
        {"id": "BW.FURT",
         "latitude": 48.162899,
         "longitude": 11.2752,
         "elevation_in_m": 565.0,
         "local_depth_in_m": 10.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0,
         "local_depth_in_m": 2.0}]

    generator = InputFileGenerator()
    generator.add_stations(json.dumps(station_dicts))
    assert sorted(station_dicts) == sorted(generator._stations)
def test_adding_stations_as_SEED_files_via_BytesIO():
    """
    SEED data handed over as an in-memory BytesIO object must be parsed
    just like an on-disk file.
    """
    path = os.path.join(DATA, "dataless.seed.BW_FURT")
    with open(path, "rb") as fh:
        in_memory_seed = io.BytesIO(fh.read())

    generator = InputFileGenerator()
    generator.add_stations(in_memory_seed)

    expected = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 0.0}]
    assert generator._stations == expected
def test_adding_single_event_dictionary():
    """
    A single, complete event dictionary must be stored as-is.
    """
    event_dict = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "description": "Some description",
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}

    generator = InputFileGenerator()
    generator.add_events(event_dict)
    assert generator._events == [event_dict]
def test_event_dictionary_automatic_type_conversion():
    """
    String-valued event fields must be coerced to their proper types:
    floats for the numeric fields, UTCDateTime for the origin time, and
    str for the description.
    """
    raw_event = {
        "description": 1,
        "latitude": "1",
        "longitude": "2",
        "depth_in_km": "3",
        "origin_time": "2012-01-01T00:00:00.000000Z",
        "m_rr": "-2.11e+18",
        "m_tt": "-4.22e+19",
        "m_pp": "4.43e+19",
        "m_rt": "-9.35e+18",
        "m_rp": "-8.38e+18",
        "m_tp": "-6.44e+18"}

    generator = InputFileGenerator()
    generator.add_events(raw_event)
    stored = generator._events[0]

    # Every numeric field is converted to a float.
    for field in ("latitude", "longitude", "depth_in_km", "m_rr", "m_tt",
                  "m_pp", "m_rt", "m_rp", "m_tp"):
        assert type(stored[field]) == float
    # The origin time becomes a proper UTCDateTime instance.
    assert type(stored["origin_time"]) == obspy.UTCDateTime

    assert generator._events == [{
        "description": "1",
        "latitude": 1.0,
        "longitude": 2.0,
        "depth_in_km": 3.0,
        "origin_time": obspy.UTCDateTime(2012, 1, 1),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}]
def test_reading_QuakeML_from_BytesIO():
    """
    QuakeML handed over as BytesIO objects must parse exactly like files.
    """
    def load(filename):
        with open(os.path.join(DATA, filename), "rb") as fh:
            return io.BytesIO(fh.read())

    generator = InputFileGenerator()
    generator.add_events([load("event1.xml"), load("event2.xml")])

    expected = [
        {"description": "FICTIONAL EVENT IN BAVARIA",
         "latitude": 45.0,
         "longitude": 12.1,
         "depth_in_km": 13.0,
         "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
         "m_rr": -2.11e+18,
         "m_tt": -4.22e+19,
         "m_pp": 4.43e+19,
         "m_rt": -9.35e+18,
         "m_rp": -8.38e+18,
         "m_tp": -6.44e+18,
         "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
        {"description": "GUATEMALA",
         "latitude": 13.93,
         "longitude": -92.47,
         "depth_in_km": 28.7,
         "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
         "m_rr": 1.02e+20,
         "m_tt": -7.96e+19,
         "m_pp": -2.19e+19,
         "m_rt": 6.94e+19,
         "m_rp": -4.08e+19,
         "m_tp": 4.09e+19,
         "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]

    # Sort to be able to compare.
    assert sorted(generator._events) == expected
def main():
    """
    Example driver: build a complete SES3D 4.0 input file set for one
    event and two stations and write it to the 'output' folder.
    """
    gen = InputFileGenerator()

    # SES3D 4.0 can only simulate one event at a time.
    gen.add_events("../tests/data/event1.xml")
    gen.add_stations(["../tests/data/dataless.seed.BW_FURT",
                      "../tests/data/dataless.seed.BW_RJOB"])

    # Just perform a standard forward simulation.
    gen.config.simulation_type = "normal simulation"

    gen.config.output_folder = "../OUTPUT"

    # Time configuration.
    gen.config.number_of_time_steps = 700
    gen.config.time_increment_in_s = 0.75

    # SES3D specific configuration
    gen.config.output_directory = "../DATA/OUTPUT/1.8s"

    # SES3D specific discretization
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4

    # Specify some source time function.
    gen.config.source_time_function = np.sin(np.linspace(0, 10, 700))

    # Configure the mesh.
    gen.config.mesh_min_latitude = -50.0
    gen.config.mesh_max_latitude = 50.0
    gen.config.mesh_min_longitude = -50.0
    gen.config.mesh_max_longitude = 50.0
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 200.0

    # Define the rotation. Take care this is defined as the rotation of the
    # mesh. The data will be rotated in the opposite direction! The following
    # example will rotate the mesh 5 degrees southwards around the x-axis. For
    # a definition of the coordinate system refer to the rotations.py file. The
    # rotation is entirely optional.
    gen.config.rotation_angle_in_degree = 5.0
    gen.config.rotation_axis = [1.0, 0.0, 0.0]

    # Define Q
    gen.config.is_dissipative = True
    gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
    gen.config.Q_model_weights_of_relaxation_mechanisms = \
        [2.5100, 2.4354, 0.0879]

    # Finally write the file to a folder. If not output directory is given, a
    # dictionary containing all the files will be returned.
    gen.write(format="ses3d_4_0", output_dir="output")

    # Parenthesized form works on Python 2 and 3 (the bare print
    # statement used previously is Python-2-only and inconsistent with
    # the print() calls elsewhere in this project).
    print("Written files to 'output' folder.")
def generate_input_files(self, iteration_name, event_name,
                         simulation_type):
    """
    Generate the input files for one event.

    :param iteration_name: The name of the iteration.
    :param event_name: The name of the event for which to generate the
        input files.
    :param simulation_type: The type of simulation to perform. Possible
        values are: 'normal simulation', 'adjoint forward',
        'adjoint reverse'
    :raises ValueError: If the event is not part of the iteration or the
        configured solver is not supported.
    :raises NotImplementedError: For unsupported simulation type /
        solver combinations.
    """
    from wfs_input_generator import InputFileGenerator

    # =====================================================================
    # read iteration xml file, get event and list of stations
    # =====================================================================
    iteration = self.comm.iterations.get(iteration_name)

    # Check that the event is part of the iterations.
    if event_name not in iteration.events:
        msg = ("Event '%s' not part of iteration '%s'.\nEvents available "
               "in iteration:\n\t%s" %
               (event_name, iteration_name, "\n\t".join(
                   sorted(iteration.events.keys()))))
        raise ValueError(msg)

    event = self.comm.events.get(event_name)
    stations_for_event = list(
        iteration.events[event_name]["stations"].keys())

    # Get all stations and create a dictionary for the input file
    # generator.  Only stations actually used in this iteration's event
    # are kept.
    stations = self.comm.query.get_all_stations_for_event(event_name)
    stations = [{
        "id": key,
        "latitude": value["latitude"],
        "longitude": value["longitude"],
        "elevation_in_m": value["elevation_in_m"],
        "local_depth_in_m": value["local_depth_in_m"]
    } for key, value in stations.items() if key in stations_for_event]

    # =====================================================================
    # set solver options
    # =====================================================================
    solver = iteration.solver_settings

    # Currently only SES3D 4.1 is supported
    solver_format = solver["solver"].lower()
    if solver_format not in [
            "ses3d 4.1", "ses3d 2.0", "specfem3d cartesian",
            "specfem3d globe cem"]:
        msg = ("Currently only SES3D 4.1, SES3D 2.0, SPECFEM3D "
               "CARTESIAN, and SPECFEM3D GLOBE CEM are supported.")
        raise ValueError(msg)
    # Normalize e.g. "ses3d 4.1" -> "ses3d_4_1" (the writer format name).
    solver_format = solver_format.replace(' ', '_')
    solver_format = solver_format.replace('.', '_')

    solver = solver["solver_settings"]

    # =====================================================================
    # create the input file generator, add event and stations,
    # populate the configuration items
    # =====================================================================
    # Add the event and the stations to the input file generator.
    gen = InputFileGenerator()
    gen.add_events(event["filename"])
    gen.add_stations(stations)

    if solver_format in ["ses3d_4_1", "ses3d_2_0"]:
        # event tag
        gen.config.event_tag = event_name

        # Time configuration.
        npts = solver["simulation_parameters"]["number_of_time_steps"]
        delta = solver["simulation_parameters"]["time_increment"]
        gen.config.number_of_time_steps = npts
        gen.config.time_increment_in_s = delta

        # SES3D specific configuration.  "{{EVENT_NAME}}" placeholders in
        # the configured paths are replaced by the (underscored) event name.
        gen.config.output_folder = solver["output_directory"].replace(
            "{{EVENT_NAME}}", event_name.replace(" ", "_"))
        gen.config.simulation_type = simulation_type

        gen.config.adjoint_forward_wavefield_output_folder = \
            solver["adjoint_output_parameters"][
                "forward_field_output_directory"].replace(
                "{{EVENT_NAME}}", event_name.replace(" ", "_"))
        gen.config.adjoint_forward_sampling_rate = \
            solver["adjoint_output_parameters"][
                "sampling_rate_of_forward_field"]

        # Visco-elastic dissipation
        diss = solver["simulation_parameters"]["is_dissipative"]
        gen.config.is_dissipative = diss

        # Only SES3D 4.1 has the relaxation parameters.
        if solver_format == "ses3d_4_1":
            gen.config.Q_model_relaxation_times = \
                solver["relaxation_parameter_list"]["tau"]
            gen.config.Q_model_weights_of_relaxation_mechanisms = \
                solver["relaxation_parameter_list"]["w"]

        # Discretization
        disc = solver["computational_setup"]
        gen.config.nx_global = disc["nx_global"]
        gen.config.ny_global = disc["ny_global"]
        gen.config.nz_global = disc["nz_global"]
        gen.config.px = disc["px_processors_in_theta_direction"]
        gen.config.py = disc["py_processors_in_phi_direction"]
        gen.config.pz = disc["pz_processors_in_r_direction"]
        gen.config.lagrange_polynomial_degree = \
            disc["lagrange_polynomial_degree"]

        # Configure the mesh.
        domain = self.comm.project.domain
        gen.config.mesh_min_latitude = domain.min_latitude
        gen.config.mesh_max_latitude = domain.max_latitude
        gen.config.mesh_min_longitude = domain.min_longitude
        gen.config.mesh_max_longitude = domain.max_longitude
        gen.config.mesh_min_depth_in_km = domain.min_depth_in_km
        gen.config.mesh_max_depth_in_km = domain.max_depth_in_km

        # Set the rotation parameters.
        gen.config.rotation_angle_in_degree = \
            domain.rotation_angle_in_degree
        gen.config.rotation_axis = domain.rotation_axis

        # Make source time function
        gen.config.source_time_function = \
            iteration.get_source_time_function()["data"]
    elif solver_format == "specfem3d_cartesian":
        gen.config.NSTEP = \
            solver["simulation_parameters"]["number_of_time_steps"]
        gen.config.DT = \
            solver["simulation_parameters"]["time_increment"]
        gen.config.NPROC = \
            solver["computational_setup"]["number_of_processors"]
        if simulation_type == "normal simulation":
            msg = ("'normal_simulate' not supported for SPECFEM3D "
                   "Cartesian. Please choose either 'adjoint_forward' or "
                   "'adjoint_reverse'.")
            raise NotImplementedError(msg)
        elif simulation_type == "adjoint forward":
            gen.config.SIMULATION_TYPE = 1
        elif simulation_type == "adjoint reverse":
            gen.config.SIMULATION_TYPE = 2
        else:
            raise NotImplementedError
        solver_format = solver_format.upper()
    elif solver_format == "specfem3d_globe_cem":
        cs = solver["computational_setup"]
        gen.config.NPROC_XI = cs["number_of_processors_xi"]
        gen.config.NPROC_ETA = cs["number_of_processors_eta"]
        gen.config.NCHUNKS = cs["number_of_chunks"]
        gen.config.NEX_XI = cs["elements_per_chunk_xi"]
        gen.config.NEX_ETA = cs["elements_per_chunk_eta"]
        gen.config.OCEANS = cs["simulate_oceans"]
        gen.config.ELLIPTICITY = cs["simulate_ellipticity"]
        gen.config.TOPOGRAPHY = cs["simulate_topography"]
        gen.config.GRAVITY = cs["simulate_gravity"]
        gen.config.ROTATION = cs["simulate_rotation"]
        gen.config.ATTENUATION = cs["simulate_attenuation"]
        gen.config.ABSORBING_CONDITIONS = True
        if cs["fast_undo_attenuation"]:
            gen.config.PARTIAL_PHYS_DISPERSION_ONLY = True
            gen.config.UNDO_ATTENUATION = False
        else:
            gen.config.PARTIAL_PHYS_DISPERSION_ONLY = False
            gen.config.UNDO_ATTENUATION = True
        gen.config.GPU_MODE = cs["use_gpu"]
        gen.config.SOURCE_TIME_FUNCTION = \
            iteration.get_source_time_function()["data"]

        if simulation_type == "normal simulation":
            gen.config.SIMULATION_TYPE = 1
            gen.config.SAVE_FORWARD = False
        elif simulation_type == "adjoint forward":
            gen.config.SIMULATION_TYPE = 1
            gen.config.SAVE_FORWARD = True
        elif simulation_type == "adjoint reverse":
            gen.config.SIMULATION_TYPE = 2
            gen.config.SAVE_FORWARD = True
        else:
            raise NotImplementedError

        # Use the current domain setting to derive the bounds in the way
        # SPECFEM specifies them.
        domain = self.comm.project.domain

        lat_range = domain.max_latitude - \
            domain.min_latitude
        lng_range = domain.max_longitude - \
            domain.min_longitude

        # Center of the (unrotated) domain.
        c_lat = \
            domain.min_latitude + lat_range / 2.0
        c_lng = \
            domain.min_longitude + lng_range / 2.0

        # Rotate the point.
        c_lat_1, c_lng_1 = rotations.rotate_lat_lon(
            c_lat, c_lng, domain.rotation_axis,
            domain.rotation_angle_in_degree)

        # SES3D rotation.
        A = rotations._get_rotation_matrix(
            domain.rotation_axis, domain.rotation_angle_in_degree)

        latitude_rotation = -(c_lat_1 - c_lat)
        longitude_rotation = c_lng_1 - c_lng

        # Rotate the latitude. The rotation axis is latitude 0 and
        # the center longitude + 90 degree
        B = rotations._get_rotation_matrix(
            rotations.lat_lon_radius_to_xyz(0.0, c_lng + 90, 1.0),
            latitude_rotation)
        # Rotate around the North pole.
        C = rotations._get_rotation_matrix(
            [0.0, 0.0, 1.0], longitude_rotation)

        # Residual rotation that SPECFEM must apply on top of the
        # latitude/longitude recentering.
        D = A * np.linalg.inv(C * B)

        axis, angle = \
            rotations._get_axis_and_angle_from_rotation_matrix(D)
        rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

        # Consistency check: the axis/angle pair is only defined up to a
        # simultaneous sign flip — try the other sign before giving up.
        if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                abs(rotated_axis[1] - c_lng_1) >= 0.01:
            axis *= -1.0
            angle *= -1.0
            rotated_axis = rotations.xyz_to_lat_lon_radius(*axis)

        if abs(rotated_axis[0] - c_lat_1) >= 0.01 or \
                abs(rotated_axis[1] - c_lng_1) >= 0.01:
            msg = "Failed to describe the domain in terms that SPECFEM " \
                  "understands. The domain definition in the output " \
                  "files will NOT BE CORRECT!"
            warnings.warn(msg, LASIFWarning)

        gen.config.ANGULAR_WIDTH_XI_IN_DEGREES = lng_range
        gen.config.ANGULAR_WIDTH_ETA_IN_DEGREES = lat_range
        gen.config.CENTER_LATITUDE_IN_DEGREES = c_lat_1
        gen.config.CENTER_LONGITUDE_IN_DEGREES = c_lng_1
        gen.config.GAMMA_ROTATION_AZIMUTH = angle

        gen.config.MODEL = cs["model"]

        pp = iteration.get_process_params()
        gen.config.RECORD_LENGTH_IN_MINUTES = \
            (pp["npts"] * pp["dt"]) / 60.0
        solver_format = solver_format.upper()
    else:
        msg = "Unknown solver '%s'." % solver_format
        raise NotImplementedError(msg)

    # =================================================================
    # output
    # =================================================================
    output_dir = self.comm.project.get_output_folder(
        type="input_files",
        tag="ITERATION_%s__%s__EVENT_%s" % (
            iteration_name, simulation_type.replace(" ", "_"),
            event_name))

    gen.write(format=solver_format, output_dir=output_dir)
    print("Written files to '%s'." % output_dir)
def test_id_lat_lon_ele_are_necessary():
    """
    Stations missing any of id, latitude, longitude or elevation must be
    rejected — both as dictionaries and as JSON documents.
    """
    # Station with missing id.
    station_1 = {
        "latitude": 47.737167,
        "longitude": 11.2752,
        "elevation_in_m": 565.0}
    # Station with missing latitude.
    station_2 = {
        "id": "BW.FURT",
        "longitude": 11.2752,
        "elevation_in_m": 565.0}
    # Station with missing longitude.
    station_3 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "elevation_in_m": 565.0}
    # Station with missing elevation.
    station_4 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752}
    # Station with everything necessary
    station_5 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752,
        "elevation_in_m": 565.0}

    incomplete = (station_1, station_2, station_3, station_4)

    gen = InputFileGenerator()
    # The first 4 should raise a ValueError.
    for bad in incomplete:
        with pytest.raises(ValueError):
            gen.add_stations(bad)
    # The last one not.
    gen.add_stations(station_5)

    # Do exactly the same with JSON variants.
    gen = InputFileGenerator()
    for bad in incomplete:
        with pytest.raises(ValueError):
            gen.add_stations(json.dumps(bad))
    gen.add_stations(station_5)
def test_adding_single_and_multiple_station():
    """
    Adding stations all at once or one at a time must give identical
    results — for SEED files, dictionaries, and JSON documents alike.
    """
    seed_file_1 = os.path.join(DATA, "dataless.seed.BW_FURT")
    seed_file_2 = os.path.join(DATA, "dataless.seed.BW_RJOB")

    station_1 = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 0.0}
    station_2 = {
        "id": "BW.RJOB",
        "latitude": 47.737167,
        "longitude": 12.795714,
        "elevation_in_m": 860.0,
        "local_depth_in_m": 0.0}

    # SEED files: batch vs. one-by-one.
    batch_gen = InputFileGenerator()
    single_gen = InputFileGenerator()
    batch_gen.add_stations([seed_file_1, seed_file_2])
    single_gen.add_stations(seed_file_1)
    single_gen.add_stations(seed_file_2)
    assert sorted(batch_gen._stations) == sorted(single_gen._stations)

    # Dictionaries: batch vs. one-by-one.
    batch_gen = InputFileGenerator()
    single_gen = InputFileGenerator()
    batch_gen.add_stations([station_1, station_2])
    single_gen.add_stations(station_1)
    single_gen.add_stations(station_2)
    assert sorted(batch_gen._stations) == sorted(single_gen._stations)

    # JSON: batch vs. one-by-one.
    batch_gen = InputFileGenerator()
    single_gen = InputFileGenerator()
    batch_gen.add_stations(json.dumps([station_1, station_2]))
    single_gen.add_stations(json.dumps(station_1))
    single_gen.add_stations(json.dumps(station_2))
    assert sorted(batch_gen._stations) == sorted(single_gen._stations)
def generate_input_files(self, event_name, template_name,
                         simulation_type, source_time_function):
    """
    Generate the input files for one event.

    :param event_name: The name of the event for which to generate the
        input files.
    :param template_name: The name of the input file template
    :param simulation_type: The type of simulation to perform. Possible
        values are: 'normal simulation', 'adjoint forward',
        'adjoint reverse'
    :param source_time_function: A function source_time_function(npts,
        delta), taking the requested number of samples and the time
        spacing and returning an appropriate source time function as
        numpy array.
    :raises ValueError: If the event or the template does not exist.
    """
    from lasif import utils
    from wfs_input_generator import InputFileGenerator

    # Get the events
    all_events = self.get_event_dict()
    if event_name not in all_events:
        msg = "Event '%s' not found in project." % event_name
        raise ValueError(msg)
    event = self.get_event(event_name)

    # Get the input file templates.
    template_filename = os.path.join(
        self.paths["templates"], template_name + ".xml")
    if not os.path.exists(template_filename):
        msg = "Template '%s' does not exists." % template_name
        raise ValueError(msg)
    input_file = utils.read_ses3d_4_0_template(template_filename)

    # Get all stations and create a dictionary for the input file
    # generator.
    # NOTE(review): iteritems() is Python-2-only; keep in mind if this
    # module is ever ported to Python 3.
    stations = self.get_stations_for_event(event_name)
    stations = [{
        "id": key,
        "latitude": value["latitude"],
        "longitude": value["longitude"],
        "elevation_in_m": value["elevation"],
        "local_depth_in_m": value["local_depth"]
    } for key, value in stations.iteritems()]

    # Add the event and the stations to the input file generator.
    gen = InputFileGenerator()
    gen.add_events(event)
    gen.add_stations(stations)

    npts = input_file["simulation_parameters"]["number_of_time_steps"]
    delta = input_file["simulation_parameters"]["time_increment"]

    # Time configuration.
    gen.config.number_of_time_steps = npts
    gen.config.time_increment_in_s = delta

    # SES3D specific configuration
    gen.config.output_folder = input_file["output_directory"]
    gen.config.simulation_type = simulation_type

    gen.config.adjoint_forward_wavefield_output_folder = \
        input_file["adjoint_output_parameters"][
            "forward_field_output_directory"]
    gen.config.adjoint_forward_sampling_rate = \
        input_file["adjoint_output_parameters"][
            "sampling_rate_of_forward_field"]
    gen.config.is_dissipative = \
        input_file["simulation_parameters"]["is_dissipative"]

    # Discretization
    disc = input_file["computational_setup"]
    gen.config.nx_global = disc["nx_global"]
    gen.config.ny_global = disc["ny_global"]
    gen.config.nz_global = disc["nz_global"]
    gen.config.px = disc["px_processors_in_theta_direction"]
    gen.config.py = disc["py_processors_in_phi_direction"]
    gen.config.pz = disc["pz_processors_in_r_direction"]
    gen.config.lagrange_polynomial_degree = \
        disc["lagrange_polynomial_degree"]

    # Configure the mesh.
    gen.config.mesh_min_latitude = \
        self.domain["bounds"]["minimum_latitude"]
    gen.config.mesh_max_latitude = \
        self.domain["bounds"]["maximum_latitude"]
    gen.config.mesh_min_longitude = \
        self.domain["bounds"]["minimum_longitude"]
    gen.config.mesh_max_longitude = \
        self.domain["bounds"]["maximum_longitude"]
    gen.config.mesh_min_depth_in_km = \
        self.domain["bounds"]["minimum_depth_in_km"]
    gen.config.mesh_max_depth_in_km = \
        self.domain["bounds"]["maximum_depth_in_km"]

    gen.config.rotation_angle_in_degree = self.domain["rotation_angle"]
    gen.config.rotation_axis = self.domain["rotation_axis"]

    # Sample the user-supplied source time function on the template's
    # time axis.
    gen.config.source_time_function = source_time_function(
        int(npts), float(delta))

    output_dir = self.get_output_folder(
        "input_files___%s" % template_name)

    gen.write(format="ses3d_4_0", output_dir=output_dir)
    # NOTE(review): Python-2-only print statement (consistent with this
    # module's Python 2 idioms such as iteritems()).
    print "Written files to '%s'." % output_dir