def test_local_depth_will_be_set_to_zero():
    """
    Tests that the local depth will be set to zero if not given.
    """
    stations = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0}]
    json_stations = json.dumps(stations)
    gen = InputFileGenerator()
    gen.add_stations(stations)
    # Add the local depth to the expected stations for the comparison.
    stations[0]["local_depth_in_m"] = 0.0
    stations[1]["local_depth_in_m"] = 0.0
    assert sorted(stations) == sorted(gen._stations)

    # Repeat with the JSON variant.
    gen = InputFileGenerator()
    gen.add_stations(json_stations)
    assert sorted(stations) == sorted(gen._stations)


def test_id_lat_lon_ele_are_necessary():
    """
    Tests that the id, latitude, longitude and elevation fields are required.
    """
    # Station with missing id.
    station_1 = {
        "latitude": 47.737167,
        "longitude": 11.2752,
        "elevation_in_m": 565.0}
    # Station with missing latitude.
    station_2 = {
        "id": "BW.FURT",
        "longitude": 11.2752,
        "elevation_in_m": 565.0}
    # Station with missing longitude.
    station_3 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "elevation_in_m": 565.0}
    # Station with missing elevation.
    station_4 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752}
    # Station with everything necessary.
    station_5 = {
        "id": "BW.FURT",
        "latitude": 47.737167,
        "longitude": 11.2752,
        "elevation_in_m": 565.0}

    gen = InputFileGenerator()
    # The first four should raise a ValueError.
    with pytest.raises(ValueError):
        gen.add_stations(station_1)
    with pytest.raises(ValueError):
        gen.add_stations(station_2)
    with pytest.raises(ValueError):
        gen.add_stations(station_3)
    with pytest.raises(ValueError):
        gen.add_stations(station_4)
    # The last one should not.
    gen.add_stations(station_5)

    # Do exactly the same with JSON variants.
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_1))
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_2))
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_3))
    with pytest.raises(ValueError):
        gen.add_stations(json.dumps(station_4))
    gen.add_stations(json.dumps(station_5))


def test_configuration_via_JSON():
    """
    Tests that a JSON document can also be used to update the configuration.
    """
    gen = InputFileGenerator()
    gen.config.test = "1"
    assert gen.config == {"test": "1"}

    gen.add_configuration(json.dumps({
        "something_else": 2,
        "and_more": 3.0}))

    assert gen.config == {
        "test": "1",
        "something_else": 2,
        "and_more": 3.0}

    # Adding a key that already exists overwrites its value.
    gen.add_configuration(json.dumps({
        "test": "4"}))

    assert gen.config == {
        "test": "4",
        "something_else": 2,
        "and_more": 3.0}


def test_adding_events_as_URL():
    """
    QuakeML files should be downloaded if necessary.

    Mock the actual downloading.
    """
    event = {"description": "FICTIONAL EVENT IN BAVARIA",
             "latitude": 45.0,
             "longitude": 12.1,
             "depth_in_km": 13.0,
             "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
             "m_rr": -2.11e+18,
             "m_tt": -4.22e+19,
             "m_pp": 4.43e+19,
             "m_rt": -9.35e+18,
             "m_rp": -8.38e+18,
             "m_tp": -6.44e+18,
             "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"}

    quake_ml_file = os.path.join(DATA, "event1.xml")
    with open(quake_ml_file, "rb") as fh:
        data = fh.read()

    gen = InputFileGenerator()

    # Mock the URL
    with mock.patch("urllib2.urlopen") as patch:
        class Dummy(object):
            def read(self):
                return data
        patch.return_value = Dummy()
        gen.add_events("http://some_url.com")

    patch.assert_called_once_with("http://some_url.com")
    assert [event] == gen._events


def test_adding_multiple_events_JSON():
    """
    Tests adding multiple events as JSON.
    """
    events = [{
        "latitude": 45.0,
        "description": "Some description",
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": str(obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000)),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "latitude": 13.93,
        "description": "Some other description",
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": str(obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000)),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]
    gen = InputFileGenerator()
    gen.add_events(json.dumps(events))

    events[0]["origin_time"] = obspy.UTCDateTime(events[0]["origin_time"])
    events[1]["origin_time"] = obspy.UTCDateTime(events[1]["origin_time"])
    assert sorted(gen._events) == sorted(events)


def test_reading_events_from_dictionary():
    """
    Tests that events can also be passed as dictionaries.
    """
    events = [{
        "description": "Event at a sunny place.",
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18
    }, {
        "description": "Event at a rainy place.",
        "latitude": 13.93,
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19}]
    gen = InputFileGenerator()
    gen.add_events(events)
    assert sorted(gen._events) == sorted(events)


def test_configuration_via_a_dictionary():
    """
    Tests that a dictionary can be used to update the configuration.
    """
    gen = InputFileGenerator()
    gen.config.test = "1"
    assert gen.config == {"test": "1"}

    gen.add_configuration({
        "something_else": 2,
        "and_more": 3.0})

    assert gen.config == {
        "test": "1",
        "something_else": 2,
        "and_more": 3.0}

    # Adding a key that already exists overwrites its value.
    gen.add_configuration({
        "test": "4"})

    assert gen.config == {
        "test": "4",
        "something_else": 2,
        "and_more": 3.0}


def test_adding_invalid_file_to_event_raises():
    """
    Adding invalid input should raise.
    """
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_events("some_nonesense")


def test_adding_sac_file_without_coordinates():
    """
    This SAC file has no coordinates, so no station should be added.
    """
    sac_file = os.path.join(DATA, "example_without_coordinates.sac")
    gen = InputFileGenerator()
    gen.add_stations(sac_file)
    assert gen._stations == []


def test_station_filter_JSON():
    """
    Station filters can be set as JSON.
    """
    filters = ["BW.HH*", "NE.*"]
    gen = InputFileGenerator()
    gen.station_filter = json.dumps(filters)
    assert gen.station_filter == filters


def test_event_filter_JSON():
    """
    Event filters can be set as JSON.
    """
    filters = ["smi:some/url", "smi:some/other/url"]
    gen = InputFileGenerator()
    gen.event_filter = json.dumps(filters)
    assert gen.event_filter == filters


def main():
    gen = InputFileGenerator()

    # SES3D 4.0 can only simulate one event at a time.
    gen.add_events("../tests/data/event1.xml")
    gen.add_stations([
        "../tests/data/dataless.seed.BW_FURT",
        "../tests/data/dataless.seed.BW_RJOB"
    ])

    # Just perform a standard forward simulation.
    gen.config.simulation_type = "normal simulation"

    gen.config.output_folder = "../OUTPUT"

    # Time configuration.
    gen.config.number_of_time_steps = 700
    gen.config.time_increment_in_s = 0.75

    # SES3D specific configuration
    gen.config.output_directory = "../DATA/OUTPUT/1.8s"
    # SES3D specific discretization
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4

    # Specify some source time function.
    gen.config.source_time_function = np.sin(np.linspace(0, 10, 700))

    # Configure the mesh.
    gen.config.mesh_min_latitude = -50.0
    gen.config.mesh_max_latitude = 50.0
    gen.config.mesh_min_longitude = -50.0
    gen.config.mesh_max_longitude = 50.0
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 200.0

    # Define the rotation. Note that this is defined as the rotation of the
    # mesh; the data will be rotated in the opposite direction. The following
    # example rotates the mesh 5 degrees southwards around the x-axis. For a
    # definition of the coordinate system refer to the rotations.py file. The
    # rotation is entirely optional.
    gen.config.rotation_angle_in_degree = 5.0
    gen.config.rotation_axis = [1.0, 0.0, 0.0]

    # Define Q
    gen.config.is_dissipative = True
    gen.config.Q_model_relaxation_times = [1.7308, 14.3961, 22.9973]
    gen.config.Q_model_weights_of_relaxation_mechanisms = \
        [2.5100, 2.4354, 0.0879]

    # Finally write the files to a folder. If no output directory is given, a
    # dictionary containing all the files will be returned.
    gen.write(format="ses3d_4_0", output_dir="output")
    print "Written files to 'output' folder."


def test_adding_stations_as_URLs():
    """
    StationXML should be downloaded if necessary.

    Mock the actual downloading.
    """
    stations = [
        {"id": "HT.HORT",
         "latitude": 40.5978,
         "longitude": 23.0995,
         "elevation_in_m": 925.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.LIT",
         "latitude": 40.1003,
         "longitude": 22.489,
         "elevation_in_m": 568.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.PAIG",
         "latitude": 39.9363,
         "longitude": 23.6768,
         "elevation_in_m": 213.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.SOH",
         "latitude": 40.8206,
         "longitude": 23.3556,
         "elevation_in_m": 728.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.THE",
         "latitude": 40.6319,
         "longitude": 22.9628,
         "elevation_in_m": 124.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.XOR",
         "latitude": 39.366,
         "longitude": 23.192,
         "elevation_in_m": 500.0,
         "local_depth_in_m": 0.0}]

    station_xml_file = os.path.join(DATA, "station.xml")
    with open(station_xml_file, "rb") as fh:
        data = fh.read()

    gen = InputFileGenerator()

    # Mock the URL
    with mock.patch("urllib2.urlopen") as patch:
        class Dummy(object):
            def read(self):
                return data
        patch.return_value = Dummy()
        gen.add_stations("http://some_url.com")

    patch.assert_called_once_with("http://some_url.com")
    assert sorted(stations) == sorted(gen._stations)


def test_event_filter():
    """
    Tests the filtering of the events.

    This is a bit funky: if any filter is given, all events that do not have
    an event_id will be removed.
    """
    events = \
        [{"latitude": 45.0,
          "longitude": 12.1,
          "depth_in_km": 13.0,
          "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
          "m_rr": -2.11e+18,
          "m_tt": -4.22e+19,
          "m_pp": 4.43e+19,
          "m_rt": -9.35e+18,
          "m_rp": -8.38e+18,
          "m_tp": -6.44e+18,
          "description": "FICTIONAL EVENT IN BAVARIA",
          "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
         {"latitude": 13.93,
          "longitude": -92.47,
          "depth_in_km": 28.7,
          "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
          "m_rr": 1.02e+20,
          "m_tt": -7.96e+19,
          "m_pp": -2.19e+19,
          "m_rt": 6.94e+19,
          "m_rp": -4.08e+19,
          "m_tp": 4.09e+19,
          "description": "GUATEMALA",
          "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]

    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")
    gen = InputFileGenerator()
    gen.add_events([event_file_1, event_file_2])

    assert sorted(gen._events) == sorted(events)

    # With no filter applied, all events should be available.
    assert sorted(gen._filtered_events) == sorted(gen._events)

    # Events filters are a simple list of URLS.
    gen.event_filter = ["smi:local/Event/2013-01-07T13:58:41.209477"]
    # Only the last event should now be available.
    assert sorted(gen._filtered_events) == sorted(events[1:])

    # Removing the filter should make the missing events reappear.
    gen.event_filter = None
    assert sorted(gen._filtered_events) == sorted(gen._events)
    gen.event_filter = []
    assert sorted(gen._filtered_events) == sorted(gen._events)


def test_config_raises_error_if_wrong_type():
    """
    The configuration method should raise in case a wrong type is added.
    """
    gen = InputFileGenerator()

    with pytest.raises(ValueError):
        gen.add_configuration("something")

    # The same applies to JSON data that is not a JSON object but a list.
    with pytest.raises(ValueError):
        gen.add_configuration(json.dumps([{"something": "new"}]))


def test_station_filter():
    """
    Tests the filtering of the stations.
    """
    stations = [
        {"id": "HT.HORT",
         "latitude": 40.5978,
         "longitude": 23.0995,
         "elevation_in_m": 925.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.LIT",
         "latitude": 40.1003,
         "longitude": 22.489,
         "elevation_in_m": 568.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.PAIG",
         "latitude": 39.9363,
         "longitude": 23.6768,
         "elevation_in_m": 213.0,
         "local_depth_in_m": 0.0},
        {"id": "HT.SOH",
         "latitude": 40.8206,
         "longitude": 23.3556,
         "elevation_in_m": 728.0,
         "local_depth_in_m": 0.0},
        {"id": "AA.THE",
         "latitude": 40.6319,
         "longitude": 22.9628,
         "elevation_in_m": 124.0,
         "local_depth_in_m": 0.0},
        {"id": "BL.XOR",
         "latitude": 39.366,
         "longitude": 23.192,
         "elevation_in_m": 500.0,
         "local_depth_in_m": 0.0}]

    gen = InputFileGenerator()
    gen.add_stations(stations)

    # With no filter applied, all stations should be available.
    assert sorted(gen._filtered_stations) == sorted(gen._stations)

    # Wildcards are ok.
    gen.station_filter = ["HT.*", "AA.*"]
    # Only the last station should not be available.
    assert sorted(gen._filtered_stations) == sorted(stations[:-1])

    # Removing the filter should make the missing stations reappear.
    gen.station_filter = None
    assert sorted(gen._filtered_stations) == sorted(gen._stations)
    gen.station_filter = []
    assert sorted(gen._filtered_stations) == sorted(gen._stations)


def test_adding_stations_as_SAC_files():
    """
    Tests adding stations as SAC files.
    """
    sac_file = os.path.join(DATA, "example.sac")
    gen = InputFileGenerator()
    gen.add_stations(sac_file)

    assert gen._stations[0]["id"] == "IU.ANMO"
    assert round(gen._stations[0]["latitude"] - 34.94598, 5) == 0
    assert round(gen._stations[0]["longitude"] - -106.45713, 5) == 0
    assert round(gen._stations[0]["elevation_in_m"] - 1671.0, 5) == 0
    assert round(gen._stations[0]["local_depth_in_m"] - 145.0, 5) == 0


def test_adding_a_single_station_dictionary():
    """
    Tests adding a single station dictionary.
    """
    station = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0}
    gen = InputFileGenerator()
    gen.add_stations(station)
    assert [station] == gen._stations


def test_all_files_have_an_empty_last_line():
    """
    Tests that all files have an empty last line.
    """
    station = {
        "id": "KO.ADVT",
        "latitude": 41.0,
        "longitude": 33.1234,
        "elevation_in_m": 10
    }
    event = {
        "latitude": 39.260,
        "longitude": 41.040,
        "depth_in_km": 5.0,
        "origin_time": UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": 1.0e16,
        "m_tt": 1.0e16,
        "m_pp": 1.0e16,
        "m_rt": 0.0,
        "m_rp": 0.0,
        "m_tp": 0.0
    }

    gen = InputFileGenerator()
    gen.add_stations(station)
    gen.add_events(event)

    # Configure it.
    gen.config.number_of_time_steps = 4000
    gen.config.time_increment_in_s = 0.13
    gen.config.output_folder = "../DATA/OUTPUT/1.8s/"
    gen.config.mesh_min_latitude = 34.1
    gen.config.mesh_max_latitude = 42.9
    gen.config.mesh_min_longitude = 23.1
    gen.config.mesh_max_longitude = 42.9
    gen.config.mesh_min_depth_in_km = 0.0
    gen.config.mesh_max_depth_in_km = 471.0
    gen.config.nx_global = 66
    gen.config.ny_global = 108
    gen.config.nz_global = 28
    gen.config.px = 3
    gen.config.py = 4
    gen.config.pz = 4
    gen.config.source_time_function = np.linspace(1.0, 0.0, 4000)
    gen.config.is_dissipative = False

    # Write the input files to a dictionary.
    input_files = gen.write(format="ses3d_4_0")
    for input_file in input_files.itervalues():
        assert input_file.endswith("\n\n")


def test_adding_sac_file_without_local_depth():
    """
    This SAC file has no local depth, which should be ok.
    """
    sac_file = os.path.join(DATA, "example_without_local_depth.sac")
    gen = InputFileGenerator()
    gen.add_stations(sac_file)

    assert gen._stations[0]["id"] == "IU.ANMO"
    assert round(gen._stations[0]["latitude"] - 34.94598, 5) == 0
    assert round(gen._stations[0]["longitude"] - -106.45713, 5) == 0
    assert round(gen._stations[0]["elevation_in_m"] - 1671.0, 5) == 0
    # Local depth will be set to 0 in case it is not available.
    assert gen._stations[0]["local_depth_in_m"] == 0


def test_adding_a_single_station_as_JSON():
    """
    Asserts that a single station can be added as JSON.
    """
    station = {
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0}
    json_station = json.dumps(station)
    gen = InputFileGenerator()
    gen.add_stations(json_station)
    assert [station] == gen._stations


def test_station_dicts_with_invalid_information_raise():
    """
    Station dicts containing values that cannot be converted to the correct
    types should raise.
    """
    # All the coordinate values should be converted to floats.
    station = {"id": "BW.FURT",
               "latitude": "A",
               "longitude": 2,
               "elevation_in_m": 3,
               "local_depth_in_m": 4}
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_stations(station)


def test_adding_dict_with_missing_keys():
    """
    Tests that adding an event dictionary with missing keys raises.
    """
    event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}
    gen = InputFileGenerator()
    with pytest.raises(ValueError):
        gen.add_events(event)


def test_adding_station_as_list_of_dictionaries():
    """
    Checks that stations can also be passed as a list of dictionaries.
    """
    stations = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0,
         "local_depth_in_m": 2.0}]
    gen = InputFileGenerator()
    gen.add_stations(stations)
    assert sorted(stations) == sorted(gen._stations)


def test_adding_stations_as_SEED_files_via_BytesIO():
    """
    Tests adding stations as SEED files read from BytesIO.
    """
    seed_file = os.path.join(DATA, "dataless.seed.BW_FURT")

    with open(seed_file, "rb") as fh:
        seed_file_mem_file = io.BytesIO(fh.read())

    gen = InputFileGenerator()
    gen.add_stations(seed_file_mem_file)

    # Only a single station is expected here.
    assert gen._stations == \
        [{"id": "BW.FURT",
          "latitude": 48.162899,
          "longitude": 11.2752,
          "elevation_in_m": 565.0,
          "local_depth_in_m": 0.0}]


def test_adding_single_event_dictionary():
    """
    Adding a single event dictionary.
    """
    event = {
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "description": "Some description",
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}
    gen = InputFileGenerator()
    gen.add_events(event)
    assert gen._events == [event]


def test_adding_multiple_stations_as_JSON():
    """
    Tests that stations can be added as a JSON list.
    """
    stations = [{
        "id": "BW.FURT",
        "latitude": 48.162899,
        "longitude": 11.2752,
        "elevation_in_m": 565.0,
        "local_depth_in_m": 10.0},
        {"id": "BW.RJOB",
         "latitude": 47.737167,
         "longitude": 12.795714,
         "elevation_in_m": 860.0,
         "local_depth_in_m": 2.0}]
    json_stations = json.dumps(stations)
    gen = InputFileGenerator()
    gen.add_stations(json_stations)
    assert sorted(stations) == sorted(gen._stations)


def test_event_filter_removes_everything_without_an_id():
    """
    An applied event filter will remove all events without an id.
    """
    events = [{
        "latitude": 45.0,
        "longitude": 12.1,
        "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18,
        "description": "Some description"
    }, {
        "latitude": 13.93,
        "longitude": -92.47,
        "depth_in_km": 28.7,
        "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20,
        "m_tt": -7.96e+19,
        "m_pp": -2.19e+19,
        "m_rt": 6.94e+19,
        "m_rp": -4.08e+19,
        "m_tp": 4.09e+19,
        "description": None}]
    gen = InputFileGenerator()
    gen.add_events(events)

    assert sorted(gen._filtered_events) == sorted(events)

    # Applying a filter will remove everything.
    gen.event_filter = ["smi://some/url"]
    assert sorted(gen._filtered_events) == []

    # Removing the filter should make the missing events reappear.
    gen.event_filter = None
    assert sorted(gen._filtered_events) == sorted(gen._events)
    gen.event_filter = []
    assert sorted(gen._filtered_events) == sorted(gen._events)


def test_event_dictionary_automatic_type_conversion():
    """
    The values of the event dictionary should also undergo automatic type
    conversion.
    """
    event = {
        "description": 1,
        "latitude": "1",
        "longitude": "2",
        "depth_in_km": "3",
        "origin_time": "2012-01-01T00:00:00.000000Z",
        "m_rr": "-2.11e+18",
        "m_tt": "-4.22e+19",
        "m_pp": "4.43e+19",
        "m_rt": "-9.35e+18",
        "m_rp": "-8.38e+18",
        "m_tp": "-6.44e+18"}
    gen = InputFileGenerator()
    gen.add_events(event)
    assert type(gen._events[0]["latitude"]) == float
    assert type(gen._events[0]["longitude"]) == float
    assert type(gen._events[0]["depth_in_km"]) == float
    assert type(gen._events[0]["origin_time"]) == obspy.UTCDateTime
    assert type(gen._events[0]["m_rr"]) == float
    assert type(gen._events[0]["m_tt"]) == float
    assert type(gen._events[0]["m_pp"]) == float
    assert type(gen._events[0]["m_rt"]) == float
    assert type(gen._events[0]["m_rp"]) == float
    assert type(gen._events[0]["m_tp"]) == float

    assert gen._events == [{
        "description": "1",
        "latitude": 1.0,
        "longitude": 2.0,
        "depth_in_km": 3.0,
        "origin_time": obspy.UTCDateTime(2012, 1, 1),
        "m_rr": -2.11e+18,
        "m_tt": -4.22e+19,
        "m_pp": 4.43e+19,
        "m_rt": -9.35e+18,
        "m_rp": -8.38e+18,
        "m_tp": -6.44e+18}]


def test_reading_QuakeML_from_BytesIO():
    """
    Tests the reading of QuakeML from BytesIO.
    """
    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")

    with open(event_file_1, "rb") as fh:
        event_file_1_mem = io.BytesIO(fh.read())

    with open(event_file_2, "rb") as fh:
        event_file_2_mem = io.BytesIO(fh.read())

    gen = InputFileGenerator()
    gen.add_events([event_file_1_mem, event_file_2_mem])

    # Sort to be able to compare.
    assert sorted(gen._events) == \
        [{"description": "FICTIONAL EVENT IN BAVARIA",
          "latitude": 45.0,
          "longitude": 12.1,
          "depth_in_km": 13.0,
          "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
          "m_rr": -2.11e+18,
          "m_tt": -4.22e+19,
          "m_pp": 4.43e+19,
          "m_rt": -9.35e+18,
          "m_rp": -8.38e+18,
          "m_tp": -6.44e+18,
          "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
         {"description": "GUATEMALA",
          "latitude": 13.93,
          "longitude": -92.47,
          "depth_in_km": 28.7,
          "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
          "m_rr": 1.02e+20,
          "m_tt": -7.96e+19,
          "m_pp": -2.19e+19,
          "m_rt": 6.94e+19,
          "m_rp": -4.08e+19,
          "m_tp": 4.09e+19,
          "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]