def test_station_filter():
    """
    Tests the filtering of the stations.
    """
    stations = [
        {"id": "HT.HORT", "latitude": 40.5978, "longitude": 23.0995,
         "elevation_in_m": 925.0, "local_depth_in_m": 0.0},
        {"id": "HT.LIT", "latitude": 40.1003, "longitude": 22.489,
         "elevation_in_m": 568.0, "local_depth_in_m": 0.0},
        {"id": "HT.PAIG", "latitude": 39.9363, "longitude": 23.6768,
         "elevation_in_m": 213.0, "local_depth_in_m": 0.0},
        {"id": "HT.SOH", "latitude": 40.8206, "longitude": 23.3556,
         "elevation_in_m": 728.0, "local_depth_in_m": 0.0},
        {"id": "AA.THE", "latitude": 40.6319, "longitude": 22.9628,
         "elevation_in_m": 124.0, "local_depth_in_m": 0.0},
        {"id": "BL.XOR", "latitude": 39.366, "longitude": 23.192,
         "elevation_in_m": 500.0, "local_depth_in_m": 0.0}]

    # Dicts are not orderable on Python 3, so compare the collections
    # order-insensitively by sorting on the unique station id.
    by_id = lambda station: station["id"]

    gen = InputFileGenerator()
    gen.add_stations(stations)

    # No applied filter should just result in the same stations being
    # available everywhere.
    assert sorted(gen._filtered_stations, key=by_id) == \
        sorted(gen._stations, key=by_id)

    # Wildcards are ok.
    gen.station_filter = ["HT.*", "AA.*"]
    # Only the last station should not be available.
    assert sorted(gen._filtered_stations, key=by_id) == \
        sorted(stations[:-1], key=by_id)

    # Removing the filter should make the missing stations reappear.
    gen.station_filter = None
    assert sorted(gen._filtered_stations, key=by_id) == \
        sorted(gen._stations, key=by_id)
    # An empty filter list must behave like no filter at all.
    gen.station_filter = []
    assert sorted(gen._filtered_stations, key=by_id) == \
        sorted(gen._stations, key=by_id)
def test_station_filter_JSON():
    """
    station filters can be set as JSON.
    """
    # Assigning a JSON-encoded string must decode back to the original
    # list when the property is read again.
    expected = ["BW.HH*", "NE.*"]
    generator = InputFileGenerator()
    generator.station_filter = json.dumps(expected)
    assert generator.station_filter == expected
def test_event_filter():
    """
    Tests the filtering of the events.

    This is funky. If any filter is given, it will remove all events that
    do not have an event_id.
    """
    events = [
        {"latitude": 45.0, "longitude": 12.1, "depth_in_km": 13.0,
         "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
         "m_rr": -2.11e+18, "m_tt": -4.22e+19, "m_pp": 4.43e+19,
         "m_rt": -9.35e+18, "m_rp": -8.38e+18, "m_tp": -6.44e+18,
         "description": "FICTIONAL EVENT IN BAVARIA",
         "_event_id": "smi:local/Event/2013-01-05T20:19:58.727909"},
        {"latitude": 13.93, "longitude": -92.47, "depth_in_km": 28.7,
         "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
         "m_rr": 1.02e+20, "m_tt": -7.96e+19, "m_pp": -2.19e+19,
         "m_rt": 6.94e+19, "m_rp": -4.08e+19, "m_tp": 4.09e+19,
         "description": "GUATEMALA",
         "_event_id": "smi:local/Event/2013-01-07T13:58:41.209477"}]

    # Dicts are not orderable on Python 3, so compare the collections
    # order-insensitively by sorting on the unique event id.
    by_id = lambda event: event["_event_id"]

    event_file_1 = os.path.join(DATA, "event1.xml")
    event_file_2 = os.path.join(DATA, "event2.xml")

    gen = InputFileGenerator()
    gen.add_events([event_file_1, event_file_2])
    assert sorted(gen._events, key=by_id) == sorted(events, key=by_id)

    # No applied filter should just result in the same events being
    # available everywhere.
    assert sorted(gen._filtered_events, key=by_id) == \
        sorted(gen._events, key=by_id)

    # Event filters are a simple list of URLs.
    gen.event_filter = ["smi:local/Event/2013-01-07T13:58:41.209477"]
    # Only the last event should now be available.
    assert sorted(gen._filtered_events, key=by_id) == \
        sorted(events[1:], key=by_id)

    # Removing the filter should make the missing events reappear.
    gen.event_filter = None
    assert sorted(gen._filtered_events, key=by_id) == \
        sorted(gen._events, key=by_id)
    # BUGFIX: this used to set gen.station_filter (copy-paste from the
    # station test); the empty-list case must exercise the event filter.
    gen.event_filter = []
    assert sorted(gen._filtered_events, key=by_id) == \
        sorted(gen._events, key=by_id)
def test_event_filter_removed_everything_without_an_id():
    """
    An applied event filter will remove all events without an id.
    """
    events = [{
        "latitude": 45.0, "longitude": 12.1, "depth_in_km": 13.0,
        "origin_time": obspy.UTCDateTime(2012, 4, 12, 7, 15, 48, 500000),
        "m_rr": -2.11e+18, "m_tt": -4.22e+19, "m_pp": 4.43e+19,
        "m_rt": -9.35e+18, "m_rp": -8.38e+18, "m_tp": -6.44e+18,
        "description": "Some description"
    }, {
        "latitude": 13.93, "longitude": -92.47, "depth_in_km": 28.7,
        "origin_time": obspy.UTCDateTime(2012, 11, 7, 16, 35, 55, 200000),
        "m_rr": 1.02e+20, "m_tt": -7.96e+19, "m_pp": -2.19e+19,
        "m_rt": 6.94e+19, "m_rp": -4.08e+19, "m_tp": 4.09e+19,
        "description": None}]

    # These events carry no "_event_id" and dicts are not orderable on
    # Python 3; sort on latitude, which is unique for this fixture.
    by_lat = lambda event: event["latitude"]

    gen = InputFileGenerator()
    gen.add_events(events)
    assert sorted(gen._filtered_events, key=by_lat) == \
        sorted(events, key=by_lat)

    # Applying a filter will remove everything, as no event has an id to
    # match against.
    gen.event_filter = ["smi://some/url"]
    assert sorted(gen._filtered_events, key=by_lat) == []

    # Removing the filter should make the missing events reappear.
    gen.event_filter = None
    assert sorted(gen._filtered_events, key=by_lat) == \
        sorted(gen._events, key=by_lat)
    # BUGFIX: this used to set gen.station_filter (copy-paste from the
    # station test); the empty-list case must exercise the event filter.
    gen.event_filter = []
    assert sorted(gen._filtered_events, key=by_lat) == \
        sorted(gen._events, key=by_lat)
def compute(self):
    """
    Build solver input files for each event listed in the user
    configuration and register the generated files as outputs.

    Reads the JSON solver configuration from
    ``self.parameters["solver_conf_file"]``, a QuakeML document from
    ``self.parameters["quakeml"]`` and a station description from
    ``self.parameters["stations_file"]``, then writes one input-file set
    per configured event id via ``gen.write`` and announces the results
    through ``self.addOutput``.
    """
    gen = InputFileGenerator()
    # NOTE(review): the file handle from open() is never closed; consider
    # a "with" block.
    userconf = json.load(open(self.parameters["solver_conf_file"]))
    fields = userconf["fields"]
    for x in fields:
        # Each field is a {"name": ..., "value": ...} mapping whose value
        # is converted by strToBool().
        gen.add_configuration({x["name"]: self.strToBool(x["value"])})
    # Read the raw QuakeML document (the name "events" is reused below
    # for the filtered event list).
    with open(self.parameters["quakeml"], "r") as events:
        quakeml = events.read()
    #unicode_qml=quakeml.decode('utf-8')
    #data = unicode_qml.encode('ascii','ignore')
    ## cat = readQuakeML(quakeml)
    events = []
    #cat = obspy.readEvents(data)
    # Remove all events with no moment tensor.
    # NOTE(review): every assignment to "cat" above is commented out, so
    # the loop below raises NameError unless "cat" is defined elsewhere --
    # the catalog parsing needs to be restored; verify which of the
    # commented lines was meant to be live.
    for event in cat:
        for fm in event.focal_mechanisms:
            if fm.moment_tensor and fm.moment_tensor.tensor:
                events.append(event)
                break
    cat.events = events
    gen.add_events(cat)
    evn = 0
    outputdir = ""
    # One solver input set per configured event id.
    for x in userconf["events"]:
        gen.event_filter = [x]
        if self.parameters["station_format"] == "stationXML":
            gen.add_stations(self.parameters["stations_file"])
        if self.parameters["station_format"] == "points":
            # Plain-text station list: one header line, then rows split on
            # single spaces. Column order assumed from the indices below:
            # station, network, longitude, latitude, elevation, depth --
            # TODO confirm against the producer of this file.
            stlist = []
            with open(self.parameters["stations_file"]) as f:
                # Flips to True once the header line has been skipped.
                k = False
                for line in f:
                    if (k == False):
                        k = True
                    else:
                        station = {}
                        l = line.strip().split(" ")
                        station.update({"id": l[1] + "." + l[0]})
                        station.update({"latitude": float(l[3])})
                        station.update({"longitude": float(l[2])})
                        station.update({"elevation_in_m": float(l[4])})
                        station.update({"local_depth_in_m": float(l[5])})
                        stlist.append(station)
            gen.add_stations(stlist)
        gen.station_filter = userconf["stations"]
        outputdir = self.outputdest + userconf["runId"] + "/" + userconf[
            "runId"] + "_" + str(evn) + "/DATA"
        output_files = gen.write(format=userconf["solver"],
                                 output_dir=outputdir)
        locations = []
        # NOTE(review): "x" is reused here, shadowing the event-id loop
        # variable of the enclosing loop.
        for x in output_files.keys():
            locations.append("file://" + socket.gethostname() + outputdir +
                             "/" + x)
        self.addOutput(gen._filtered_events, location=locations,
                       metadata=self.extractEventMetadata(
                           outputdir, gen._filtered_events),
                       control={"con:immediateAccess": "true"})
        evn += 1
    # NOTE(review): assumed to run once after the loop, using the last
    # outputdir/locations values -- confirm the intended indentation.
    self.addOutput(outputdir, location=locations,
                   metadata={"to_xdecompose": str(outputdir)},
                   control={"con:immediateAccess": "true"})