def getCatData(date, opt):

    """
    Download data from IRIS or Earthworm waveserver with padding and filter it.
    This is a specialized version of getData() for catalog events, pulling a
    smaller amount of time around a known event.

    date: UTCDateTime of known catalog event
    opt: Options object describing station/run parameters

    Returns ObsPy stream object
    """

    # Choose where data are downloaded automatically via options
    # Download data with padding to account for triggering algorithm
    # Make overlap symmetric
    if opt.server == "IRIS":
        client = Client("IRIS")
        st = client.get_waveforms(opt.network, opt.station, opt.location,
                                  opt.channel, date - opt.atrig,
                                  date + 3*opt.atrig)
    else:
        client = EWClient(opt.server, opt.port)
        st = client.getWaveform(opt.network, opt.station, opt.location,
                                opt.channel, date - opt.atrig,
                                date + 3*opt.atrig)

    st = st.detrend()  # can create noise artifacts??
    st = st.merge(method=1, fill_value='interpolate')
    st = st.filter("highpass", freq=opt.fhigh, corners=2, zerophase=True)

    return st
def getIRIS(date, opt, nsec=86400):

    """
    Download data from IRIS with padding and filter it.

    date: UTCDateTime of beginning of period of interest
    opt: Options object describing station/run parameters
    nsec: Number of seconds to download without padding
        (default 86400 s, or 1 day)

    Returns ObsPy stream object
    """

    client = Client("IRIS")

    # Download data with padding to account for triggering algorithm
    st = client.get_waveforms(opt.network, opt.station, opt.location,
                              opt.channel, date - opt.ptrig,
                              date + nsec + opt.atrig)

    st = st.detrend()  # can create noise artifacts??
    st = st.merge(method=1, fill_value='interpolate')
    st = st.filter("highpass", freq=opt.fhigh, corners=2, zerophase=True)

    return st
def getIRIS(date, sta, chan, net, loc="--", nsec=86400, ptrig=10.0,
            atrig=20.0, fmin=1.0, fmax=10.0):

    """
    Download data from IRIS with padding and filter it.

    date: UTCDateTime of beginning of period of interest
    sta: String of station
    chan: String of channel
    net: String of network
    loc: String of location (default "--")
    nsec: Number of seconds to download without padding
        (default 86400 s, or 1 day)
    ptrig: Length of window to keep prior to trigger (default 10.0 s)
    atrig: Length of window to keep after trigger (default 20.0 s)
    fmin: Lower bound of bandpass filter (default 1.0 Hz)
    fmax: Upper bound of bandpass filter (default 10.0 Hz)

    Returns ObsPy stream object
    """

    client = Client("IRIS")

    # Download data with padding to account for triggering algorithm
    st = client.get_waveforms(net, sta, loc, chan, date - ptrig,
                              date + nsec + atrig)

    st = st.detrend()
    st = st.merge(method=1, fill_value=0)
    st = st.filter("bandpass", freqmin=fmin, freqmax=fmax, corners=2,
                   zerophase=True)

    return st
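# Usage sketch (illustrative, not from the original source): pull one hour of
# bandpass-filtered data for a hypothetical station with the explicit-argument
# getIRIS() above. Assumes "from obspy.fdsn import Client" and
# "from obspy import UTCDateTime" are in scope, as the functions above require.
if __name__ == "__main__":
    day = UTCDateTime("2014-07-01T00:00:00")
    st = getIRIS(day, "ANMO", "BHZ", "IU", loc="00", nsec=3600)
    print(st)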
def get_event_info(starttime, endtime, streams):
    events = []
    arrivals = {}
    try:
        client = FDSNClient("NERIES")
        events = client.get_events(starttime=starttime - 20 * 60,
                                   endtime=endtime)
        for ev in events[::-1]:
            has_arrivals = False
            origin = ev.origins[0]
            origin_time = origin.time
            lon1 = origin.longitude
            lat1 = origin.latitude
            depth = abs(origin.depth / 1e3)
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                # Drop this event; removing the current event (rather than
                # always the last one, as before) keeps the removal correct
                # when an earlier event in the reversed pass was retained.
                events.events.remove(ev)
    except Exception as e:
        msg = ("Problem while fetching events or determining theoretical "
               "phases: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
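# Illustrative driver for get_event_info() (assumed, not part of the original
# source). `streams` must be a list of ObsPy streams whose traces already
# carry stats.coordinates, as the station loop above requires; UTCDateTime is
# assumed imported from obspy alongside the FDSNClient used above.
def example_get_event_info(streams):
    t1 = UTCDateTime("2012-01-01T00:00:00")
    t2 = UTCDateTime("2012-01-01T01:00:00")
    events, arrivals, msg = get_event_info(t1, t2, streams)
    if msg is not None:
        print(msg)
    return events, arrivals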
def setUp(self):
    # directory where the test files are located
    self.path = os.path.dirname(__file__)
    self.datapath = os.path.join(self.path, "data")
    self.client = Client(base_url="IRIS", user_agent=USER_AGENT)
    self.client_auth = \
        Client(base_url="IRIS", user_agent=USER_AGENT,
               user="******", password="******")
def get_events():
    try:
        return readEvents(evname)
    except:
        pass
    client = Client()
    events = client.get_events(starttime=t1, endtime=t2, latitude=lat,
                               longitude=lon, minradius=20, maxradius=100,
                               minmagnitude=6.)
    events.write(evname, 'QUAKEML')
    return events
def get_events():
    print('Read event file')
    try:
        return readEvents(evname)
    except:
        pass
    client = FSDNClient('NERIES')
    events = client.get_events(**event_kwargs)
    events.events.sort(key=lambda e: e.origins[0].time)
    events.write(evname, 'QUAKEML')
    return events
def get_events(self):
    """
    Method to fetch updated list of events to use in plot.
    """
    with self.lock:
        start = min([tr.stats.starttime for tr in self.stream])
        end = max([tr.stats.endtime for tr in self.stream])
    neries_client = Client("NERIES")
    events = neries_client.get_events(starttime=start, endtime=end,
                                      minmagnitude=self.args.events)
    return events
def getepidata(event_lat, event_lon, event_time, tstart=-5., tend=200.,
               minradiuskm=0., maxradiuskm=20., channels='*', location='*',
               source='IRIS'):
    """
    Automatically pull existing data within a certain distance of the
    epicenter (or any lat/lon coordinates) and attach station coordinates
    to data.

    USAGE
    st = getepidata(event_lat, event_lon, event_time, tstart=-5., tend=200.,
                    minradiuskm=0., maxradiuskm=20., channels='*',
                    location='*', source='IRIS')

    INPUTS
    event_lat = latitude of event in decimal degrees
    event_lon = longitude of event in decimal degrees
    event_time = Event time in UTC in any format obspy's UTCDateTime can
        parse - e.g. '2016-02-05T19:57:26'
    tstart = number of seconds to add to event time for start time of data
        (use negative number to start before event_time)
    tend = number of seconds to add to event time for end time of data
    minradiuskm = minimum radius (km) to search for data
    maxradiuskm = maximum radius (km) to search for data
    channels = 'strong motion' to get all strong motion channels (excluding
        low sample rate ones), 'broadband' to get all broadband instruments,
        'short period' for all short period channels; otherwise a single line
        of comma separated channel codes, * wildcards are okay,
        e.g. channels = '*N*,*L*'
    location = comma separated list of location codes allowed, or '*' for
        all location codes
    source = FDSN source, 'IRIS', 'NCEDC', 'GEONET' etc., see list here
        http://docs.obspy.org/archive/0.10.2/packages/obspy.fdsn.html

    OUTPUTS
    st = obspy stream containing data from within requested area
    """
    event_time = UTCDateTime(event_time)
    client = FDSN_Client(source)

    if channels.lower() == 'strong motion':
        channels = 'EN*,HN*,BN*,EL*,HL*,BL*'
    elif channels.lower() == 'broadband':
        channels = 'BH*,HH*'
    elif channels.lower() == 'short period':
        channels = 'EH*'
    else:
        channels = channels.replace(' ', '')  # Get rid of spaces

    t1 = UTCDateTime(event_time) + tstart
    t2 = UTCDateTime(event_time) + tend

    # Approximate conversion from km to degrees (111.32 km per degree)
    inventory = client.get_stations(latitude=event_lat, longitude=event_lon,
                                    minradius=minradiuskm/111.32,
                                    maxradius=maxradiuskm/111.32,
                                    channel=channels, level='channel',
                                    startbefore=t1, endafter=t2)
    temp = inventory.get_contents()
    netnames = temp['networks']
    stas = temp['stations']
    stanames = [n.split('.')[1].split()[0] for n in stas]

    st = getdata(','.join(unique_list(netnames)),
                 ','.join(unique_list(stanames)),
                 location, channels, t1, t2, attach_response=True,
                 clientname=source)

    if st is None:
        print('No data returned')
        return

    for trace in st:
        try:
            coord = inventory.get_coordinates(trace.id)
            trace.stats.coordinates = AttribDict({
                'latitude': coord['latitude'],
                'longitude': coord['longitude'],
                'elevation': coord['elevation']})
        except:
            print('Could not attach coordinates for %s' % trace.id)

    return st
def __init__(self, *args, **kwargs):
    """
    setUpClass() would be better suited for the task at hand but is not
    supported by Python 2.6.
    """
    super(ClientTestCase, self).__init__(*args, **kwargs)
    # directory where the test files are located
    self.path = os.path.dirname(__file__)
    self.datapath = os.path.join(self.path, "data")
    self.client = Client(base_url="IRIS", user_agent=USER_AGENT)
    self.client_auth = Client(base_url="IRIS", user_agent=USER_AGENT,
                              user="******", password="******")
def queryData(self):
    # code from IRIS client
    # Here we pull the data
    client = Client("IRIS")
    DupStations = []
    DupLocations = []
    DupChannels = []
    self.STAWILD = False
    self.LOCWILD = False
    self.CHANWILD = False

    try:
        requestArray = [(self.network, self.station, self.location,
                         self.channel, self.startTime, self.endTime)]
        print
        if self.debug:
            print(requestArray)
            print
        self.st = client.get_waveforms_bulk(requestArray)
        for self.tr in self.st:
            # Here we remove the M data quality and go with D
            self.tr.stats.mseed['dataquality'] = 'D'
            if self.debug:
                # Here is a trace we have
                print(self.tr.stats)
            if self.station == '*':
                self.STAWILD = True
                DupStations.append(self.tr.stats.station)
            elif self.station != '*':
                self.STAWILD = False
            if self.location == '*':
                self.LOCWILD = True
                DupLocations.append(self.tr.stats.location)
            elif self.location != '*':
                self.LOCWILD = False
            if self.channel == '*':
                self.CHANWILD = True
                DupChannels.append(self.tr.stats.channel)
            elif self.channel != '*':
                self.CHANWILD = False
    except:
        print 'Trouble getting data'
        sys.exit(0)

    # takes duplicate stations out of list
    self.stations = list(set(DupStations))
    self.locations = list(set(DupLocations))
    self.channels = list(set(DupChannels))
    print self.stations
    print self.locations
    print self.channels

    # Now call code to store streams in mseed files
    self.storeMSEED()
def get_inventory():
    try:
        return read_inventory(invname)
    except:
        pass
    client = Client('GFZ')
    net, sta, loc, cha = seed_id.split('.')
    inv = client.get_stations(starttime=t1, endtime=t2, network=net,
                              station=sta, location=loc, channel=cha,
                              level='channel')
    # latitude=lat, longitude=lon, maxradius=10)
    inv.write(invname, 'STATIONXML')
    return inv
def data_download(stations, starttime, endtime, event_name):
    print "\n========================================"
    print "event:", event_name
    print "time:", starttime, endtime

    waveforms_folder = "waveforms/" + event_name
    stationxml_folder = "stationxml/" + event_name
    c = Client("IRIS")

    if not os.path.exists(waveforms_folder):
        os.makedirs(waveforms_folder)
    if not os.path.exists(stationxml_folder):
        os.makedirs(stationxml_folder)

    for network, station in stations:
        ### First download waveforms.
        filename = os.path.join(waveforms_folder,
                                "%s.%s.mseed" % (network, station))
        if os.path.exists(filename):
            continue
        try:
            c.get_waveforms(network=network, station=station, location="*",
                            channel="BH?", starttime=starttime,
                            endtime=endtime, filename=filename)
        except Exception as e:
            print("Failed to download %s.%s due to %s" %
                  (network, station, str(e)))
            continue
        print("Successfully downloaded %s." % filename)

        ###
        ### Then download stationxml file
        stationxml_filename = os.path.join(stationxml_folder,
                                           "%s.%s.xml" % (network, station))
        if os.path.exists(stationxml_filename):
            continue
        try:
            c.get_stations(network=network, station=station, location="*",
                           channel="BH?", starttime=starttime,
                           endtime=endtime, filename=stationxml_filename,
                           level="response")
        except Exception as e:
            print("Failed to download %s.%s StationXML due to %s" % (
                network, station, str(e)))
            continue
        print("Successfully downloaded %s." % stationxml_filename)
def get_inventory():
    print('Read inventory file')
    try:
        return read_inventory(invname, 'STATIONXML')
    except:
        pass
    print('Create inventory file...')
    client = FSDNClient('ORFEUS')
    inv = client.get_stations(**inventory_kwargs)
    for net in inv:
        for sta in net[:]:
            if sta.code not in stations:
                net.stations.remove(sta)
    inv.write(invname, 'STATIONXML')
    return inv
def test_gmt_catalog():
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)

    # Changing the input_dics values for testing
    input_dics['min_date'] = UTCDateTime('2011-03-01')
    input_dics['max_date'] = UTCDateTime('2011-03-20')
    input_dics['min_mag'] = 8.9

    evlatmin = input_dics['evlatmin']
    evlatmax = input_dics['evlatmax']
    evlonmin = input_dics['evlonmin']
    evlonmax = input_dics['evlonmax']

    evlat = input_dics['evlat']
    evlon = input_dics['evlon']
    evradmax = input_dics['evradmax']
    evradmin = input_dics['evradmin']

    client_fdsn = Client_fdsn(base_url=input_dics['event_catalog'])
    events_QML = client_fdsn.get_events(
        minlatitude=evlatmin, maxlatitude=evlatmax,
        minlongitude=evlonmin, maxlongitude=evlonmax,
        latitude=evlat, longitude=evlon,
        maxradius=evradmax, minradius=evradmin,
        mindepth=input_dics['min_depth'], maxdepth=input_dics['max_depth'],
        starttime=input_dics['min_date'], endtime=input_dics['max_date'],
        minmagnitude=input_dics['min_mag'],
        maxmagnitude=input_dics['max_mag'],
        orderby='time', catalog=None,
        magnitudetype=input_dics['mag_type'])

    assert events_QML[0].preferred_origin().latitude == 38.2963
    assert events_QML[0].preferred_origin().longitude == 142.498
    assert events_QML[0].preferred_origin().depth == 19700.0
def getSlowestStation(lat, lon, depth, calc):
    client = Client("IRIS")
    inventory = client.get_stations(latitude=lat, longitude=lon,
                                    maxradius=1.5)
    lats = []
    lons = []
    codes = []
    for network in inventory.networks:
        for station in network.stations:
            lats.append(station.latitude)
            lons.append(station.longitude)
            codes.append(station.code)
    lats = np.array(lats)
    lons = np.array(lons)
    codes = np.array(codes)
    distances = []
    times = []
    for i in range(0, len(lats)):
        slat = lats[i]
        slon = lons[i]
        distance = locations2degrees(lat, lon, slat, slon)
        distances.append(distance)
        ptime, stime = calc.getTravelTimes(distance, depth)
        times.append(ptime)
    times = np.array(times)
    distances = np.array(distances)
    # Sort all stations by distance and keep the four closest
    sortidx = np.argsort(distances)
    distances = distances[sortidx]
    times = times[sortidx]
    lats = lats[sortidx]
    lons = lons[sortidx]
    codes = codes[sortidx]
    distances = distances[0:4]
    times = times[0:4] + TELEMETRY_DELAY + PROCESSING_DELAY
    lats = lats[0:4]
    lons = lons[0:4]
    codes = codes[0:4]
    # The "slowest" station is the one whose padded P arrival is latest
    idx = times.argmax()
    sdict = {'lat': lats[idx], 'lon': lons[idx], 'time': times[idx],
             'code': codes[idx]}
    return sdict
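# A minimal driver sketch for getSlowestStation() (assumed, not from the
# original source). The `calc` argument must expose
# getTravelTimes(distance_deg, depth_km) -> (p_time, s_time); here it is
# mocked with obspy's TauP, and TELEMETRY_DELAY / PROCESSING_DELAY are the
# module-level constants the function above assumes.
from obspy.taup import TauPyModel

TELEMETRY_DELAY = 2.0    # assumed value, seconds
PROCESSING_DELAY = 3.0   # assumed value, seconds

class TravelTimeCalc(object):
    """Hypothetical adapter matching the calc interface used above."""
    def __init__(self, model='iasp91'):
        self.model = TauPyModel(model=model)

    def getTravelTimes(self, distance, depth):
        arrivals = self.model.get_travel_times(source_depth_in_km=depth,
                                               distance_in_degree=distance,
                                               phase_list=['P', 'S'])
        # first arrival is P, last is S for this two-phase request
        return arrivals[0].time, arrivals[-1].time

#sdict = getSlowestStation(35.0, -118.0, 10.0, TravelTimeCalc())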
import numpy as np
import obspy
from obspy.fdsn import Client
from obspy import UTCDateTime
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib as mpl
import matplotlib.gridspec as gridspec
from mpl_toolkits.basemap import Basemap
from matplotlib import animation

#
# Set up time window and filter bands
#
client = Client("IRIS")
origintime = UTCDateTime("2011-08-23T17:51:00.000")
starttime = UTCDateTime("2011-08-23T17:57:00.000")
endtime = starttime + 300
#starttime = origintime + 200
#endtime = starttime+500
#starttime = UTCDateTime("2013-05-24T05:54:49")
#endtime = UTCDateTime("2013-05-24T06:29:49")
netlist = "TA"
stalist = "*"
loclist = ""
chanlist = "LHZ"
#pre_filt = (0.001, 0.006, 0.2, 0.4)
pre_filt = (0.001, 0.002, 0.2, 0.4)
#fmin = 0.006
fmin = 0.002
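# Plausible next step (not in the original excerpt): request the waveforms for
# the network/channel selection declared above; all names match the variables
# just defined.
st = client.get_waveforms(netlist, stalist, loclist, chanlist,
                          starttime, endtime)
print(st)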
def test_setting_service_major_version(self, download_url_mock):
    """
    Test the setting of custom major versions.
    """
    download_url_mock.return_value = (404, None)
    base_url = "http://example.com"

    # Passing an empty dictionary results in the default urls.
    major_versions = {}
    # An exception will be raised if no actual WADLs are returned.
    try:
        Client(base_url=base_url, major_versions=major_versions)
    except FDSNException:
        pass
    expected_urls = sorted([
        "%s/fdsnws/event/1/contributors" % base_url,
        "%s/fdsnws/event/1/catalogs" % base_url,
        "%s/fdsnws/event/1/application.wadl" % base_url,
        "%s/fdsnws/station/1/application.wadl" % base_url,
        "%s/fdsnws/dataselect/1/application.wadl" % base_url,
    ])
    got_urls = sorted(
        [_i[0][0] for _i in download_url_mock.call_args_list])
    self.assertEqual(expected_urls, got_urls)

    # Replace all
    download_url_mock.reset_mock()
    download_url_mock.return_value = (404, None)
    major_versions = {"event": 7, "station": 8, "dataselect": 9}
    # An exception will be raised if no actual WADLs are returned.
    try:
        Client(base_url=base_url, major_versions=major_versions)
    except FDSNException:
        pass
    expected_urls = sorted([
        "%s/fdsnws/event/7/contributors" % base_url,
        "%s/fdsnws/event/7/catalogs" % base_url,
        "%s/fdsnws/event/7/application.wadl" % base_url,
        "%s/fdsnws/station/8/application.wadl" % base_url,
        "%s/fdsnws/dataselect/9/application.wadl" % base_url,
    ])
    got_urls = sorted(
        [_i[0][0] for _i in download_url_mock.call_args_list])
    self.assertEqual(expected_urls, got_urls)

    # Replace only some
    download_url_mock.reset_mock()
    download_url_mock.return_value = (404, None)
    major_versions = {"event": 7, "station": 8}
    # An exception will be raised if no actual WADLs are returned.
    try:
        Client(base_url=base_url, major_versions=major_versions)
    except FDSNException:
        pass
    expected_urls = sorted([
        "%s/fdsnws/event/7/contributors" % base_url,
        "%s/fdsnws/event/7/catalogs" % base_url,
        "%s/fdsnws/event/7/application.wadl" % base_url,
        "%s/fdsnws/station/8/application.wadl" % base_url,
        "%s/fdsnws/dataselect/1/application.wadl" % base_url,
    ])
    got_urls = sorted(
        [_i[0][0] for _i in download_url_mock.call_args_list])
    self.assertEqual(expected_urls, got_urls)
def queryData(self):
    # code from IRIS client
    # Here we pull the data
    client = Client("IRIS")
    DupStations = []
    DupLocations = []
    DupChannels = []
    self.st = Stream()
    self.STAWILD = False
    self.LOCWILD = False
    self.CHANWILD = False

    try:
        timeout = 300
        socket.setdefaulttimeout(timeout)
        # This needs a get_waveforms call that queries data 1 hour at a time:
        # the query fails if the requested data are too bulky. It also needs
        # to include a timeout exception.
        for hourIndex in range(0, self.hours):  # this can't be days... has to be hours
            self.startTime1 = self.startTime + (hourIndex) * 1 * 60 * 60
            self.endTime1 = self.startTime + (hourIndex + 1) * 1 * 60 * 60
            requestArray = [(self.network, self.station, self.location,
                             self.channel, self.startTime1, self.endTime1)]
            self.st1 = client.get_waveforms_bulk(requestArray)
            self.st += self.st1
            print self.st
            print
        #self.st = client.get_waveforms_bulk(timeout=10, requestArray)
        for self.tr in self.st:
            # Here we remove the M data quality and go with D
            self.tr.stats.mseed['dataquality'] = 'D'
            if self.debug:
                print(self.tr.stats)
            if self.station == '*':
                self.STAWILD = True
                DupStations.append(self.tr.stats.station)
            elif self.station != '*':
                self.STAWILD = False
            if self.location == '*':
                self.LOCWILD = True
                DupLocations.append(self.tr.stats.location)
            elif self.location != '*':
                self.LOCWILD = False
            if self.channel == '*':
                self.CHANWILD = True
                DupChannels.append(self.tr.stats.channel)
            elif self.channel != '*':
                self.CHANWILD = False
    #except TimeoutError:
        #print 'Get waveform timeout, exiting...'
        #sys.exit(0)
    except:
        print 'Trouble getting data'
        sys.exit(0)

    # Takes duplicate stations out of the lists and makes station, location,
    # and channel into arrays for looping (probably an easier way, but it works)
    self.stations = list(set(DupStations))
    if self.station != '*':
        self.stations.append(self.station)
    self.locations = list(set(DupLocations))
    if self.location != '*':
        self.locations.append(self.location)
    self.channels = list(set(DupChannels))
    if self.channel != '*':
        self.channels.append(self.channel)

    print
    print "Station(s) being pulled: " + str(self.stations)
    print "Location(s) being pulled: " + str(self.locations)
    print "Channel(s) being pulled: " + str(self.channels)

    # Now call code to store streams in mseed files
    self.storeMSEED()
class ClientTestCase(unittest.TestCase):
    """
    Test cases for obspy.fdsn.client.Client.
    """
    def setUp(self):
        # directory where the test files are located
        self.path = os.path.dirname(__file__)
        self.datapath = os.path.join(self.path, "data")
        self.client = Client(base_url="IRIS", user_agent=USER_AGENT)
        self.client_auth = \
            Client(base_url="IRIS", user_agent=USER_AGENT,
                   user="******", password="******")

    def test_url_building(self):
        """
        Tests the build_url() functions.
        """
        # Application WADL
        self.assertEqual(
            build_url("http://service.iris.edu", 1, "dataselect",
                      "application.wadl"),
            "http://service.iris.edu/fdsnws/dataselect/1/application.wadl")
        self.assertEqual(
            build_url("http://service.iris.edu", 1, "event",
                      "application.wadl"),
            "http://service.iris.edu/fdsnws/event/1/application.wadl")
        self.assertEqual(
            build_url("http://service.iris.edu", 1, "station",
                      "application.wadl"),
            "http://service.iris.edu/fdsnws/station/1/application.wadl")

        # Test one parameter.
        self.assertEqual(
            build_url("http://service.iris.edu", 1, "dataselect",
                      "query", {"network": "BW"}),
            "http://service.iris.edu/fdsnws/dataselect/1/query?network=BW")
        self.assertEqual(
            build_url("http://service.iris.edu", 1, "dataselect",
                      "queryauth", {"network": "BW"}),
            "http://service.iris.edu/fdsnws/dataselect/1/queryauth?network=BW")

        # Test two parameters. Note random order, two possible results.
        self.assertTrue(
            build_url("http://service.iris.edu", 1, "dataselect",
                      "query", {"net": "A", "sta": "BC"}) in
            ("http://service.iris.edu/fdsnws/dataselect/1/query?net=A&sta=BC",
             "http://service.iris.edu/fdsnws/dataselect/1/query?sta=BC&net=A"))

        # A wrong resource_type raises a ValueError
        self.assertRaises(ValueError, build_url, "http://service.iris.edu",
                          1, "obspy", "query")

    def test_url_building_with_auth(self):
        """
        Tests the Client._build_url() method with authentication.

        Necessary on top of test_url_building test case because clients with
        authentication have to build different URLs for dataselect.
        """
        # no authentication
        got = self.client._build_url("dataselect", "query", {'net': "BW"})
        expected = "http://service.iris.edu/fdsnws/dataselect/1/query?net=BW"
        self.assertEqual(got, expected)
        # with authentication
        got = self.client_auth._build_url("dataselect", "query",
                                          {'net': "BW"})
        expected = ("http://service.iris.edu/fdsnws/dataselect/1/"
                    "queryauth?net=BW")
        self.assertEqual(got, expected)

    def test_service_discovery_iris(self):
        """
        Tests the automatic discovery of services with the IRIS endpoint.

        The test parameters are taken from IRIS' website. This will have to
        be adjusted once IRIS changes their implementation.
        """
        client = self.client
        self.assertEqual(set(client.services.keys()),
                         set(("dataselect", "event", "station",
                              "available_event_contributors",
                              "available_event_catalogs")))

        # The test sets are copied from the IRIS webpage.
        self.assertEqual(
            set(client.services["dataselect"].keys()),
            set(("starttime", "endtime", "network", "station", "location",
                 "channel", "quality", "minimumlength", "longestonly")))
        self.assertEqual(
            set(client.services["station"].keys()),
            set(("starttime", "endtime", "startbefore", "startafter",
                 "endbefore", "endafter", "network", "station", "location",
                 "channel", "minlatitude", "maxlatitude", "minlongitude",
                 "maxlongitude", "latitude", "longitude", "minradius",
                 "maxradius", "level", "includerestricted",
                 "includeavailability", "updatedafter", "matchtimeseries")))
        self.assertEqual(
            set(client.services["event"].keys()),
            set(("starttime", "endtime", "minlatitude", "maxlatitude",
                 "minlongitude", "maxlongitude", "latitude", "longitude",
                 "maxradius", "minradius", "mindepth", "maxdepth",
                 "minmagnitude", "maxmagnitude",
                 "magtype",  # XXX: Change once fixed.
                 "catalog", "contributor", "limit", "offset", "orderby",
                 "updatedafter", "includeallorigins", "includeallmagnitudes",
                 "includearrivals", "eventid",
                 "originid"  # XXX: This is currently just specified in the
                             # WADL.
                 )))

        # Also check an exemplary value in more detail.
        minradius = client.services["event"]["minradius"]
        self.assertEqual(minradius["default_value"], 0.0)
        self.assertEqual(minradius["required"], False)
        self.assertEqual(minradius["doc"], "")
        self.assertEqual(minradius["doc_title"],
                         "Specify minimum distance "
                         "from the geographic point defined by latitude and "
                         "longitude")
        self.assertEqual(minradius["type"], float)
        self.assertEqual(minradius["options"], [])

    def test_IRIS_event_catalog_availability(self):
        """
        Tests the parsing of the available event catalogs.
        """
        self.assertEqual(
            set(self.client.services["available_event_catalogs"]),
            set(("ANF", "GCMT", "TEST", "ISC", "UofW", "NEIC PDE")))

    def test_IRIS_event_contributors_availability(self):
        """
        Tests the parsing of the available event contributors.
        """
        self.assertEqual(
            set(self.client.services["available_event_contributors"]),
            set(("University of Washington", "ANF", "GCMT", "GCMT-Q",
                 "ISC", "NEIC ALERT", "NEIC PDE-W", "UNKNOWN",
                 "NEIC PDE-M", "NEIC PDE-Q")))

    def test_simple_XML_parser(self):
        """
        Tests the simple XML parsing helper function.
        """
        catalogs = parse_simple_xml("""
            <?xml version="1.0"?>
            <Catalogs>
                <total>6</total>
                <Catalog>ANF</Catalog>
                <Catalog>GCMT</Catalog>
                <Catalog>TEST</Catalog>
                <Catalog>ISC</Catalog>
                <Catalog>UofW</Catalog>
                <Catalog>NEIC PDE</Catalog>
            </Catalogs>""")
        self.assertEqual(catalogs, {"catalogs": set(("ANF", "GCMT", "TEST",
                                                     "ISC", "UofW",
                                                     "NEIC PDE"))})

    def test_IRIS_example_queries(self):
        """
        Tests the (sometimes modified) example queries given on the IRIS
        webpage.
""" client = self.client # event example queries queries = [ dict(eventid=609301), dict(starttime=UTCDateTime("2011-01-07T01:00:00"), endtime=UTCDateTime("2011-01-07T02:00:00"), catalog="NEIC PDE"), dict(starttime=UTCDateTime("2011-01-07T14:00:00"), endtime=UTCDateTime("2011-01-08T00:00:00"), minlatitude=15, maxlatitude=40, minlongitude=-170, maxlongitude=170, includeallmagnitudes=True, minmagnitude=4, orderby="magnitude"), ] result_files = ["events_by_eventid.xml", "events_by_time.xml", "events_by_misc.xml", ] for query, filename in zip(queries, result_files): got = client.get_events(**query) file_ = os.path.join(self.datapath, filename) #got.write(file_, "QUAKEML") expected = readEvents(file_) self.assertEqual(got, expected, failmsg(got, expected)) # station example queries queries = [ dict(latitude=-56.1, longitude=-26.7, maxradius=15), dict(startafter=UTCDateTime("2003-01-07"), endbefore=UTCDateTime("2011-02-07"), minlatitude=15, maxlatitude=55, minlongitude=170, maxlongitude=-170), dict(starttime=UTCDateTime("2013-01-01"), network="IU", sta="ANMO", level="channel"), dict(starttime=UTCDateTime("2013-01-01"), network="IU", sta="A*", location="00", level="channel", format="text"), ] result_files = ["stations_by_latlon.xml", "stations_by_misc.xml", "stations_by_station.xml", "stations_by_station_wildcard.xml", ] for query, filename in zip(queries, result_files): got = client.get_stations(**query) file_ = os.path.join(self.datapath, filename) #with open(file_, "wt") as fh: # fh.write(got) with open(file_) as fh: expected = fh.read() msg = failmsg(got, expected, ignore_lines=['<Created>']) self.assertEqual(msg, "", msg) # dataselect example queries queries = [ ("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000")), ("IU", "A*", "*", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:31:00.000")), ("IU", "A??", "*0", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:31:00.000")), ] result_files = ["dataselect_example.mseed", "dataselect_example_wildcards.mseed", "dataselect_example_mixed_wildcards.mseed", ] for query, filename in zip(queries, result_files): got = client.get_waveform(*query) file_ = os.path.join(self.datapath, filename) expected = read(file_) self.assertEqual(got, expected, failmsg(got, expected)) def test_authentication(self): """ Test dataselect with authentication. """ client = self.client_auth # dataselect example queries query = ("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000")) filename = "dataselect_example.mseed" got = client.get_waveform(*query) file_ = os.path.join(self.datapath, filename) expected = read(file_) self.assertEqual(got, expected, failmsg(got, expected)) def test_conflicting_params(self): """ """ self.assertRaises(FDSNException, self.client.get_stations, network="IU", net="IU") def test_help_function_with_IRIS(self): """ Tests the help function with the IRIS example. This will have to be adopted any time IRIS changes their implementation. 
""" try: client = self.client sys.stdout = StringIO() client.help() sys.stdout.close() # Capture output sys.stdout = StringIO() client.help("event") got = sys.stdout.getvalue() expected = ( "Parameter description for the 'event' service (v1.0.6) of " "'http://service.iris.edu':\n" "The service offers the following non-standard parameters:\n" " magtype (str)\n" " type of Magnitude used to test minimum and maximum " "limits (case\n insensitive)\n" " originid (int)\n" " Retrieve an event based on the unique origin ID " "numbers assigned by\n" " the IRIS DMC\n" "WARNING: The service does not offer the following standard " "parameters: magnitudetype\n" "Available catalogs: ANF, UofW, NEIC PDE, ISC, TEST, GCMT\n" "Available contributors: NEIC PDE-W, ANF, University of " "Washington, GCMT-Q, NEIC PDE-Q, UNKNOWN, NEIC ALERT, ISC, " "NEIC PDE-M, GCMT\n") # allow for changes in version number.. self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(got, expected)) # Reset. Creating a new one is faster then clearing the old one. sys.stdout.close() sys.stdout = StringIO() client.help("station") got = sys.stdout.getvalue() expected = ( "Parameter description for the 'station' service (v1.0.7) of " "'http://service.iris.edu':\n" "The service offers the following non-standard parameters:\n" " matchtimeseries (bool)\n" " Specify that the availabilities line up with " "available data. This is\n" " an IRIS extension to the FDSN specification\n") self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(got, expected)) # Reset. sys.stdout.close() sys.stdout = StringIO() client.help("dataselect") got = sys.stdout.getvalue() expected = ( "Parameter description for the 'dataselect' service (v1.0.0) " "of 'http://service.iris.edu':\n" "No derivations from standard detected\n") self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(got, expected)) sys.stdout.close() finally: sys.stdout = sys.__stdout__ def test_str_method(self): got = str(self.client) expected = ( "FDSN Webservice Client (base url: http://service.iris.edu)\n" "Available Services: 'dataselect' (v1.0.0), 'event' (v1.0.6), " "'station' (v1.0.7), 'available_event_contributors', " "'available_event_catalogs'\n\n" "Use e.g. client.help('dataselect') for the\n" "parameter description of the individual services\n" "or client.help() for parameter description of\n" "all webservices.") self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(got, expected))
from obspy import UTCDateTime
from obspy.fdsn import Client
from plotting_tools import get_coordinates

# ########################## INPUT
req_client = "RESIF"
starttime = None
endtime = None
network = "YV"
station = "*"
location = '*'
channel = '*H*'
file_name = 'list_stas_created.txt'
# ########################## END INPUT

client = Client(req_client)
if starttime:
    starttime = UTCDateTime(starttime)
if endtime:
    endtime = UTCDateTime(endtime)

inv = client.get_stations(network=network, station=station,
                          location=location, channel=channel,
                          starttime=starttime, endtime=endtime,
                          level='channel')
content = inv.get_contents()
chans = list(set(content['channels']))
chans.sort()
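# Plausible continuation (not in the original snippet): write the sorted,
# de-duplicated channel list to the output file declared in the INPUT block.
with open(file_name, 'w') as fio:
    for chan in chans:
        fio.write('%s\n' % chan)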
if not opts.__dict__[m]:
    print "\nmandatory option is missing\n"
    parser.print_help()
    exit(-1)

# Checking Out File Option:
servTypes = ["1", "2", "3"]
if opts.serv not in servTypes:
    print "\nServer FDSN type is not allowed\n"
    parser.print_help()
    exit(-1)

serv = opts.serv
if serv == "3":
    fdsn = Client(base_url="IRIS")
elif serv == "2":
    fdsn = Client(base_url="http://moho.iag.usp.br")
else:
    fdsn = Client(base_url="http://datasisint.unb.br:8080")  # test

# Setting up Vars...
tmin = UTCDateTime(opts.tmin)
tmax = UTCDateTime(opts.tmax)
mmin = opts.mmin
mmax = opts.mmax
lat = opts.lat
lon = opts.lon
rmin = opts.rmin
def get_quakes(startcoors, endcoors, minmag=5.0):
    '''Get earthquakes within a region around the start and end coordinates.
    These will be plotted on the section, with x distance'''

    client = Client('USGS')

    # Only get intermediate depth quakes, or deeper
    mindepth = 60
    boxHW = 0.5
    quakefile = open('quakedata.dat', 'w')

    startlon = startcoors[0] - boxHW
    startlat = startcoors[1] + boxHW
    endlon = endcoors[0] + boxHW
    endlat = endcoors[1] - boxHW

    minlon = min(startlon, endlon)
    maxlon = max(startlon, endlon)
    minlat = min(startlat, endlat)
    maxlat = max(startlat, endlat)

    starttime = '1970-01-01'
    endtime = str(datetime.datetime.today()).split(' ')[0]

    #print startcoors, endcoors
    #print minlon, minlat, maxlon, maxlat

    print '---------------------\nUsing Obspy to get quakes\n---------------------'

    quakecat = client.get_events(starttime=UTCDateTime(starttime),
                                 endtime=UTCDateTime(endtime),
                                 minlongitude=minlon, maxlongitude=maxlon,
                                 minlatitude=minlat, maxlatitude=maxlat,
                                 minmagnitude=minmag, mindepth=mindepth)

    # Get the moment tensors for these events, if they exist
    # Currently not working
    #quakes, mts = cat2list(quakecat)
    #focmecs = [row[4:] for row in mts]

    for event in quakecat:
        evlon = event.origins[0].longitude
        evlat = event.origins[0].latitude
        evdep = event.origins[0].depth
        quakefile.write('%s %s %s\n' % (evlon, evlat, evdep))

    quakefile.close()

    # Work out the distance from each quake to the profile line, and write
    # to a file. Create the section coordinates. Should make some sort of
    # auto-decision about the spacing.
    sectionname = 'tmp_toposection.dat'

    print '---------------------\nMaking section through topography\n---------------------'

    os.system('gmt project -C%g/%g -E%g/%g -G10 -Q > %s'
              % (minlon, minlat, maxlon, maxlat, sectionname))
    os.system('gmt grdtrack %s -G%s > gridsectiontopo.dat'
              % (sectionname, topofile))

    # Open the topo file and extract the longest distance.
    # This will be used to scale the quake locations.
    infile = open('gridsectiontopo.dat', 'r')
    lines = infile.readlines()
    infile.close()

    topoX = []
    topoY = []

    for line in lines:
        vals = line.split()
        topoX.append(float(vals[2]))
        topoY.append(float(vals[3]))

    maxdist = topoX[-1]
    topoX = np.array(topoX)
    topoY = np.array(topoY)

    print '---------------------\nGetting quake distances\n---------------------'

    # Make a file containing quakelon, quakelat, dist, and dist along profile
    os.system('gmt mapproject quakedata.dat -Lgridsectiontopo.dat/k > quake_dist.dat')

    # Reorder the columns and do another grdtrack to get distance along the profile
    os.system("awk '{print $5,$6,$1,$2,$3,$4}' quake_dist.dat > quaketmp.dat")
    os.system("rm quake_dist.dat")

    # Now, calculate distance along the profile from the start point
    os.system('gmt mapproject quaketmp.dat -G%g/%g/k > quake_points.dat'
              % (minlon, minlat))
    os.system('rm quaketmp.dat')

    # Now, open the newly created file and grid section file, and pull the
    # distance data
    infile1 = open('quake_points.dat', 'r')
    lines1 = infile1.readlines()
    infile1.close()

    Xdistances_quakes = []
    Ydepths_quakes = []

    for line in lines1:
        vals = line.split(' ')
        try:
            evlon = float(vals[0].split('\t')[-1])
            evlat = float(vals[1])
            evdep = float(vals[2])
            evdist = float(vals[3].split('\t')[-2])
            evdistalong = float(vals[3].split('\t')[-1])
            # Only keep if the distance between the event and the profile
            # line is less than 50 km
            if evdist <= 50:
                Xdistances_quakes.append(evdistalong)
                Ydepths_quakes.append(-evdep/1000.0)
        # for some reason, some depths don't exist, so use this
        # try/except statement
        except:
            continue

    os.system('rm quake_points.dat')

    return Xdistances_quakes, Ydepths_quakes, maxdist, topoX, topoY
from obspy.fdsn import Client as FDSN_Client
from obspy.iris import Client as OldIris_Client
from obspy.core import UTCDateTime
from obspy.core.util import NamedTemporaryFile
import matplotlib.pyplot as plt

# MW 7.1 Darfield earthquake, New Zealand
t1 = UTCDateTime("2010-09-3T16:30:00.000")
t2 = UTCDateTime("2010-09-3T17:00:00.000")

# Fetch waveform from IRIS FDSN web service into an ObsPy stream object
fdsn_client = FDSN_Client("IRIS")
st = fdsn_client.get_waveforms('NZ', 'BFZ', '10', 'HHZ', t1, t2)

# Download and save instrument response file into a temporary file
with NamedTemporaryFile() as tf:
    respf = tf.name
    old_iris_client = OldIris_Client()
    # fetch RESP information from "old" IRIS web service, see obspy.fdsn
    # for accessing the new IRIS FDSN web services
    old_iris_client.resp('NZ', 'BFZ', '10', 'HHZ', t1, t2, filename=respf)

    # make a copy to keep our original data
    st_orig = st.copy()

    # define a filter band to prevent amplifying noise during the
    # deconvolution
    pre_filt = (0.005, 0.006, 30.0, 35.0)

    # this can be the date of your raw data or any date for which the
    # SEED RESP-file is valid
    date = t1
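    # Hedged continuation (not part of the excerpt above): remove the
    # instrument response using the downloaded RESP file -- the usual next
    # step in this example. The units choice ('DIS' for displacement) is an
    # assumption.
    seedresp = {'filename': respf, 'date': date, 'units': 'DIS'}
    st.simulate(paz_remove=None, pre_filt=pre_filt, seedresp=seedresp)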
# Author: Hadi Ghasemi
# Date: 20-01-16
# Task: Download data within 1000 km radius for events listed in catalogue.txt

# import modules
import pdb
import numpy as np
from obspy.fdsn import Client
from obspy.core import UTCDateTime
from obspy.core.util.geodetics import locations2degrees
from obspy.taup import TauPyModel

# default parameters
cat_file = './catalogue.txt'
client = Client('IRIS')
radius = 10.  # in degrees
model = TauPyModel(model='iasp91')
wl_10deg = 7 * 60.  # in seconds
output_dir = './output/'

# read catalogue.txt (see plan-phase1.txt for format details)
cat = np.loadtxt(cat_file)
cat = [cat[8]]

for line in cat:
    eve_lat = line[1]
    eve_lon = line[2]
    eve_depth = line[3]
    eve_ot = UTCDateTime(int(line[4]), int(line[5]), int(line[6]),
                         int(line[7]), int(line[8]), float(line[9]))
    print("Downloading station inventory for eve_" + str(int(line[0])))
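    # Hedged sketch of the next step (the excerpt stops here): fetch the
    # channel-level inventory within `radius` degrees of this epicenter for a
    # window starting at the origin time. The exact kwargs are assumptions.
    inv = client.get_stations(latitude=eve_lat, longitude=eve_lon,
                              maxradius=radius, starttime=eve_ot,
                              endtime=eve_ot + wl_10deg, level='channel')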
# -*- coding: utf-8 -*-
"""
Created on Fri Jul 17 15:38:50 2015

@author: boland
"""

from obspy.fdsn import Client
from obspy import UTCDateTime
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.basemap import Basemap
import pickle

client = Client("GEONET")

starttime = UTCDateTime("2014-01-01")
endtime = UTCDateTime("2015-01-01")

inventory = client.get_stations(network="*", station="*", loc='*',
                                channel="*Z", starttime=starttime,
                                endtime=endtime, level="response")

# save all response plots
#inventory[0].plot_response(min_freq=1E-4,
#                           channel="BHZ",
#                           location="10",
def download_data(params, station_list, with_waveforms, recording_time,
                  padding_time):
    """
    Still a work in progress (perhaps never finished). Sorts a text file
    obtained from IRIS (see manual), and parses out the STS and KS
    instruments (apparently the best ones). Then passes
    """
    # Domain boundaries
    min_lat = -65
    max_lat = 45
    min_lon = -47.5
    max_lon = 75

    # Set up paths and such.
    lasif_data_path = os.path.join(params['lasif_path'], 'DOWNLOADED_DATA')
    iteration_xml_path = params['iteration_xml_path']
    event_xml_directory = os.path.join(params['lasif_path'], 'EVENTS')
    event_list = params['event_list']
    lasif_stations_path = os.path.join(params['lasif_path'], 'STATIONS',
                                       'StationXML')

    # Set up station tuple and allowable instruments.
    station = namedtuple('station', ['network', 'station', 'location',
                                     'sensor', 's_time', 'e_time'])

    # Read the data and parse out the important components.
    stations_list = pd.read_csv(station_list, delimiter='|')
    stations_list.fillna('00', inplace=True)

    # Filter based on domain boundaries
    stations_list = stations_list[stations_list.Latitude > min_lat]
    stations_list = stations_list[stations_list.Latitude < max_lat]
    stations_list = stations_list[stations_list.Longitude > min_lon]
    stations_list = stations_list[stations_list.Longitude < max_lon]
    stations_list = stations_list[stations_list.Location == '00']
    stations_list['StartTime'] = \
        stations_list['StartTime'].astype(obspy.UTCDateTime)
    stations_list['EndTime'] = \
        stations_list['EndTime'].astype(obspy.UTCDateTime)

    # Number of events.
    num_events = len(os.listdir(event_xml_directory))
    event_names = sorted([x[:-4] for x in os.listdir(event_xml_directory)])

    # Event arrays.
    networks = stations_list.Network
    stations = stations_list.Station
    start_time = stations_list.StartTime
    end_time = stations_list.EndTime

    # Waveforms.
    pool = Pool(processes=NUM_THREADS)
    pool.map(_download_bulk_waveforms, zip(
        event_names, repeat(networks), repeat(stations), repeat(start_time),
        repeat(end_time), repeat(lasif_data_path),
        repeat(event_xml_directory), repeat(recording_time),
        repeat(padding_time)))
    if with_waveforms:
        return

    # Get stations. Build the station tuples the loop below expects (the
    # original referenced an undefined `stations_filt`; this reconstruction
    # derives it from the filtered station list, with the sensor field left
    # unset).
    stations_filt = [station(network=row.Network, station=row.Station,
                             location=row.Location, sensor=None,
                             s_time=row.StartTime, e_time=row.EndTime)
                     for row in stations_list.itertuples()]
    c = Client("IRIS")
    for x in stations_filt:
        station_filename = os.path.join(
            lasif_stations_path,
            'station.%s_%s.xml' % (x.network, x.station))
        if os.path.exists(station_filename):
            continue
        utils.print_ylw(
            "Downloading StationXML for: %s.%s" % (x.network, x.station))
        try:
            c.get_stations(
                network=x.network, station=x.station, location="*",
                channel="*", level="response", filename=station_filename)
        except:
            utils.print_red("No data for %s" % (station_filename))
def FDSN_available(input_dics, event, target_path, event_number):
    """
    Check the availability of FDSN stations
    :param input_dics:
    :param event:
    :param target_path:
    :param event_number:
    :return:
    """
    print "Check the availability of FDSN stations: %s" \
          % input_dics['fdsn_base_url']
    client_fdsn = Client_fdsn(base_url=input_dics['fdsn_base_url'],
                              user=input_dics['fdsn_user'],
                              password=input_dics['fdsn_pass'])
    Sta_fdsn = []
    try:
        if input_dics['fdsn_base_url'].lower() in ['resif']:
            msg = 'WARNING\n' \
                  'You have selected RESIF as fdsn_base_url!\n\n' \
                  'I have detected some problems in sending requests to ' \
                  'this data provider:\n' \
                  'It seems that the start and end times in channel level ' \
                  'are not set correctly.\n\n' \
                  'It is better to use the --list_stas option rather ' \
                  'than sending the availability\nrequest directly.'
            print '\n\n' + 80*'='
            print msg
            print 80*'=' + '\n\n'
            # start_time = None
            # end_time = None
            start_time = event['t1']
            end_time = event['t2']
        else:
            start_time = event['t1']
            end_time = event['t2']
        available = client_fdsn.get_stations(
            network=input_dics['net'],
            station=input_dics['sta'],
            location=input_dics['loc'],
            channel=input_dics['cha'],
            starttime=start_time,
            endtime=end_time,
            latitude=input_dics['lat_cba'],
            longitude=input_dics['lon_cba'],
            minradius=input_dics['mr_cba'],
            maxradius=input_dics['Mr_cba'],
            minlatitude=input_dics['mlat_rbb'],
            maxlatitude=input_dics['Mlat_rbb'],
            minlongitude=input_dics['mlon_rbb'],
            maxlongitude=input_dics['Mlon_rbb'],
            level='channel')

        for network in available.networks:
            for station in network:
                for channel in station:
                    Sta_fdsn.append([network.code, station.code,
                                     channel.location_code, channel.code,
                                     channel.latitude, channel.longitude,
                                     channel.elevation, channel.depth])

        if input_dics['fdsn_bulk'] == 'Y':
            if input_dics['fdsn_update'] != 'N':
                if os.path.exists(os.path.join(target_path, 'info',
                                               'bulkdata.txt')):
                    os.remove(os.path.join(target_path, 'info',
                                           'bulkdata.txt'))
            if os.path.exists(os.path.join(target_path, 'info',
                                           'bulkdata.txt')):
                print 'bulkdata.txt exists in the directory!'
            else:
                print 'Start creating a list for bulk request'
                bulk_list = []
                for bulk_sta in Sta_fdsn:
                    if input_dics['cut_time_phase']:
                        t_start, t_end = calculate_time_phase(event,
                                                              bulk_sta)
                    else:
                        t_start = event['t1']
                        t_end = event['t2']
                    bulk_list.append((bulk_sta[0], bulk_sta[1], bulk_sta[2],
                                      bulk_sta[3], t_start, t_end))
                bulk_list_fio = open(os.path.join(target_path, 'info',
                                                  'bulkdata_list'), 'a+')
                pickle.dump(bulk_list, bulk_list_fio)
                bulk_list_fio.close()
    except Exception as e:
        exc_file = open(os.path.join(target_path, 'info', 'exception'), 'a+')
        ee = 'fdsn -- Event: %s --- %s\n' % (str(event_number+1), e)
        exc_file.writelines(ee)
        exc_file.close()
        print 'ERROR: %s' % ee

    if len(Sta_fdsn) == 0:
        Sta_fdsn.append([])
    Sta_fdsn.sort()
    return Sta_fdsn
class MetaFetch:
    def __init__(self, network=None, station=None, starttime=None,
                 endtime=None, level='channel', channel='BH?',
                 minlongitude=None, maxlongitude=None, minlatitude=None,
                 maxlatitude=None, savedirectory=None):
        # initialise all the necessary variables
        self.network = network
        self.station = station
        self.starttime = starttime
        self.endtime = endtime
        self.minlongitude = minlongitude
        self.minlatitude = minlatitude
        self.maxlongitude = maxlongitude
        self.maxlatitude = maxlatitude
        self.level = level
        self.channel = channel
        self.savedirectory = savedirectory
        error = self.raiseinputerrors()
        if error is not None:
            print 'ERROR on input'

    def fetchinventory(self):
        '''Get an obspy inventory containing all the station information'''
        # eventually change so that we can get data from elsewhere
        self.client = Client("IRIS")
        self.Iclient = iclient()
        if self.station != 'None':
            self.inventory = self.client.get_stations(
                network=self.network, station=self.station, level='channel',
                channel=self.channel, starttime=self.starttime,
                endtime=self.endtime, minlongitude=self.minlongitude,
                minlatitude=self.minlatitude,
                maxlongitude=self.maxlongitude,
                maxlatitude=self.maxlatitude)
        else:
            self.inventory = self.client.get_stations(
                network=self.network, station=None, level='channel',
                channel=self.channel, starttime=self.starttime,
                endtime=self.endtime, minlongitude=self.minlongitude,
                minlatitude=self.minlatitude,
                maxlongitude=self.maxlongitude,
                maxlatitude=self.maxlatitude)

    def printinventory(self):
        '''Print out useful metadata about the requested networks and/or
        channels'''
        print '########################################'
        print 'The following information was requested:'
        for network in self.inventory.networks:
            for station in network.stations:
                print '########################################'
                print station

    def raiseinputerrors(self):
        '''If incorrect input is entered, raise errors'''
        error = None
        if self.network is None:
            error = 'A valid network code is needed'
        if self.starttime is None:
            error = 'A valid starttime in UTCDateTime format is needed'
        if self.endtime is None:
            error = 'A valid endtime in UTCDateTime format is needed'
        # return the error so the caller's check can fire (the original
        # never returned it)
        return error

    def extract_stations(self, writefile=True):
        '''Make a dictionary containing station, latitude and longitude for
        that network. Also output a file whose rows contain
        station lon lat depth
        '''
        networks = {}
        for network in self.inventory.networks:  # should only be one network
            information = str(network).split('\n')[0]
            networks[information] = {}
            for station in network.stations:
                stationname = \
                    str(station).split('\n')[0].split(' ')[1].strip()
                stationlat = station.latitude
                stationlon = station.longitude
                stationelev = station.elevation
                channels = []
                azimuths = []
                dips = []
                for channel in station.channels:
                    azimuths.append(channel.azimuth)
                    dips.append(channel.dip)
                    code = str(channel.code)
                    # sometimes the code will appear more than once, in which
                    # case the data will be downloaded multiple times.
                    # Stop this.
                    if code not in channels:
                        channels.append(code)
                networks[information][stationname] = [
                    stationlat, stationlon, stationelev, channels,
                    azimuths, dips
                ]

        # write the information to a file
        if writefile:
            outfilename = information.replace(" ", '_') + '.dat'
            outfilename = outfilename.replace(')', '')
            outfilename = outfilename.replace('(', '_')
            outfilename = outfilename.replace('-', '_')
            outfilename = outfilename.replace('/', '_')
            outfile = open(outfilename, 'w')
            for station in networks[information]:
                outfile.write('%s %s %s %s\n'
                              % (station,
                                 networks[information][station][1],
                                 networks[information][station][0],
                                 networks[information][station][2]))
            outfile.close()
            # should probably move this file to the save location - contains
            # useful information if one wants to make a plot of the station
            # distribution
            if (self.savedirectory) and \
                    (os.getcwd() != self.savedirectory):
                # moves the metadata file to the location specified by
                # the user
                os.system('mv %s %s' % (outfilename, self.savedirectory))
        return networks
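# Minimal usage sketch for MetaFetch (assumed driver, not part of the original
# class; the network/station/time values are placeholders, and UTCDateTime is
# assumed imported from obspy):
if __name__ == '__main__':
    fetcher = MetaFetch(network='IU', station='ANMO',
                        starttime=UTCDateTime('2014-01-01'),
                        endtime=UTCDateTime('2014-02-01'))
    fetcher.fetchinventory()
    fetcher.printinventory()
    networks = fetcher.extract_stations(writefile=False)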
from obspy.fdsn import Client as FDSN_Client
from obspy.iris import Client as OldIris_Client
from obspy.core import UTCDateTime
from obspy.core.util import NamedTemporaryFile
import matplotlib.pyplot as plt
import numpy as np

# MW 7.1 Darfield earthquake, New Zealand
t1 = UTCDateTime("2010-09-3T16:30:00.000")
t2 = UTCDateTime("2010-09-3T17:00:00.000")

# Fetch waveform from IRIS FDSN web service into an ObsPy stream object
fdsn_client = FDSN_Client("IRIS")
st = fdsn_client.get_waveforms('NZ', 'BFZ', '10', 'HHZ', t1, t2)

# Download and save instrument response file into a temporary file
with NamedTemporaryFile() as tf:
    respf = tf.name
    old_iris_client = OldIris_Client()
    # fetch RESP information from "old" IRIS web service, see obspy.fdsn
    # for accessing the new IRIS FDSN web services
    old_iris_client.resp('NZ', 'BFZ', '10', 'HHZ', t1, t2, filename=respf)

    # make a copy to keep our original data
    st_orig = st.copy()

    # define a filter band to prevent amplifying noise during the
    # deconvolution
    pre_filt = (0.005, 0.006, 30.0, 35.0)

    # this can be the date of your raw data or any date for which the
    # SEED RESP-file is valid
#!/usr/bin/env python
# This speeds up the process of fetching seismic data: make a global catalog
# of events to access. This takes a very long time to run, but creates a
# file of up-to-date earthquake information that can be accessed by the
# trace fetch program

from obspy.fdsn import Client
from obspy import UTCDateTime
import datetime

today = datetime.date.today()
client = Client('IRIS')

t1 = UTCDateTime(1970, 1, 1)
t2 = UTCDateTime(today.year, today.month, today.day)

# all global events in the catalog
client.get_events(starttime=t1, endtime=t2,
                  filename='global_quake_cat.dat', minmagnitude=5.0)

# write the parsed version of this file, ready to manipulate
outfile = open('globalquake_parsed.dat', 'w')
infile = open('global_quake_cat.dat', 'r')
lines = infile.readlines()
infile.close()

depths = []
lats = []
lons = []
times = []
mags = []

for i, j in enumerate(lines):
    if j.strip() == '<depth>':
def test_download_urls_for_custom_mapping(self, download_url_mock):
    """
    Tests the downloading of data with custom mappings.
    """
    base_url = "http://example.com"

    # More extensive mock setup simulating service discovery.
    def custom_side_effects(*args, **kwargs):
        if "version" in args[0]:
            return 200, "1.0.200"
        elif "event" in args[0]:
            with open(os.path.join(
                    self.datapath, "2014-01-07_iris_event.wadl"),
                    "rb") as fh:
                return 200, fh.read()
        elif "station" in args[0]:
            with open(os.path.join(
                    self.datapath, "2014-01-07_iris_station.wadl"),
                    "rb") as fh:
                return 200, fh.read()
        elif "dataselect" in args[0]:
            with open(os.path.join(
                    self.datapath, "2014-01-07_iris_dataselect.wadl"),
                    "rb") as fh:
                return 200, fh.read()
        return 404, None

    download_url_mock.side_effect = custom_side_effects

    # Some custom urls
    base_url_event = "http://other_url.com/beta/event_service/11"
    base_url_station = "http://some_url.com/beta2/station/7"
    base_url_ds = "http://new.com/beta3/dataselect/8"

    # An exception will be raised if no actual WADLs are returned.
    # Catch warnings to avoid them being raised for the tests.
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        c = Client(base_url=base_url, service_mappings={
            "event": base_url_event,
            "station": base_url_station,
            "dataselect": base_url_ds,
        })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning) or
                            "cannot deal with" in str(warning))

    # Test the dataselect downloading.
    download_url_mock.reset_mock()
    download_url_mock.side_effect = None
    download_url_mock.return_value = 404, None
    try:
        c.get_waveforms("A", "B", "C", "D", UTCDateTime() - 100,
                        UTCDateTime())
    except:
        pass
    self.assertTrue(
        base_url_ds in download_url_mock.call_args_list[0][0][0])

    # Test the station downloading.
    download_url_mock.reset_mock()
    download_url_mock.side_effect = None
    download_url_mock.return_value = 404, None
    try:
        c.get_stations()
    except:
        pass
    self.assertTrue(
        base_url_station in download_url_mock.call_args_list[0][0][0])

    # Test the event downloading.
    download_url_mock.reset_mock()
    download_url_mock.side_effect = None
    download_url_mock.return_value = 404, None
    try:
        c.get_events()
    except:
        pass
    self.assertTrue(
        base_url_event in download_url_mock.call_args_list[0][0][0])
def get_cat(data_center=None, **kwargs):
    '''
    Function to get catalog data from different data centers.

    data_center - specify the data center, i.e. 'IRIS'

    Other arguments you can use:
    :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`, optional
    :param starttime: Limit to events on or after the specified start time.
    :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`, optional
    :param endtime: Limit to events on or before the specified end time.
    :type minlatitude: float, optional
    :param minlatitude: Limit to events with a latitude larger than the
        specified minimum.
    :type maxlatitude: float, optional
    :param maxlatitude: Limit to events with a latitude smaller than the
        specified maximum.
    :type minlongitude: float, optional
    :param minlongitude: Limit to events with a longitude larger than the
        specified minimum.
    :type maxlongitude: float, optional
    :param maxlongitude: Limit to events with a longitude smaller than the
        specified maximum.
    :type latitude: float, optional
    :param latitude: Specify the latitude to be used for a radius search.
    :type longitude: float, optional
    :param longitude: Specify the longitude to be used for a radius search.
    :type minradius: float, optional
    :param minradius: Limit to events within the specified minimum number of
        degrees from the geographic point defined by the latitude and
        longitude parameters.
    :type maxradius: float, optional
    :param maxradius: Limit to events within the specified maximum number of
        degrees from the geographic point defined by the latitude and
        longitude parameters.
    :type mindepth: float, optional
    :param mindepth: Limit to events with depth more than the specified
        minimum.
    :type maxdepth: float, optional
    :param maxdepth: Limit to events with depth less than the specified
        maximum.
    :type minmagnitude: float, optional
    :param minmagnitude: Limit to events with a magnitude larger than the
        specified minimum.
    :type maxmagnitude: float, optional
    :param maxmagnitude: Limit to events with a magnitude smaller than the
        specified maximum.
    :type magnitudetype: str, optional
    :param magnitudetype: Specify a magnitude type to use for testing the
        minimum and maximum limits.
    :type includeallorigins: bool, optional
    :param includeallorigins: Specify if all origins for the event should be
        included, default is data center dependent but is suggested to be
        the preferred origin only.
    :type includeallmagnitudes: bool, optional
    :param includeallmagnitudes: Specify if all magnitudes for the event
        should be included, default is data center dependent but is
        suggested to be the preferred magnitude only.
    :type includearrivals: bool, optional
    :param includearrivals: Specify if phase arrivals should be included.
    :type eventid: str or int (dependent on data center), optional
    :param eventid: Select a specific event by ID; event identifiers are
        data center specific.
    :type limit: int, optional
    :param limit: Limit the results to the specified number of events.
    :type offset: int, optional
    :param offset: Return results starting at the event count specified,
        starting at 1.
    :type orderby: str, optional
    :param orderby: Order the result by time or magnitude with the following
        possibilities:
        * time: order by origin descending time
        * time-asc: order by origin ascending time
        * magnitude: order by descending magnitude
        * magnitude-asc: order by ascending magnitude
    :type catalog: str, optional
    :param catalog: Limit to events from a specified catalog
    :type contributor: str, optional
    :param contributor: Limit to events contributed by a specified
        contributor.
    :type updatedafter: :class:`~obspy.core.utcdatetime.UTCDateTime`,
        optional
    :param updatedafter: Limit to events updated after the specified time.
    :type filename: str or open file-like object
    :param filename: If given, the downloaded data will be saved there
        instead of being parsed to an ObsPy object. Thus it will contain the
        raw data from the webservices.
    '''
    # get the catalog
    if data_center is None:
        data_center = 'USGS'
    client = Client(data_center)

    # save the catalog into a StringIO object
    sio = StringIO()
    cat = client.get_events(filename=sio, **kwargs)

    # specify the entries you want to replace (the inconsistent ones) in the
    # following dictionary
    rep = {"quarry_blast": "quarry blast",
           "quarry": "quarry blast",
           "quarry blast_blast": "quarry blast"}

    # replace the multiple entries, and save the modified entries into a
    # StringIO object
    rep = dict((re.escape(k), v) for k, v in rep.iteritems())
    pattern = re.compile("|".join(rep.keys()))
    sio.seek(0)
    sio2 = StringIO()
    sio2.write(pattern.sub(lambda m: rep[re.escape(m.group(0))], sio.buf))

    # read the catalog from this StringIO object
    sio2.seek(0)
    cat = readEvents(sio2)
    return cat
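# Example call (illustrative, not from the original source): fetch a week of
# M>=5.5 events from the default USGS data center. Assumes obspy's
# UTCDateTime is imported alongside the FDSN Client used above.
if __name__ == '__main__':
    cat = get_cat(starttime=UTCDateTime('2014-01-01'),
                  endtime=UTCDateTime('2014-01-08'),
                  minmagnitude=5.5)
    print(cat)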
def get_cat(data_center=None, **kwargs):
    '''
    Function to get catalog data from different data centers.

    data_center - specify the data center, i.e. 'IRIS'

    Other arguments you can use:
    :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`, optional
    :param starttime: Limit to events on or after the specified start time.
    :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`, optional
    :param endtime: Limit to events on or before the specified end time.
    :type minlatitude: float, optional
    :param minlatitude: Limit to events with a latitude larger than the
        specified minimum.
    :type maxlatitude: float, optional
    :param maxlatitude: Limit to events with a latitude smaller than the
        specified maximum.
    :type minlongitude: float, optional
    :param minlongitude: Limit to events with a longitude larger than the
        specified minimum.
    :type maxlongitude: float, optional
    :param maxlongitude: Limit to events with a longitude smaller than the
        specified maximum.
    :type latitude: float, optional
    :param latitude: Specify the latitude to be used for a radius search.
    :type longitude: float, optional
    :param longitude: Specify the longitude to be used for a radius search.
    :type minradius: float, optional
    :param minradius: Limit to events within the specified minimum number of
        degrees from the geographic point defined by the latitude and
        longitude parameters.
    :type maxradius: float, optional
    :param maxradius: Limit to events within the specified maximum number of
        degrees from the geographic point defined by the latitude and
        longitude parameters.
    :type mindepth: float, optional
    :param mindepth: Limit to events with depth more than the specified
        minimum.
    :type maxdepth: float, optional
    :param maxdepth: Limit to events with depth less than the specified
        maximum.
    :type minmagnitude: float, optional
    :param minmagnitude: Limit to events with a magnitude larger than the
        specified minimum.
    :type maxmagnitude: float, optional
    :param maxmagnitude: Limit to events with a magnitude smaller than the
        specified maximum.
    :type magnitudetype: str, optional
    :param magnitudetype: Specify a magnitude type to use for testing the
        minimum and maximum limits.
    :type includeallorigins: bool, optional
    :param includeallorigins: Specify if all origins for the event should be
        included, default is data center dependent but is suggested to be
        the preferred origin only.
    :type includeallmagnitudes: bool, optional
    :param includeallmagnitudes: Specify if all magnitudes for the event
        should be included, default is data center dependent but is
        suggested to be the preferred magnitude only.
    :type includearrivals: bool, optional
    :param includearrivals: Specify if phase arrivals should be included.
    :type eventid: str or int (dependent on data center), optional
    :param eventid: Select a specific event by ID; event identifiers are
        data center specific.
    :type limit: int, optional
    :param limit: Limit the results to the specified number of events.
    :type offset: int, optional
    :param offset: Return results starting at the event count specified,
        starting at 1.
    :type orderby: str, optional
    :param orderby: Order the result by time or magnitude with the following
        possibilities:
        * time: order by origin descending time
        * time-asc: order by origin ascending time
        * magnitude: order by descending magnitude
        * magnitude-asc: order by ascending magnitude
    :type catalog: str, optional
    :param catalog: Limit to events from a specified catalog
    :type contributor: str, optional
    :param contributor: Limit to events contributed by a specified
        contributor.
:type updatedafter: :class:`~obspy.core.utcdatetime.UTCDateTime`, optional :param updatedafter: Limit to events updated after the specified time. :type filename: str or open file-like object :param filename: If given, the downloaded data will be saved there instead of being parsed into an ObsPy object; it will then contain the raw data from the web services. ''' # get the catalog if data_center is None: data_center = 'USGS' client = Client(data_center) sio = StringIO() # save the catalog into a StringIO object cat = client.get_events(filename=sio, **kwargs) # map the inconsistent event-type entries to the strings that should replace them rep = {"quarry_blast": "quarry blast", "quarry": "quarry blast", "quarry blast_blast": "quarry blast"} # replace the multiple entries and save the modified text into a second StringIO object rep = dict((re.escape(k), v) for k, v in rep.iteritems()) pattern = re.compile("|".join(rep.keys())) sio.seek(0) sio2 = StringIO() sio2.write(pattern.sub(lambda m: rep[re.escape(m.group(0))], sio.getvalue())) # read the catalog from this StringIO object sio2.seek(0) cat = readEvents(sio2) return cat
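A minimal usage sketch for get_cat(); the data center and query values here are arbitrary examples, not part of the original code:

from obspy import UTCDateTime

# fetch one month of M>=3 events from USGS and inspect the normalized event types
cat = get_cat(data_center='USGS',
              starttime=UTCDateTime('2014-01-01'),
              endtime=UTCDateTime('2014-02-01'),
              minmagnitude=3)
print cat
for event in cat:
    print event.event_type  # every quarry variant now reads "quarry blast"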
""" Created on Mon Jun 8 08:44:28 2015 @author: boland """ import numpy as np import obspy from obspy import UTCDateTime, read import datetime from obspy.fdsn import Client import matplotlib.pyplot as plt from mpl_toolkits.basemap import Basemap # set server name. United States Geological Survey. client = Client("IRIS") # get current UTC date in "yy-mm-dd" format and set that as the end of the searched time-window date_end = datetime.datetime.utcnow().date() #convert end date from datetime object to a UTCDateTime object end = UTCDateTime(date_end) # set the time period to scan. in this case we're looking at the previous 10 days no_of_days = 1000.0 # define time difference as a datetime object number_days = datetime.timedelta(days=no_of_days) # set start date for the time-window as the current date minus the number of days set date_start = date_end - number_days
@author: boland """ from obspy.fdsn.header import URL_MAPPINGS from obspy import UTCDateTime from obspy.fdsn import Client # create example start and end times for event search starttime = UTCDateTime('2014-01-01T00:00.000') endtime = UTCDateTime('2015-01-01T00:00.000') endtime = UTCDateTime('2014-02-01T00:00.000') # create list of possible servers to find earthquake events server_list = [] for key in sorted(URL_MAPPINGS.keys()): server_list.append(key) for server in server_list: print server client = Client(server) try: cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=4) #, catalog="ISC") print cat cat.plot() except: continue print "done"
from subprocess import call # for calling unix commands from datetime import date # to give a timestamp to output import time import pdb # for the debugger; call pdb.set_trace() where a breakpoint is needed import signal import sys import os import site from matplotlib import pyplot as plt from math import pi, exp, log, sqrt from ConfigParser import SafeConfigParser from obspy import UTCDateTime #################### IMPORT OBSPY MODULES ###################################### # Initialise client object # from obspy.arclink.client import Client from obspy.fdsn import Client client = Client("IRIS") from obspy.fdsn.header import URL_MAPPINGS print "Available Clients are:" for key in sorted(URL_MAPPINGS.keys()): print("{0:<7} {1}".format(key, URL_MAPPINGS[key])) # setup ObsPy-Antelope signal.signal(signal.SIGINT, signal.SIG_DFL) sys.path.append(os.environ['ANTELOPE'] + "/data/python") site.addsitedir('/opt/antelope/local/lib/python' + sys.version[:3]) # import antelope.datascope as datascope from antelope.datascope import dbtmp, dbopen, destroying, closing from antelope.datascope import DbfindEnd, DbaddvError
# The IRIS Client is used for infrasound data requests # calling the IRIS client raises a DeprecationWarning, which can be ignored # from obspy.iris import Client as IrisClient from time import time import matplotlib.pyplot as plt import numpy as np import os import scipy # # WS clients # irisClient = IrisClient(user_agent=msgLib.param(param, 'userAgent').userAgent) client = Client(user_agent=msgLib.param(param, 'userAgent').userAgent) action = "" # keep track of what you are doing # # RUN ARGUMENTS: # t0 = time() t1 = fileLib.timeIt("START", t0) print "\n" msgLib.message("START") inNetwork = getParam(args, 'net', msgLib, None) inStation = getParam(args, 'sta', msgLib, None) inLocation = staLib.getLocation(getParam(args, 'loc', msgLib, None)) inChannel = getParam(args, 'chan', msgLib,
'all_stations.xml' which contains a station level StationXML file. :copyright: Lion Krischer ([email protected]), 2014 :license: GNU Lesser General Public License, Version 3 (http://www.gnu.org/copyleft/lesser.html) """ import colorama from obspy.station import read_inventory from obspy.fdsn import Client import os output_dir = "StationXML" c = Client() inv = read_inventory("./all_stations.xml") def print_error(msg): print colorama.Fore.RED + msg + colorama.Fore.RESET def print_ok(msg): print colorama.Fore.GREEN + msg + colorama.Fore.RESET for network in inv.networks: for station in network.stations: output_filename = os.path.join(
from obspy.fdsn import Client from obspy import UTCDateTime import numpy as np import matplotlib.pyplot as plt import matplotlib.mlab as ml import scipy from scipy.interpolate import griddata client = Client() def getWave(network, station, number, channel, UTC, dur): """ Downloads miniseed datasets through the obspy.fdsn client. """ t = UTCDateTime(UTC) st = client.get_waveforms(network, station, number, channel, t, t + dur, attach_response=True) print st return st def preprocess(stream): """Carries out simple preprocessing of the trace by first merging the stream, removing the instrument response, highpass filtering at 0.02 Hz and then tapering""" stream.merge() stream.remove_response(output="VEL") stream.filter('highpass', freq=0.02, corners=2, zerophase=True) stream.taper(max_percentage=0.01, type='cosine') return stream def spec_amp(stream): """This produces three 1d arrays constructed after splitting the noise trace
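A hypothetical call sequence for the two helpers above; the network, station and times are illustrative only:

# download an hour of broadband vertical data and preprocess it
st = getWave("IU", "ANMO", "00", "BHZ", "2014-01-01T00:00:00", 3600)
st = preprocess(st)
st.plot()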
class ClientTestCase(unittest.TestCase): """ Test cases for obspy.fdsn.client.Client. """ def __init__(self, *args, **kwargs): """ setupClass() would be better suited for the task at hand but is not supported by Python 2.6. """ super(ClientTestCase, self).__init__(*args, **kwargs) # directory where the test files are located self.path = os.path.dirname(__file__) self.datapath = os.path.join(self.path, "data") self.client = Client(base_url="IRIS", user_agent=USER_AGENT) self.client_auth = \ Client(base_url="IRIS", user_agent=USER_AGENT, user="******", password="******") def test_url_building(self): """ Tests the build_url() functions. """ # Application WADL self.assertEqual( build_url("http://service.iris.edu", "dataselect", 1, "application.wadl"), "http://service.iris.edu/fdsnws/dataselect/1/application.wadl") self.assertEqual( build_url("http://service.iris.edu", "event", 1, "application.wadl"), "http://service.iris.edu/fdsnws/event/1/application.wadl") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "application.wadl"), "http://service.iris.edu/fdsnws/station/1/application.wadl") # Test one parameter. self.assertEqual( build_url("http://service.iris.edu", "dataselect", 1, "query", {"network": "BW"}), "http://service.iris.edu/fdsnws/dataselect/1/query?network=BW") self.assertEqual( build_url("http://service.iris.edu", "dataselect", 1, "queryauth", {"network": "BW"}), "http://service.iris.edu/fdsnws/dataselect/1/queryauth?network=BW") # Test two parameters. Note random order, two possible results. self.assertTrue( build_url("http://service.iris.edu", "dataselect", 1, "query", { "net": "A", "sta": "BC" }) in ("http://service.iris.edu/fdsnws/dataselect/1/query?net=A&sta=BC", "http://service.iris.edu/fdsnws/dataselect/1/query?sta=BC&net=A")) # A wrong service raises a ValueError self.assertRaises(ValueError, build_url, "http://service.iris.edu", "obspy", 1, "query") def test_location_parameters(self): """ Tests how the variety of location values is handled. Why location? Mostly because it is one tricky parameter. It is not uncommon to assume that a non-existent location is "--", but in reality "--" is "<space><space>". This substitution exists mostly because various applications have trouble digesting spaces (spaces in the URL, for example). The confusion begins when location is treated as empty instead, which would imply "I want all locations" instead of "I only want locations of <space><space>" """ # requests with no specified location should be treated as a wildcard self.assertFalse("--" in build_url( "http://service.iris.edu", "station", 1, "query", { "network": "IU", "station": "ANMO", "starttime": "2013-01-01" })) # location of " " is the same as "--" self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": " "}), "http://service.iris.edu/fdsnws/station/1/query?location=--") # wildcard locations are valid. Will be encoded. self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "*"}), "http://service.iris.edu/fdsnws/station/1/query?location=%2A") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "A?"}), "http://service.iris.edu/fdsnws/station/1/query?location=A%3F") # lists are valid, including <space><space> lists. Again encoded # result. self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": " ,1?,?0"}), "http://service.iris.edu/fdsnws/station/1/query?"
"location=--%2C1%3F%2C%3F0") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "1?,--,?0"}), "http://service.iris.edu/fdsnws/station/1/query?" "location=1%3F%2C--%2C%3F0") # Test all three special cases with empty parameters into lists. self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": " ,AA,BB"}), "http://service.iris.edu/fdsnws/station/1/query?" "location=--%2CAA%2CBB") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "AA, ,BB"}), "http://service.iris.edu/fdsnws/station/1/query?" "location=AA%2C--%2CBB") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "AA,BB, "}), "http://service.iris.edu/fdsnws/station/1/query?" "location=AA%2CBB%2C--") def test_url_building_with_auth(self): """ Tests the Client._build_url() method with authentication. Necessary on top of test_url_building test case because clients with authentication have to build different URLs for dataselect. """ # no authentication got = self.client._build_url("dataselect", "query", {'net': "BW"}) expected = "http://service.iris.edu/fdsnws/dataselect/1/query?net=BW" self.assertEqual(got, expected) # with authentication got = self.client_auth._build_url("dataselect", "query", {'net': "BW"}) expected = ("http://service.iris.edu/fdsnws/dataselect/1/" "queryauth?net=BW") self.assertEqual(got, expected) def test_service_discovery_iris(self): """ Tests the automatic discovery of services with the IRIS endpoint. The test parameters are taken from IRIS' website. This will have to be adjusted once IRIS changes their implementation. """ client = self.client self.assertEqual( set(client.services.keys()), set(("dataselect", "event", "station", "available_event_contributors", "available_event_catalogs"))) # The test sets are copied from the IRIS webpage. self.assertEqual( set(client.services["dataselect"].keys()), set(("starttime", "endtime", "network", "station", "location", "channel", "quality", "minimumlength", "longestonly"))) self.assertEqual( set(client.services["station"].keys()), set(("starttime", "endtime", "startbefore", "startafter", "endbefore", "endafter", "network", "station", "location", "channel", "minlatitude", "maxlatitude", "minlongitude", "maxlongitude", "latitude", "longitude", "minradius", "maxradius", "level", "includerestricted", "includeavailability", "updatedafter", "matchtimeseries"))) self.assertEqual( set(client.services["event"].keys()), set(( "starttime", "endtime", "minlatitude", "maxlatitude", "minlongitude", "maxlongitude", "latitude", "longitude", "maxradius", "minradius", "mindepth", "maxdepth", "minmagnitude", "maxmagnitude", "magnitudetype", "catalog", "contributor", "limit", "offset", "orderby", "updatedafter", "includeallorigins", "includeallmagnitudes", "includearrivals", "eventid", "originid" # XXX: This is currently just specified in the # WADL. ))) # Also check an exemplary value in more detail. minradius = client.services["event"]["minradius"] self.assertEqual(minradius["default_value"], 0.0) self.assertEqual(minradius["required"], False) self.assertEqual(minradius["doc"], "") self.assertEqual( minradius["doc_title"], "Specify minimum distance " "from the geographic point defined by latitude and " "longitude") self.assertEqual(minradius["type"], float) self.assertEqual(minradius["options"], []) def test_IRIS_event_catalog_availability(self): """ Tests the parsing of the available event catalogs. 
""" self.assertEqual( set(self.client.services["available_event_catalogs"]), set(("ANF", "GCMT", "TEST", "ISC", "UofW", "NEIC PDE"))) def test_IRIS_event_contributors_availability(self): """ Tests the parsing of the available event contributors. """ self.assertEqual( set(self.client.services["available_event_contributors"]), set(("University of Washington", "ANF", "GCMT", "GCMT-Q", "ISC", "NEIC ALERT", "NEIC PDE-W", "UNKNOWN", "NEIC PDE-M", "NEIC PDE-Q"))) def test_simple_XML_parser(self): """ Tests the simple XML parsing helper function. """ catalogs = parse_simple_xml(""" <?xml version="1.0"?> <Catalogs> <total>6</total> <Catalog>ANF</Catalog> <Catalog>GCMT</Catalog> <Catalog>TEST</Catalog> <Catalog>ISC</Catalog> <Catalog>UofW</Catalog> <Catalog>NEIC PDE</Catalog> </Catalogs>""") self.assertEqual( catalogs, { "catalogs": set( ("ANF", "GCMT", "TEST", "ISC", "UofW", "NEIC PDE")) }) def test_IRIS_example_queries_event(self): """ Tests the (sometimes modified) example queries given on IRIS webpage. """ client = self.client queries = [ dict(eventid=609301), dict(starttime=UTCDateTime("2011-01-07T01:00:00"), endtime=UTCDateTime("2011-01-07T02:00:00"), catalog="NEIC PDE"), dict(starttime=UTCDateTime("2011-01-07T14:00:00"), endtime=UTCDateTime("2011-01-08T00:00:00"), minlatitude=15, maxlatitude=40, minlongitude=-170, maxlongitude=170, includeallmagnitudes=True, minmagnitude=4, orderby="magnitude"), ] result_files = [ "events_by_eventid.xml", "events_by_time.xml", "events_by_misc.xml", ] for query, filename in zip(queries, result_files): file_ = os.path.join(self.datapath, filename) # query["filename"] = file_ got = client.get_events(**query) expected = readEvents(file_) self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_events(filename=tf.name, **query) with open(tf.name) as fh: got = fh.read() with open(file_) as fh: expected = fh.read() self.assertEqual(got, expected, failmsg(got, expected)) def test_IRIS_example_queries_station(self): """ Tests the (sometimes modified) example queries given on IRIS webpage. """ client = self.client queries = [ dict(latitude=-56.1, longitude=-26.7, maxradius=15), dict(startafter=UTCDateTime("2003-01-07"), endbefore=UTCDateTime("2011-02-07"), minlatitude=15, maxlatitude=55, minlongitude=170, maxlongitude=-170), dict(starttime=UTCDateTime("2013-01-01"), network="IU", sta="ANMO", level="channel"), dict(starttime=UTCDateTime("2013-01-01"), network="IU", sta="A*", location="00", level="channel", format="text"), ] result_files = [ "stations_by_latlon.xml", "stations_by_misc.xml", "stations_by_station.xml", "stations_by_station_wildcard.xml", ] for query, filename in zip(queries, result_files): file_ = os.path.join(self.datapath, filename) # query["filename"] = file_ got = client.get_stations(**query) expected = read_inventory(file_, format="STATIONXML") # delete both creating times and modules before comparing objects. 
got.created = None expected.created = None got.module = None expected.module = None self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_stations(filename=tf.name, **query) with open(tf.name) as fh: got = fh.read() with open(file_) as fh: expected = fh.read() ignore_lines = ['<Created>', '<TotalNumberStations>', '<Module>'] msg = failmsg(got, expected, ignore_lines=ignore_lines) self.assertEqual(msg, "", msg) def test_IRIS_example_queries_dataselect(self): """ Tests the (sometimes modified) example queries given on IRIS webpage. """ client = self.client queries = [ ("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000")), ("IU", "A*", "*", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:31:00.000")), ("IU", "A??", "*0", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:31:00.000")), ] result_files = [ "dataselect_example.mseed", "dataselect_example_wildcards.mseed", "dataselect_example_mixed_wildcards.mseed", ] for query, filename in zip(queries, result_files): # test output to stream got = client.get_waveforms(*query) file_ = os.path.join(self.datapath, filename) expected = read(file_) self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_waveforms(*query, filename=tf.name) with open(tf.name) as fh: got = fh.read() with open(file_) as fh: expected = fh.read() self.assertEqual(got, expected, failmsg(got, expected)) def test_authentication(self): """ Test dataselect with authentication. """ client = self.client_auth # dataselect example queries query = ("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000")) filename = "dataselect_example.mseed" got = client.get_waveforms(*query) file_ = os.path.join(self.datapath, filename) expected = read(file_) self.assertEqual(got, expected, failmsg(got, expected)) def test_conflicting_params(self): """ """ self.assertRaises(FDSNException, self.client.get_stations, network="IU", net="IU") def test_help_function_with_IRIS(self): """ Tests the help function with the IRIS example. This will have to be adapted any time IRIS changes their implementation. """ try: client = self.client sys.stdout = StringIO() client.help() sys.stdout.close() # Capture output sys.stdout = StringIO() client.help("event") got = sys.stdout.getvalue() sys.stdout.close() sys.stdout = sys.__stdout__ expected = ( "Parameter description for the 'event' service (v1.0.6) of " "'http://service.iris.edu':\n" "The service offers the following non-standard parameters:\n" " originid (int)\n" " Retrieve an event based on the unique origin ID " "numbers assigned by\n" " the IRIS DMC\n" "Available catalogs: ANF, UofW, NEIC PDE, ISC, TEST, GCMT\n" "Available contributors: NEIC PDE-W, ANF, University of " "Washington, GCMT-Q, NEIC PDE-Q, UNKNOWN, NEIC ALERT, ISC, " "NEIC PDE-M, GCMT\n") # allow for changes in version number.. self.assertEqual( normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) # Reset. Creating a new one is faster than clearing the old one.
sys.stdout = StringIO() client.help("station") got = sys.stdout.getvalue() sys.stdout.close() sys.stdout = sys.__stdout__ expected = ( "Parameter description for the 'station' service (v1.0.7) of " "'http://service.iris.edu':\n" "The service offers the following non-standard parameters:\n" " matchtimeseries (bool)\n" " Specify that the availabilities line up with " "available data. This is\n" " an IRIS extension to the FDSN specification\n") self.assertEqual( normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) # Reset. sys.stdout = StringIO() client.help("dataselect") got = sys.stdout.getvalue() expected = ( "Parameter description for the 'dataselect' service (v1.0.0) " "of 'http://service.iris.edu':\n" "No derivations from standard detected\n") self.assertEqual( normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) sys.stdout.close() finally: sys.stdout = sys.__stdout__ def test_str_method(self): got = str(self.client) expected = ( "FDSN Webservice Client (base url: http://service.iris.edu)\n" "Available Services: 'dataselect' (v1.0.0), 'event' (v1.0.6), " "'station' (v1.0.7), 'available_event_contributors', " "'available_event_catalogs'\n\n" "Use e.g. client.help('dataselect') for the\n" "parameter description of the individual services\n" "or client.help() for parameter description of\n" "all webservices.") self.assertEqual( normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) def test_bulk(self): """ Test bulk requests, POSTing data to server. Also tests authenticated bulk request. """ clients = [self.client, self.client_auth] file1 = os.path.join(self.datapath, "bulk1.mseed") file2 = os.path.join(self.datapath, "bulk2.mseed") expected1 = read(file1) expected2 = read(file2) # test cases for providing lists of lists bulk1 = (("TA", "A25A", "", "BHZ", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:04")), ("IU", "ANMO", "*", "BH?", UTCDateTime("2010-03-25"), UTCDateTime("2010-03-25T00:00:08")), ("IU", "ANMO", "10", "HHZ", UTCDateTime("2010-05-25T00:00:00"), UTCDateTime("2010-05-25T00:00:04")), ("II", "KURK", "00", "BHN", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:04"))) bulk2 = (("TA", "A25A", "", "BHZ", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:04")), ("TA", "A25A", "", "BHE", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:06")), ("IU", "ANMO", "*", "HHZ", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:08"))) params2 = dict(quality="B", longestonly=False, minimumlength=5) for client in clients: # test output to stream got = client.get_waveforms_bulk(bulk1) self.assertEqual(got, expected1, failmsg(got, expected1)) got = client.get_waveforms_bulk(bulk2, **params2) self.assertEqual(got, expected2, failmsg(got, expected2)) # test output to file with NamedTemporaryFile() as tf: client.get_waveforms_bulk(bulk1, filename=tf.name) got = read(tf.name) self.assertEqual(got, expected1, failmsg(got, expected1)) with NamedTemporaryFile() as tf: client.get_waveforms_bulk(bulk2, filename=tf.name, **params2) got = read(tf.name) self.assertEqual(got, expected2, failmsg(got, expected2)) # test cases for providing a request string bulk1 = ("TA A25A -- BHZ 2010-03-25T00:00:00 2010-03-25T00:00:04\n" "IU ANMO * BH? 
2010-03-25 2010-03-25T00:00:08\n" "IU ANMO 10 HHZ 2010-05-25T00:00:00 2010-05-25T00:00:04\n" "II KURK 00 BHN 2010-03-25T00:00:00 2010-03-25T00:00:04\n") bulk2 = ("quality=B\n" "longestonly=false\n" "minimumlength=5\n" "TA A25A -- BHZ 2010-03-25T00:00:00 2010-03-25T00:00:04\n" "TA A25A -- BHE 2010-03-25T00:00:00 2010-03-25T00:00:06\n" "IU ANMO * HHZ 2010-03-25T00:00:00 2010-03-25T00:00:08\n") for client in clients: # test output to stream got = client.get_waveforms_bulk(bulk1) self.assertEqual(got, expected1, failmsg(got, expected1)) got = client.get_waveforms_bulk(bulk2) self.assertEqual(got, expected2, failmsg(got, expected2)) # test output to file with NamedTemporaryFile() as tf: client.get_waveforms_bulk(bulk1, filename=tf.name) got = read(tf.name) self.assertEqual(got, expected1, failmsg(got, expected1)) with NamedTemporaryFile() as tf: client.get_waveforms_bulk(bulk2, filename=tf.name) got = read(tf.name) self.assertEqual(got, expected2, failmsg(got, expected2)) # test cases for providing a filename for client in clients: with NamedTemporaryFile() as tf: with open(tf.name, "wb") as fh: fh.write(bulk1) got = client.get_waveforms_bulk(tf.name) self.assertEqual(got, expected1, failmsg(got, expected1)) with NamedTemporaryFile() as tf: with open(tf.name, "wb") as fh: fh.write(bulk2) got = client.get_waveforms_bulk(tf.name) self.assertEqual(got, expected2, failmsg(got, expected2)) # test cases for providing a file-like object for client in clients: got = client.get_waveforms_bulk(StringIO(bulk1)) self.assertEqual(got, expected1, failmsg(got, expected1)) got = client.get_waveforms_bulk(StringIO(bulk2)) self.assertEqual(got, expected2, failmsg(got, expected2)) def test_get_waveform_attach_response(self): """ minimal test for automatic attaching of metadata """ client = self.client bulk = ("TA A25A -- BHZ 2010-03-25T00:00:00 2010-03-25T00:00:04\n" "IU ANMO * BH? 2010-03-25 2010-03-25T00:00:08\n" "IU ANMO 10 HHZ 2010-05-25T00:00:00 2010-05-25T00:00:04\n" "II KURK 00 BHN 2010-03-25T00:00:00 2010-03-25T00:00:04\n") st = client.get_waveforms_bulk(bulk, attach_response=True) for tr in st: self.assertTrue(isinstance(tr.stats.get("response"), Response)) st = client.get_waveforms("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000"), attach_response=True) for tr in st: self.assertTrue(isinstance(tr.stats.get("response"), Response)) @mock.patch("obspy.fdsn.client.download_url") def test_default_requested_urls(self, download_url_mock): """ Five requests should be sent upon initializing a client. Test these. """ download_url_mock.return_value = (404, None) base_url = "http://example.com" # An exception will be raised if not actual WADLs are returned. try: Client(base_url=base_url) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/1/contributors" % base_url, "%s/fdsnws/event/1/catalogs" % base_url, "%s/fdsnws/event/1/application.wadl" % base_url, "%s/fdsnws/station/1/application.wadl" % base_url, "%s/fdsnws/dataselect/1/application.wadl" % base_url, ]) got_urls = sorted( [_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) @mock.patch("obspy.fdsn.client.download_url") def test_setting_service_major_version(self, download_url_mock): """ Test the setting of custom major versions. """ download_url_mock.return_value = (404, None) base_url = "http://example.com" # Passing an empty dictionary results in the default urls. major_versions = {} # An exception will be raised if not actual WADLs are returned.
try: Client(base_url=base_url, major_versions=major_versions) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/1/contributors" % base_url, "%s/fdsnws/event/1/catalogs" % base_url, "%s/fdsnws/event/1/application.wadl" % base_url, "%s/fdsnws/station/1/application.wadl" % base_url, "%s/fdsnws/dataselect/1/application.wadl" % base_url, ]) got_urls = sorted( [_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) # Replace all download_url_mock.reset_mock() download_url_mock.return_value = (404, None) major_versions = {"event": 7, "station": 8, "dataselect": 9} # An exception will be raised if not actual WADLs are returned. try: Client(base_url=base_url, major_versions=major_versions) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/7/contributors" % base_url, "%s/fdsnws/event/7/catalogs" % base_url, "%s/fdsnws/event/7/application.wadl" % base_url, "%s/fdsnws/station/8/application.wadl" % base_url, "%s/fdsnws/dataselect/9/application.wadl" % base_url, ]) got_urls = sorted( [_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) # Replace only some download_url_mock.reset_mock() download_url_mock.return_value = (404, None) major_versions = {"event": 7, "station": 8} # An exception will be raised if not actual WADLs are returned. try: Client(base_url=base_url, major_versions=major_versions) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/7/contributors" % base_url, "%s/fdsnws/event/7/catalogs" % base_url, "%s/fdsnws/event/7/application.wadl" % base_url, "%s/fdsnws/station/8/application.wadl" % base_url, "%s/fdsnws/dataselect/1/application.wadl" % base_url, ]) got_urls = sorted( [_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) @mock.patch("obspy.fdsn.client.download_url") def test_setting_service_provider_mappings(self, download_url_mock): """ Tests the setting of per service endpoints """ base_url = "http://example.com" # Replace all. download_url_mock.return_value = (404, None) # Some custom urls base_url_event = "http://other_url.com/beta/event_service/11" base_url_station = "http://some_url.com/beta2/stat_serv/7" base_url_ds = "http://new.com/beta3/waveforms/8" # An exception will be raised if not actual WADLs are returned. try: Client(base_url=base_url, service_mappings={ "event": base_url_event, "station": base_url_station, "dataselect": base_url_ds, }) except FDSNException: pass expected_urls = sorted([ "%s/contributors" % base_url_event, "%s/catalogs" % base_url_event, "%s/application.wadl" % base_url_event, "%s/application.wadl" % base_url_station, "%s/application.wadl" % base_url_ds, ]) got_urls = sorted( [_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) # Replace only two. The others keep the default mapping. download_url_mock.reset_mock() download_url_mock.return_value = (404, None) # Some custom urls base_url_station = "http://some_url.com/beta2/stat_serv/7" base_url_ds = "http://new.com/beta3/waveforms/8" # An exception will be raised if not actual WADLs are returned. 
try: Client(base_url=base_url, service_mappings={ "station": base_url_station, "dataselect": base_url_ds, }) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/1/contributors" % base_url, "%s/fdsnws/event/1/catalogs" % base_url, "%s/fdsnws/event/1/application.wadl" % base_url, "%s/application.wadl" % base_url_station, "%s/application.wadl" % base_url_ds, ]) got_urls = sorted( [_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) def test_manually_deactivate_single_service(self): """ Test manually deactivating a single service. """ client = Client(base_url="IRIS", user_agent=USER_AGENT, service_mappings={"event": None}) self.assertEqual(sorted(client.services.keys()), ['dataselect', 'station']) @mock.patch("obspy.fdsn.client.download_url") def test_download_urls_for_custom_mapping(self, download_url_mock): """ Tests the downloading of data with custom mappings. """ base_url = "http://example.com" # More extensive mock setup simulating service discovery. def custom_side_effects(*args, **kwargs): if "version" in args[0]: return 200, "1.0.200" elif "event" in args[0]: with open( os.path.join(self.datapath, "2014-01-07_iris_event.wadl")) as fh: return 200, fh.read() elif "station" in args[0]: with open( os.path.join(self.datapath, "2014-01-07_iris_station.wadl")) as fh: return 200, fh.read() elif "dataselect" in args[0]: with open( os.path.join(self.datapath, "2014-01-07_iris_dataselect.wadl")) as fh: return 200, fh.read() return 404, None download_url_mock.side_effect = custom_side_effects # Some custom urls base_url_event = "http://other_url.com/beta/event_service/11" base_url_station = "http://some_url.com/beta2/station/7" base_url_ds = "http://new.com/beta3/dataselect/8" # An exception will be raised if not actual WADLs are returned. c = Client(base_url=base_url, service_mappings={ "event": base_url_event, "station": base_url_station, "dataselect": base_url_ds, }) # Test the dataselect downloading. download_url_mock.reset_mock() download_url_mock.side_effect = None download_url_mock.return_value = 404, None try: c.get_waveforms("A", "B", "C", "D", UTCDateTime() - 100, UTCDateTime()) except: pass self.assertTrue( base_url_ds in download_url_mock.call_args_list[0][0][0]) # Test the station downloading. download_url_mock.reset_mock() download_url_mock.side_effect = None download_url_mock.return_value = 404, None try: c.get_stations() except: pass self.assertTrue( base_url_station in download_url_mock.call_args_list[0][0][0]) # Test the event downloading. download_url_mock.reset_mock() download_url_mock.side_effect = None download_url_mock.return_value = 404, None try: c.get_events() except: pass self.assertTrue( base_url_event in download_url_mock.call_args_list[0][0][0])
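From the user side, the service_mappings feature exercised by the last few tests looks like this; the behaviour shown mirrors test_manually_deactivate_single_service above:

from obspy.fdsn import Client

# passing None for a service removes it from discovery entirely
client = Client(base_url="IRIS", service_mappings={"event": None})
print sorted(client.services.keys())  # -> ['dataselect', 'station']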
import sys import glob import argparse print 'Importing Obspy modules...' from obspy.fdsn import Client import obspy from obspy import read import obspy.signal import pylab as plt from obspy.taup.taup import getTravelTimes from obspy.core.util import locations2degrees from obspy.iris import Client as iclient from obspy import UTCDateTime IRISclient = iclient() FDSNclient = Client('IRIS') from collections import Counter print 'Done imports' parser = argparse.ArgumentParser() parser.add_argument('-params',action='store',dest='inputfile',help='Input the name of the parameter file if you want to use this to get data') parser.add_argument('-plot',action='store',default=False,dest='plotraypath',help='Append this if you want to produce a .pdf plot of the raypaths in your request. Give the full file path') parser.add_argument('-prep',action='store',dest='inputpath',help='The full file path to the data you want to prepare for a tomography project') parser.add_argument('-phase',action='store',dest='phase',help='The seismic phase you are interested in: This determines which SAC files are accessed and what the final output is. Choose from S or P') parser.add_argument('-Tcheck',action='store_true',default=False,dest='tcheck',help='Append to check the data directory for timing problems, and report the suspect files')
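Example invocations of the tool; the script name and file paths are hypothetical:

#   python prep_tomo_data.py -params input.params -plot raypaths.pdf
#   python prep_tomo_data.py -prep /path/to/sac/data -phase P -Tcheck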
# -*- coding: utf-8 -*- from obspy.fdsn import Client from obspy import UTCDateTime rede, estacao, b_jday, e_jday = raw_input('Ex: BL AQDB 2015-001 2015-002:\n').split() fdsn = Client(base_url="http://moho.iag.usp.br") start = UTCDateTime("%s" %b_jday) end = UTCDateTime("%s" %e_jday) # request and write each component separately st_z = fdsn.get_waveforms("%s" %rede, "%s" %estacao, "", "HHZ", start, end) st_n = fdsn.get_waveforms("%s" %rede, "%s" %estacao, "", "HHN", start, end) st_e = fdsn.get_waveforms("%s" %rede, "%s" %estacao, "", "HHE", start, end) st_z.write("%s.%s..HHZ.D.%s.%s" %(rede, estacao, b_jday[0:4], b_jday[5:] ), "MSEED") st_n.write("%s.%s..HHN.D.%s.%s" %(rede, estacao, b_jday[0:4], b_jday[5:] ), "MSEED") st_e.write("%s.%s..HHE.D.%s.%s" %(rede, estacao, b_jday[0:4], b_jday[5:] ), "MSEED")
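The three request/write pairs can equivalently be folded into a loop over the channel codes (a sketch with the same behaviour):

for canal in ("HHZ", "HHN", "HHE"):
    st = fdsn.get_waveforms(rede, estacao, "", canal, start, end)
    st.write("%s.%s..%s.D.%s.%s" % (rede, estacao, canal,
                                    b_jday[0:4], b_jday[5:]), "MSEED")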
path = path.strip() path = path.rstrip("\\") isExists = os.path.exists(path) if not isExists: os.makedirs(path) print path + ' created' return True else: print path + ' exists' return False if __name__ == '__main__': client = Client("IRIS") network = "XF" starttime = UTCDateTime("2003-06-01") endtime = UTCDateTime("2003-11-01") # endtime = UTCDateTime("1999-05-19") # endtime = UTCDateTime("1998-12-04") # endtime = UTCDateTime("1998-12-05") events = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=5.5, catalog="ISC") # events.plot() stations = client.get_stations(network=network, station="H*", starttime=starttime, endtime=endtime, level="response") # stations.plot()
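A plausible next step given the events and stations fetched above: cut a window of waveform data around each origin time. The channel code and window length are assumptions, not from the original script:

for event in events:
    origin_time = event.origins[0].time
    try:
        # one hour of vertical-component data per event, all H* stations
        st = client.get_waveforms(network, "H*", "*", "BHZ",
                                  origin_time, origin_time + 3600)
        st.write("%s.mseed" % origin_time.strftime("%Y%m%dT%H%M%S"),
                 format="MSEED")
    except Exception:
        continue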
global freq_range freq_range = [1./max(period_range), 1./min(period_range)] global acceptible_channels acceptible_channels = ['BHZ', 'MHZ', 'LHZ', 'VHZ', 'UHZ'] #'BNZ', 'MNZ', 'LNZ', 'VNZ', 'UNZ'] outfolder = '/storage/ANT/NZ Station Responses' # create list of all possible FDSN clients that work under obspy. client_list = (u'BGR', u'ETH', u'GEONET', u'GFZ', u'INGV', u'IPGP', u'IRIS', u'KOERI', u'LMU', u'NCEDC', u'NIEP', u'NERIES', u'ODC', u'ORFEUS', u'RESIF', u'SCEDC', u'USGS', u'USP') client = Client("GEONET") starttime = UTCDateTime("2014-01-01") endtime = UTCDateTime("2015-01-01") inventory = client.get_stations(network="*", station="*", loc='*', channel="*Z", starttime=starttime, endtime=endtime, level="response") for net in inventory: print net for sta in net:
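The loop is truncated here; one plausible body, saving a response plot per accepted channel into outfolder (a sketch, not the original code, assuming an ObsPy version whose Response objects provide plot()):

import os
for net in inventory:
    for sta in net:
        for cha in sta.channels:
            if cha.code not in acceptible_channels:
                continue
            outfile = os.path.join(outfolder, "%s.%s.%s.png"
                                   % (net.code, sta.code, cha.code))
            try:
                cha.response.plot(min_freq=freq_range[0], outfile=outfile)
            except Exception:
                continue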
# Testing ObsPy with FDSN web services # # see http://docs.obspy.org/packages/obspy.fdsn.html # List all FDSN providers (example from link above) ---------------------------- from obspy import UTCDateTime from obspy.fdsn import Client client = Client("IRIS") from obspy.fdsn.header import URL_MAPPINGS for key in sorted(URL_MAPPINGS.keys()): print("{0:<7} {1}".format(key, URL_MAPPINGS[key])) # Inventory (from link above) -------------------------------------------------- starttime = UTCDateTime("2002-01-01") endtime = UTCDateTime("2002-01-02") inventory = client.get_stations(network="IU", station="A*", starttime=starttime, endtime=endtime) print(inventory) # NCEDC channels request ------------------------------------------------------- client = Client("NCEDC")
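The NCEDC request is cut off here; a minimal channel-level query against that data center might look like the following (the network and station values are just examples):

starttime = UTCDateTime("2002-01-01")
endtime = UTCDateTime("2002-01-02")
inventory = client.get_stations(network="BK", station="CMB", channel="BH*",
                                starttime=starttime, endtime=endtime,
                                level="channel")
print(inventory)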
class ClientTestCase(unittest.TestCase): """ Test cases for obspy.fdsn.client.Client. """ def __init__(self, *args, **kwargs): """ setupClass() would be better suited for the task at hand but is not supported by Python 2.6. """ super(ClientTestCase, self).__init__(*args, **kwargs) # directory where the test files are located self.path = os.path.dirname(__file__) self.datapath = os.path.join(self.path, "data") self.client = Client(base_url="IRIS", user_agent=USER_AGENT) self.client_auth = \ Client(base_url="IRIS", user_agent=USER_AGENT, user="******", password="******") def test_url_building(self): """ Tests the build_url() functions. """ # Application WADL self.assertEqual( build_url("http://service.iris.edu", "dataselect", 1, "application.wadl"), "http://service.iris.edu/fdsnws/dataselect/1/application.wadl") self.assertEqual( build_url("http://service.iris.edu", "event", 1, "application.wadl"), "http://service.iris.edu/fdsnws/event/1/application.wadl") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "application.wadl"), "http://service.iris.edu/fdsnws/station/1/application.wadl") # Test one parameter. self.assertEqual( build_url("http://service.iris.edu", "dataselect", 1, "query", {"network": "BW"}), "http://service.iris.edu/fdsnws/dataselect/1/query?network=BW") self.assertEqual( build_url("http://service.iris.edu", "dataselect", 1, "queryauth", {"network": "BW"}), "http://service.iris.edu/fdsnws/dataselect/1/queryauth?network=BW") # Test two parameters. Note random order, two possible results. self.assertTrue( build_url("http://service.iris.edu", "dataselect", 1, "query", {"net": "A", "sta": "BC"}) in ("http://service.iris.edu/fdsnws/dataselect/1/query?net=A&sta=BC", "http://service.iris.edu/fdsnws/dataselect/1/query?sta=BC&net=A")) # A wrong service raises a ValueError self.assertRaises(ValueError, build_url, "http://service.iris.edu", "obspy", 1, "query") def test_location_parameters(self): """ Tests how the variety of location values is handled. Why location? Mostly because it is one tricky parameter. It is not uncommon to assume that a non-existent location is "--", but in reality "--" is "<space><space>". This substitution exists mostly because various applications have trouble digesting spaces (spaces in the URL, for example). The confusion begins when location is treated as empty instead, which would imply "I want all locations" instead of "I only want locations of <space><space>" """ # requests with no specified location should be treated as a wildcard self.assertFalse( "--" in build_url("http://service.iris.edu", "station", 1, "query", {"network": "IU", "station": "ANMO", "starttime": "2013-01-01"})) # location of " " is the same as "--" self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": " "}), "http://service.iris.edu/fdsnws/station/1/query?location=--") # wildcard locations are valid. Will be encoded. self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "*"}), "http://service.iris.edu/fdsnws/station/1/query?location=%2A") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "A?"}), "http://service.iris.edu/fdsnws/station/1/query?location=A%3F") # lists are valid, including <space><space> lists. Again encoded # result. self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": " ,1?,?0"}), "http://service.iris.edu/fdsnws/station/1/query?"
"location=--%2C1%3F%2C%3F0") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "1?,--,?0"}), "http://service.iris.edu/fdsnws/station/1/query?" "location=1%3F%2C--%2C%3F0") # Test all three special cases with empty parameters into lists. self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": " ,AA,BB"}), "http://service.iris.edu/fdsnws/station/1/query?" "location=--%2CAA%2CBB") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "AA, ,BB"}), "http://service.iris.edu/fdsnws/station/1/query?" "location=AA%2C--%2CBB") self.assertEqual( build_url("http://service.iris.edu", "station", 1, "query", {"location": "AA,BB, "}), "http://service.iris.edu/fdsnws/station/1/query?" "location=AA%2CBB%2C--") def test_url_building_with_auth(self): """ Tests the Client._build_url() method with authentication. Necessary on top of test_url_building test case because clients with authentication have to build different URLs for dataselect. """ # no authentication got = self.client._build_url("dataselect", "query", {'net': "BW"}) expected = "http://service.iris.edu/fdsnws/dataselect/1/query?net=BW" self.assertEqual(got, expected) # with authentication got = self.client_auth._build_url("dataselect", "query", {'net': "BW"}) expected = ("http://service.iris.edu/fdsnws/dataselect/1/" "queryauth?net=BW") self.assertEqual(got, expected) def test_service_discovery_iris(self): """ Tests the automatic discovery of services with the IRIS endpoint. The test parameters are taken from IRIS' website. This will have to be adjusted once IRIS changes their implementation. """ client = self.client self.assertEqual(set(client.services.keys()), set(("dataselect", "event", "station", "available_event_contributors", "available_event_catalogs"))) # The test sets are copied from the IRIS webpage. self.assertEqual( set(client.services["dataselect"].keys()), set(("starttime", "endtime", "network", "station", "location", "channel", "quality", "minimumlength", "longestonly"))) self.assertEqual( set(client.services["station"].keys()), set(("starttime", "endtime", "startbefore", "startafter", "endbefore", "endafter", "network", "station", "location", "channel", "minlatitude", "maxlatitude", "minlongitude", "maxlongitude", "latitude", "longitude", "minradius", "maxradius", "level", "includerestricted", "includeavailability", "updatedafter", "matchtimeseries"))) self.assertEqual( set(client.services["event"].keys()), set(("starttime", "endtime", "minlatitude", "maxlatitude", "minlongitude", "maxlongitude", "latitude", "longitude", "maxradius", "minradius", "mindepth", "maxdepth", "minmagnitude", "maxmagnitude", "magnitudetype", "catalog", "contributor", "limit", "offset", "orderby", "updatedafter", "includeallorigins", "includeallmagnitudes", "includearrivals", "eventid", "originid" # XXX: This is currently just specified in the # WADL. ))) # Also check an exemplary value in more detail. minradius = client.services["event"]["minradius"] self.assertEqual(minradius["default_value"], 0.0) self.assertEqual(minradius["required"], False) self.assertEqual(minradius["doc"], "") self.assertEqual(minradius["doc_title"], "Specify minimum distance " "from the geographic point defined by latitude and " "longitude") self.assertEqual(minradius["type"], float) self.assertEqual(minradius["options"], []) def test_IRIS_event_catalog_availability(self): """ Tests the parsing of the available event catalogs. 
""" self.assertEqual(set(self.client.services["available_event_catalogs"]), set(("ANF", "GCMT", "TEST", "ISC", "UofW", "NEIC PDE"))) def test_IRIS_event_contributors_availability(self): """ Tests the parsing of the available event contributors. """ self.assertEqual(set( self.client.services["available_event_contributors"]), set(("University of Washington", "ANF", "GCMT", "GCMT-Q", "ISC", "NEIC ALERT", "NEIC PDE-W", "UNKNOWN", "NEIC PDE-M", "NEIC COMCAT", "NEIC PDE-Q"))) def test_simple_XML_parser(self): """ Tests the simple XML parsing helper function. """ catalogs = parse_simple_xml(""" <?xml version="1.0"?> <Catalogs> <total>6</total> <Catalog>ANF</Catalog> <Catalog>GCMT</Catalog> <Catalog>TEST</Catalog> <Catalog>ISC</Catalog> <Catalog>UofW</Catalog> <Catalog>NEIC PDE</Catalog> </Catalogs>""") self.assertEqual(catalogs, {"catalogs": set(("ANF", "GCMT", "TEST", "ISC", "UofW", "NEIC PDE"))}) def test_IRIS_example_queries_event(self): """ Tests the (sometimes modified) example queries given on the IRIS web page. """ client = self.client queries = [ dict(eventid=609301), dict(starttime=UTCDateTime("2001-01-07T01:00:00"), endtime=UTCDateTime("2001-01-07T01:05:00"), catalog="ISC"), dict(starttime=UTCDateTime("2001-01-07T14:00:00"), endtime=UTCDateTime("2001-01-08T00:00:00"), minlatitude=15, maxlatitude=40, minlongitude=-170, maxlongitude=170, includeallmagnitudes=True, minmagnitude=4, orderby="magnitude"), ] result_files = ["events_by_eventid.xml", "events_by_time.xml", "events_by_misc.xml", ] for query, filename in zip(queries, result_files): file_ = os.path.join(self.datapath, filename) # query["filename"] = file_ got = client.get_events(**query) expected = readEvents(file_) self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_events(filename=tf.name, **query) with open(tf.name, 'rb') as fh: got = fh.read() with open(file_, 'rb') as fh: expected = fh.read() self.assertEqual(got, expected, failmsg(got, expected)) def test_IRIS_example_queries_station(self): """ Tests the (sometimes modified) example queries given on IRIS webpage. """ client = self.client queries = [ dict(latitude=-56.1, longitude=-26.7, maxradius=15), dict(startafter=UTCDateTime("2003-01-07"), endbefore=UTCDateTime("2011-02-07"), minlatitude=15, maxlatitude=55, minlongitude=170, maxlongitude=-170), dict(starttime=UTCDateTime("2000-01-01"), endtime=UTCDateTime("2001-01-01"), net="IU", sta="ANMO"), dict(starttime=UTCDateTime("2000-01-01"), endtime=UTCDateTime("2002-01-01"), network="IU", sta="A*", location="00"), ] result_files = ["stations_by_latlon.xml", "stations_by_misc.xml", "stations_by_station.xml", "stations_by_station_wildcard.xml", ] for query, filename in zip(queries, result_files): file_ = os.path.join(self.datapath, filename) # query["filename"] = file_ got = client.get_stations(**query) expected = read_inventory(file_, format="STATIONXML") # delete both creating times and modules before comparing objects. 
got.created = None expected.created = None got.module = None expected.module = None # XXX Py3k: the objects differ in direct comparison, however, # the strings of them are equal self.assertEqual(str(got), str(expected), failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_stations(filename=tf.name, **query) with open(tf.name, 'rb') as fh: got = fh.read() with open(file_, 'rb') as fh: expected = fh.read() ignore_lines = [b'<Created>', b'<TotalNumberStations>', b'<Module>', b'<ModuleURI>'] msg = failmsg(got, expected, ignore_lines=ignore_lines) self.assertEqual(msg, "", msg) def test_IRIS_example_queries_dataselect(self): """ Tests the (sometimes modified) example queries given on IRIS webpage. """ client = self.client queries = [ ("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000")), ("IU", "A*", "*", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:31:00.000")), ("IU", "A??", "*0", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:31:00.000")), ] result_files = ["dataselect_example.mseed", "dataselect_example_wildcards.mseed", "dataselect_example_mixed_wildcards.mseed", ] for query, filename in zip(queries, result_files): # test output to stream got = client.get_waveforms(*query) file_ = os.path.join(self.datapath, filename) expected = read(file_) self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_waveforms(*query, filename=tf.name) with open(tf.name, 'rb') as fh: got = fh.read() with open(file_, 'rb') as fh: expected = fh.read() self.assertEqual(got, expected, failmsg(got, expected)) def test_authentication(self): """ Test dataselect with authentication. """ client = self.client_auth # dataselect example queries query = ("IU", "ANMO", "00", "BHZ", UTCDateTime("2010-02-27T06:30:00.000"), UTCDateTime("2010-02-27T06:40:00.000")) filename = "dataselect_example.mseed" got = client.get_waveforms(*query) file_ = os.path.join(self.datapath, filename) expected = read(file_) self.assertEqual(got, expected, failmsg(got, expected)) def test_conflicting_params(self): """ """ self.assertRaises(FDSNException, self.client.get_stations, network="IU", net="IU") def test_help_function_with_IRIS(self): """ Tests the help function with the IRIS example. This will have to be adapted any time IRIS changes their implementation. """ try: client = self.client # Capture output tmp = io.StringIO() sys.stdout = tmp client.help("event") got = sys.stdout.getvalue() sys.stdout = sys.__stdout__ tmp.close() expected = ( "Parameter description for the 'event' service (v1.0.6) of " "'http://service.iris.edu':\n" "The service offers the following non-standard parameters:\n" " originid (int)\n" " Retrieve an event based on the unique origin ID " "numbers assigned by\n" " the IRIS DMC\n" "Available catalogs: ANF, UofW, NEIC PDE, ISC, TEST, GCMT\n" "Available contributors: NEIC PDE-W, ANF, University of " "Washington, GCMT-Q, NEIC PDE-Q, UNKNOWN, NEIC ALERT, ISC, " "NEIC PDE-M, NEIC COMCAT, GCMT\n") # allow for changes in version number.. self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) # Reset. Creating a new one is faster than clearing the old one.
tmp = io.StringIO() sys.stdout = tmp client.help("station") got = sys.stdout.getvalue() sys.stdout = sys.__stdout__ tmp.close() expected = ( "Parameter description for the 'station' service (v1.0.7) of " "'http://service.iris.edu':\n" "The service offers the following non-standard parameters:\n" " matchtimeseries (bool)\n" " Specify that the availabilities line up with " "available data. This is\n" " an IRIS extension to the FDSN specification\n") self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) # Reset. tmp = io.StringIO() sys.stdout = tmp client.help("dataselect") got = sys.stdout.getvalue() sys.stdout = sys.__stdout__ tmp.close() expected = ( "Parameter description for the 'dataselect' service (v1.0.0) " "of 'http://service.iris.edu':\n" "No derivations from standard detected\n") self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) finally: sys.stdout = sys.__stdout__ def test_str_method(self): got = str(self.client) expected = ( "FDSN Webservice Client (base url: http://service.iris.edu)\n" "Available Services: 'dataselect' (v1.0.0), 'event' (v1.0.6), " "'station' (v1.0.7), 'available_event_contributors', " "'available_event_catalogs'\n\n" "Use e.g. client.help('dataselect') for the\n" "parameter description of the individual services\n" "or client.help() for parameter description of\n" "all webservices.") self.assertEqual(normalize_version_number(got), normalize_version_number(expected), failmsg(normalize_version_number(got), normalize_version_number(expected))) def test_dataselect_bulk(self): """ Test bulk dataselect requests, POSTing data to server. Also tests authenticated bulk request. 
""" clients = [self.client, self.client_auth] file = os.path.join(self.datapath, "bulk.mseed") expected = read(file) # test cases for providing lists of lists bulk = (("TA", "A25A", "", "BHZ", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:04")), ("TA", "A25A", "", "BHE", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:06")), ("IU", "ANMO", "*", "HHZ", UTCDateTime("2010-03-25T00:00:00"), UTCDateTime("2010-03-25T00:00:08"))) params = dict(quality="B", longestonly=False, minimumlength=5) for client in clients: # test output to stream got = client.get_waveforms_bulk(bulk, **params) self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_waveforms_bulk(bulk, filename=tf.name, **params) got = read(tf.name) self.assertEqual(got, expected, failmsg(got, expected)) # test cases for providing a request string bulk = ("quality=B\n" "longestonly=false\n" "minimumlength=5\n" "TA A25A -- BHZ 2010-03-25T00:00:00 2010-03-25T00:00:04\n" "TA A25A -- BHE 2010-03-25T00:00:00 2010-03-25T00:00:06\n" "IU ANMO * HHZ 2010-03-25T00:00:00 2010-03-25T00:00:08\n") for client in clients: # test output to stream got = client.get_waveforms_bulk(bulk) self.assertEqual(got, expected, failmsg(got, expected)) # test output to file with NamedTemporaryFile() as tf: client.get_waveforms_bulk(bulk, filename=tf.name) got = read(tf.name) self.assertEqual(got, expected, failmsg(got, expected)) # test cases for providing a filename for client in clients: with NamedTemporaryFile() as tf: with open(tf.name, "wt") as fh: fh.write(bulk) got = client.get_waveforms_bulk(bulk) self.assertEqual(got, expected, failmsg(got, expected)) # test cases for providing a file-like object for client in clients: got = client.get_waveforms_bulk(io.StringIO(bulk)) self.assertEqual(got, expected, failmsg(got, expected)) def test_station_bulk(self): """ Test bulk station requests, POSTing data to server. Also tests authenticated bulk request. Does currently only test reading from a list of list. The other input types are tested with the waveform bulk downloader and thus should work just fine. """ clients = [self.client, self.client_auth] # test cases for providing lists of lists starttime = UTCDateTime(1990, 1, 1) endtime = UTCDateTime(1990, 1, 1) + 10 bulk = [ ["IU", "ANMO", "", "BHE", starttime, endtime], ["IU", "CCM", "", "BHZ", starttime, endtime], ["IU", "COR", "", "UHZ", starttime, endtime], ["IU", "HRV", "", "LHN", starttime, endtime], ] for client in clients: # Test with station level. inv = client.get_stations_bulk(bulk, level="station") # Test with output to file. with NamedTemporaryFile() as tf: client.get_stations_bulk( bulk, filename=tf.name, level="station") inv2 = read_inventory(tf.name, format="stationxml") self.assertEqual(inv.networks, inv2.networks) self.assertEqual(len(inv.networks), 1) self.assertEqual(inv[0].code, "IU") self.assertEqual(len(inv.networks[0].stations), 4) self.assertEqual( sorted([_i.code for _i in inv.networks[0].stations]), sorted(["ANMO", "CCM", "COR", "HRV"])) # Test with channel level. inv = client.get_stations_bulk(bulk, level="channel") # Test with output to file. 
with NamedTemporaryFile() as tf: client.get_stations_bulk( bulk, filename=tf.name, level="channel") inv2 = read_inventory(tf.name, format="stationxml") self.assertEqual(inv.networks, inv2.networks) self.assertEqual(len(inv.networks), 1) self.assertEqual(inv[0].code, "IU") self.assertEqual(len(inv.networks[0].stations), 4) self.assertEqual( sorted([_i.code for _i in inv.networks[0].stations]), sorted(["ANMO", "CCM", "COR", "HRV"])) channels = [] for station in inv[0]: for channel in station: channels.append("IU.%s.%s.%s" % ( station.code, channel.location_code, channel.code)) self.assertEqual( sorted(channels), sorted(["IU.ANMO..BHE", "IU.CCM..BHZ", "IU.COR..UHZ", "IU.HRV..LHN"])) return def test_get_waveform_attach_response(self): """ minimal test for automatic attaching of metadata """ client = self.client bulk = ("IU ANMO 00 BHZ 2000-03-25T00:00:00 2000-03-25T00:00:04\n") st = client.get_waveforms_bulk(bulk, attach_response=True) for tr in st: self.assertTrue(isinstance(tr.stats.get("response"), Response)) st = client.get_waveforms("IU", "ANMO", "00", "BHZ", UTCDateTime("2000-02-27T06:00:00.000"), UTCDateTime("2000-02-27T06:00:05.000"), attach_response=True) for tr in st: self.assertTrue(isinstance(tr.stats.get("response"), Response)) @mock.patch("obspy.fdsn.client.download_url") def test_default_requested_urls(self, download_url_mock): """ Five requests should be sent upon initializing a client. Test these. """ download_url_mock.return_value = (404, None) base_url = "http://example.com" # An exception will be raised if not actual WADLs are returned. try: Client(base_url=base_url) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/1/contributors" % base_url, "%s/fdsnws/event/1/catalogs" % base_url, "%s/fdsnws/event/1/application.wadl" % base_url, "%s/fdsnws/station/1/application.wadl" % base_url, "%s/fdsnws/dataselect/1/application.wadl" % base_url, ]) got_urls = sorted([_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) @mock.patch("obspy.fdsn.client.download_url") def test_setting_service_major_version(self, download_url_mock): """ Test the setting of custom major versions. """ download_url_mock.return_value = (404, None) base_url = "http://example.com" # Passing an empty dictionary results in the default urls. major_versions = {} # An exception will be raised if not actual WADLs are returned. try: Client(base_url=base_url, major_versions=major_versions) except FDSNException: pass expected_urls = sorted([ "%s/fdsnws/event/1/contributors" % base_url, "%s/fdsnws/event/1/catalogs" % base_url, "%s/fdsnws/event/1/application.wadl" % base_url, "%s/fdsnws/station/1/application.wadl" % base_url, "%s/fdsnws/dataselect/1/application.wadl" % base_url, ]) got_urls = sorted([_i[0][0] for _i in download_url_mock.call_args_list]) self.assertEqual(expected_urls, got_urls) # Replace all download_url_mock.reset_mock() download_url_mock.return_value = (404, None) major_versions = {"event": 7, "station": 8, "dataselect": 9} # An exception will be raised if not actual WADLs are returned.
        try:
            Client(base_url=base_url, major_versions=major_versions)
        except FDSNException:
            pass
        expected_urls = sorted([
            "%s/fdsnws/event/7/contributors" % base_url,
            "%s/fdsnws/event/7/catalogs" % base_url,
            "%s/fdsnws/event/7/application.wadl" % base_url,
            "%s/fdsnws/station/8/application.wadl" % base_url,
            "%s/fdsnws/dataselect/9/application.wadl" % base_url,
        ])
        got_urls = sorted([_i[0][0] for _i in
                           download_url_mock.call_args_list])
        self.assertEqual(expected_urls, got_urls)

        # Replace only some.
        download_url_mock.reset_mock()
        download_url_mock.return_value = (404, None)
        major_versions = {"event": 7, "station": 8}
        # An exception will be raised if no actual WADLs are returned.
        try:
            Client(base_url=base_url, major_versions=major_versions)
        except FDSNException:
            pass
        expected_urls = sorted([
            "%s/fdsnws/event/7/contributors" % base_url,
            "%s/fdsnws/event/7/catalogs" % base_url,
            "%s/fdsnws/event/7/application.wadl" % base_url,
            "%s/fdsnws/station/8/application.wadl" % base_url,
            "%s/fdsnws/dataselect/1/application.wadl" % base_url,
        ])
        got_urls = sorted([_i[0][0] for _i in
                           download_url_mock.call_args_list])
        self.assertEqual(expected_urls, got_urls)

    @mock.patch("obspy.fdsn.client.download_url")
    def test_setting_service_provider_mappings(self, download_url_mock):
        """
        Tests the setting of per-service endpoints.
        """
        base_url = "http://example.com"

        # Replace all.
        download_url_mock.return_value = (404, None)
        # Some custom urls
        base_url_event = "http://other_url.com/beta/event_service/11"
        base_url_station = "http://some_url.com/beta2/stat_serv/7"
        base_url_ds = "http://new.com/beta3/waveforms/8"
        # An exception will be raised if no actual WADLs are returned.
        try:
            Client(base_url=base_url, service_mappings={
                "event": base_url_event,
                "station": base_url_station,
                "dataselect": base_url_ds,
            })
        except FDSNException:
            pass
        expected_urls = sorted([
            "%s/contributors" % base_url_event,
            "%s/catalogs" % base_url_event,
            "%s/application.wadl" % base_url_event,
            "%s/application.wadl" % base_url_station,
            "%s/application.wadl" % base_url_ds,
        ])
        got_urls = sorted([_i[0][0] for _i in
                           download_url_mock.call_args_list])
        self.assertEqual(expected_urls, got_urls)

        # Replace only two. The others keep the default mapping.
        download_url_mock.reset_mock()
        download_url_mock.return_value = (404, None)
        # Some custom urls
        base_url_station = "http://some_url.com/beta2/stat_serv/7"
        base_url_ds = "http://new.com/beta3/waveforms/8"
        # An exception will be raised if no actual WADLs are returned.
        try:
            Client(base_url=base_url, service_mappings={
                "station": base_url_station,
                "dataselect": base_url_ds,
            })
        except FDSNException:
            pass
        expected_urls = sorted([
            "%s/fdsnws/event/1/contributors" % base_url,
            "%s/fdsnws/event/1/catalogs" % base_url,
            "%s/fdsnws/event/1/application.wadl" % base_url,
            "%s/application.wadl" % base_url_station,
            "%s/application.wadl" % base_url_ds,
        ])
        got_urls = sorted([_i[0][0] for _i in
                           download_url_mock.call_args_list])
        self.assertEqual(expected_urls, got_urls)

    def test_manually_deactivate_single_service(self):
        """
        Test manually deactivating a single service.
        """
        client = Client(base_url="IRIS", user_agent=USER_AGENT,
                        service_mappings={"event": None})
        self.assertEqual(sorted(client.services.keys()),
                         ['dataselect', 'station'])

    @mock.patch("obspy.fdsn.client.download_url")
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulating service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_event.wadl"), "rb") as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_station.wadl"), "rb") as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_dataselect.wadl"), "rb") as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://other_url.com/beta/event_service/11"
        base_url_station = "http://some_url.com/beta2/station/7"
        base_url_ds = "http://new.com/beta3/dataselect/8"

        # An exception will be raised if no actual WADLs are returned.
        # Catch warnings to avoid them being raised for the tests.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            c = Client(base_url=base_url, service_mappings={
                "event": base_url_event,
                "station": base_url_station,
                "dataselect": base_url_ds,
            })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning) or
                            "cannot deal with" in str(warning))

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D", UTCDateTime() - 100,
                            UTCDateTime())
        except Exception:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except Exception:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except Exception:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
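
# Usage sketch (not part of the test suite above): the bulk request formats
# exercised by these tests can also be used against a live server. The
# station/channel codes and times below are arbitrary illustrative values,
# not taken from the tests.
#
#     from obspy.fdsn import Client
#     from obspy import UTCDateTime
#     client = Client("IRIS")
#     bulk = [("IU", "ANMO", "*", "BHZ",
#              UTCDateTime("2010-02-27T06:30:00"),
#              UTCDateTime("2010-02-27T06:30:30"))]
#     st = client.get_waveforms_bulk(bulk)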
from obspy import UTCDateTime
from obspy.fdsn import Client
from plotting_tools import get_coordinates

# ########################## INPUT
req_client = "RESIF"
starttime = None
endtime = None
network = "YV"
station = "*"
location = '*'
channel = '*H*'
file_name = 'list_stas_created.txt'
# ########################## END INPUT

client = Client(req_client)
if starttime:
    starttime = UTCDateTime(starttime)
if endtime:
    endtime = UTCDateTime(endtime)

inv = client.get_stations(network=network, station=station,
                          location=location, channel=channel,
                          starttime=starttime, endtime=endtime,
                          level='channel')
content = inv.get_contents()
chans = list(set(content['channels']))
chans.sort()
net_inv = inv.networks[0]
fio = open(file_name, 'w')
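
# The rest of the script is not shown above; the following is a minimal
# sketch of how the channel list might be written out, assuming one
# "net.sta.loc.chan lat lon elev" line per channel (this output format is
# an assumption, not taken from the original script).
for sta in net_inv:
    for cha in sta:
        fio.write("%s.%s.%s.%s %.4f %.4f %.1f\n"
                  % (net_inv.code, sta.code, cha.location_code,
                     cha.code, cha.latitude, cha.longitude, cha.elevation))
fio.close()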
""" Playing around with ObsPy 20140411: Trying to load data and trigger it, then cut out the triggers 20140415: Working on adding PCA of triggers 20140417: Moved PCA and clustering tests to different file, save triggers """ from obspy.fdsn import Client from obspy import UTCDateTime from obspy.signal.trigger import classicSTALTA, triggerOnset import numpy as np # Grab a day from IRIS (HSR is at Mount St. Helens) client = Client("IRIS") # This is a time period where I know lots of repeaters are happening t = UTCDateTime("2004-11-24T00:00:00.000") savename = 'trigdata4.npy' print('Grabbing waveforms...') st = client.get_waveforms("UW", "HSR", "--", "EHZ", t - 10, t + 86420) print('Done!') # Detrend and merge, fill gaps with zeros, bandpass st = st.detrend() st = st.merge(method=1, fill_value=0) st = st.filter('bandpass', freqmin=1.0, freqmax=10.0, corners=2, zerophase=True) # print("Close the window to continue...") # Helicorder plot # st.plot(type='dayplot')
timelimit = utc("2005-01-01T00:00:00").timestamp
# set time limit; all events before this limit will be erased
# if reset_catalogue = True

database_name = '/storage/MASTERS/CONFIGURATIONS/S_NETWORK/INPUT/DATABASES/timeline.db'

# enter the information for catalogue 1 search
minlat, maxlat, minlon, maxlon = (-40.0, -12.5, 113.0, 154.0)

event_list = []
for c in client_list:
    print("Processing events from the {} catalogue ...\n".format(c))
    try:
        client = Client(c)
        catalogue = client.get_events(starttime=t1, endtime=t2,
                                      minlatitude=minlat,
                                      maxlatitude=maxlat,
                                      minlongitude=minlon,
                                      maxlongitude=maxlon)
        for i in catalogue:
            event_list.append(i)
    except Exception as error:
        print(error)

print(event_list)
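
# The catalogue-reset step described by the comment above is not shown; a
# minimal sketch, assuming reset_catalogue is a boolean defined earlier in
# the script: drop every event whose first origin time falls before
# timelimit.
if reset_catalogue:
    event_list = [ev for ev in event_list
                  if ev.origins[0].time.timestamp >= timelimit]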