Example #1
from obspy.clients.fdsn import Client
from obspy.clients.fdsn.header import FDSNException


def find_event(st, timebefore=5, timeafter=5, service="IRIS"):
    '''
    Uses the selected webservice to search for an event matching the stream's starttime plus/minus the specified time window.
    
    If multiple streams match, lists them.
    
    Parameters
    ----------
    st : ObsPy Stream object
        Stream of SAC format seismograms for the event in question
    timebefore : float
        Time in seconds before stream start time from which to search catalog for events
    timeafter : float
        Time in seconds after stream start time up to which to search catalog for events
    service : String
        Web service to use to search for events. Same options as for obspy.fdsn.Client. Default is IRIS.

    Returns
    -------
    event : ObsPy Event object
        Downloaded information for the event, if found.
    '''
    
    webservice = Client(service)
    
    try:
        cat = webservice.get_events(starttime=st[0].stats.starttime - timebefore,
                                    endtime=st[0].stats.starttime + timeafter,
                                    minmagnitude=st[0].stats.sac.mag - 1.0,
                                    maxmagnitude=st[0].stats.sac.mag + 1.0)
        
    except FDSNException:
        print "No event found for stream startttime. Try adjusting time window."
        return
    
    except AttributeError:
        print "No stats.sac dictionary, attempting search based on time window alone..."
        
        try:
            cat = webservice.get_events(starttime=st[0].stats.starttime - timebefore,
                                        endtime=st[0].stats.starttime + timeafter)
            
        except FDSNException:
            print "No event found for stream startttime. Try adjusting time window."
            return
    
    if len(cat) > 1:
        print "Multiple events found for stream starttime. Try adjusting time window."
        print cat
        return
    
    event = cat[0]
    
    print(event)
    
    return event
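
A minimal usage sketch, assuming the imports above; the SAC file path is hypothetical:

from obspy import read

st = read("example_event.sac")  # hypothetical SAC file with a mag header
event = find_event(st, timebefore=10, timeafter=10, service="IRIS")
if event is not None:
    print(event.preferred_origin() or event.origins[0])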
Example #2
def get_all_mags(eventid):
    """Get all magnitudes for a given event ID.

    Args:
        eventid (str): ComCat Event ID.
    Returns:
        tuple: (row, msg) where row is a dictionary whose keys are
            "magsrc-magtype" and whose values are magnitude values, and
            msg is an error message (empty on success).

    """
    row = {}
    msg = ''
    client = Client('USGS')
    try:
        obsevent = client.get_events(eventid=eventid).events[0]
    except Exception as e:
        msg = 'Failed to download event %s, error "%s".' % (eventid, str(e))
        return (row, msg)
    for mag in obsevent.magnitudes:
        magvalue = mag.mag
        magtype = mag.magnitude_type
        magsrc = get_mag_src(mag)
        colname = '%s-%s' % (magsrc, magtype)
        if colname in row:
            continue
        row[colname] = magvalue
    return (row, msg)
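
A usage sketch; the event ID below is illustrative, and note that the function also relies on a get_mag_src helper defined elsewhere:

row, msg = get_all_mags("us1000abcd")  # illustrative ComCat event ID
if msg:
    print(msg)
for colname, magvalue in row.items():
    print(colname, magvalue)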
Example #3
    def test_redirection(self):
        """
        Tests the redirection of GET and POST requests. We redirect
        everything if no authentication is used.

        IRIS runs three services to test it:
            http://ds.iris.edu/files/redirect/307/station/1
            http://ds.iris.edu/files/redirect/307/dataselect/1
            http://ds.iris.edu/files/redirect/307/event/1
        """
        c = Client("IRIS", service_mappings={
            "station":
                "http://ds.iris.edu/files/redirect/307/station/1",
            "dataselect":
                "http://ds.iris.edu/files/redirect/307/dataselect/1",
            "event":
                "http://ds.iris.edu/files/redirect/307/event/1"},
            user_agent=USER_AGENT)

        st = c.get_waveforms(
            network="IU", station="ANMO", location="00", channel="BHZ",
            starttime=UTCDateTime("2010-02-27T06:30:00.000"),
            endtime=UTCDateTime("2010-02-27T06:30:01.000"))
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        inv = c.get_stations(
            starttime=UTCDateTime("2000-01-01"),
            endtime=UTCDateTime("2001-01-01"),
            network="IU", station="ANMO", level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))

        cat = c.get_events(starttime=UTCDateTime("2001-01-07T01:00:00"),
                           endtime=UTCDateTime("2001-01-07T01:05:00"),
                           catalog="ISC")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(cat)))

        # Also test the bulk requests which are done using POST requests.
        bulk = (("TA", "A25A", "", "BHZ",
                 UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")),
                ("TA", "A25A", "", "BHE",
                 UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")))
        st = c.get_waveforms_bulk(bulk, quality="B", longestonly=False)
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        starttime = UTCDateTime(1990, 1, 1)
        endtime = UTCDateTime(1990, 1, 1) + 10
        bulk = [
            ["IU", "ANMO", "", "BHE", starttime, endtime],
            ["IU", "CCM", "", "BHZ", starttime, endtime],
        ]
        inv = c.get_stations_bulk(bulk, level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))
Example #4
def get_events():
    try:
        return read_events(evname)
    except Exception:
        pass
    client = Client()
    events = client.get_events(starttime=t1, endtime=t2, latitude=lat,
                               longitude=lon, minradius=30, maxradius=90,
                               minmagnitude=6., maxmagnitude=6.5)
    events.write(evname, 'QUAKEML')
    return events
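
This read-or-fetch caching pattern depends on the module-level names evname, t1, t2, lat and lon; a self-contained sketch of the same idea with explicit parameters (the "IRIS" base URL here is illustrative):

import os

from obspy import read_events
from obspy.clients.fdsn import Client


def get_events_cached(filename, **query):
    # Return the cached catalog if it exists, otherwise download and cache it.
    if os.path.exists(filename):
        return read_events(filename)
    catalog = Client("IRIS").get_events(**query)
    catalog.write(filename, "QUAKEML")
    return catalog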
Example #5
 def test_dist_mat_km(self):
     """Test spatial clustering."""
     from eqcorrscan.utils.clustering import dist_mat_km
     from obspy.clients.fdsn import Client
     from obspy import UTCDateTime
     client = Client("IRIS")
     starttime = UTCDateTime("2002-01-01")
     endtime = UTCDateTime("2002-01-02")
     cat = client.get_events(starttime=starttime, endtime=endtime,
                             minmagnitude=6, catalog="ISC")
     dist_mat = dist_mat_km(cat)
     self.assertEqual(len(dist_mat), len(cat))
Example #6
 def test_space_time_cluster(self):
     """Test clustering in space and time."""
     from eqcorrscan.utils.clustering import space_time_cluster
     from obspy.clients.fdsn import Client
     from obspy import UTCDateTime
     client = Client("IRIS")
     starttime = UTCDateTime("2002-01-01")
     endtime = UTCDateTime("2002-01-02")
     cat = client.get_events(starttime=starttime, endtime=endtime,
                             minmagnitude=6, catalog="ISC")
     groups = space_time_cluster(catalog=cat, t_thresh=86400, d_thresh=1000)
     self.assertEqual(len([ev for group in groups for ev in group]),
                      len(cat))
Example #7
def cat4stream(stream, client_name, stime=None, etime=None, minmag=None, lat=None, lon=None, mindepth=None):

	client = Client(client_name)
	try:
		eventinfo 	= stream[0].stats.sh
		depth 		= eventinfo['DEPTH']+10
		lat 		= eventinfo['LAT']
		lon 		= eventinfo['LON']
		origin 		= eventinfo['ORIGIN']

		etime = origin + 300
		stime = origin - 300
		cat = client.get_events(starttime=stime, endtime=etime, maxdepth=depth, latitude=lat, longitude=lon, maxradius=0.5, mindepth=mindepth)

		return cat
	except Exception:
		try:
			cat = client.get_events(starttime=stime, endtime=etime, latitude=lat, longitude=lon, minmagnitude=minmag, mindepth=mindepth)
			return cat
		except Exception:
			print('No Catalog found')
			return
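
A usage sketch for cat4stream; the first call assumes a stream read from a Seismic Handler file (hypothetical path) so that stats.sh is populated, while the second supplies the search window explicitly. The "IRIS" client name is illustrative:

from obspy import read, UTCDateTime

st = read("example.QHD")  # hypothetical SH file with DEPTH/LAT/LON/ORIGIN headers
cat = cat4stream(st, "IRIS")
# Or, without SH headers, give the window and magnitude cut directly:
cat = cat4stream(st, "IRIS", stime=UTCDateTime("2015-01-01"),
                 etime=UTCDateTime("2015-01-02"), minmag=5.5)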
Example #8
    def test_download_write(self):
        """
        Function to download quakeML files from a range of datacenters and \
        attempt to write miniseed files
        """
        import os
        from eqcorrscan.utils import sfile_util
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.clients.fdsn import Client
            from obspy import read_events
            from obspy.clients.fdsn.header import FDSNException
        else:
            from obspy.fdsn import Client
            from obspy import readEvents as read_events
            from obspy.fdsn.header import FDSNException
        import warnings

        event_list = [('GEONET', '2016p008122'),
                      ('NCEDC', '72572665'),
                      ('USGS', 'nc72597260')]
        for event_info in event_list:
            client = Client(event_info[0])
            if event_info[0] == 'GEONET':
                try:
                    data_stream = client.\
                        _download('http://quakeml.geonet.org.nz/' +
                                  'quakeml/1.2/' + event_info[1])
                    data_stream.seek(0, 0)
                    event = read_events(data_stream, format="quakeml")
                    data_stream.close()
                except FDSNException:
                    warnings.warn('FDSNException')
                    continue
            else:
                try:
                    event = client.get_events(eventid=event_info[1],
                                              includearrivals=True)
                except FDSNException:
                    warnings.warn('FDSNException')
                    continue
            test_Sfile_name = sfile_util.eventtosfile(event, 'test', 'L', '.',
                                                      'null', overwrite=True)
            os.remove(test_Sfile_name)
        return True
Example #9
def main():
# 2. Get your events in an obspy Catalog object. Be sure to include the
#    arrivals, so that hypoDD has something to work on, as waveform
#    cross-correlation has not been implemented yet.
    client = Client("http://rdsa.knmi.nl")
    mycat = client.get_events( minmagnitude=0.0,
                               minlatitude=52.6,
                               minlongitude=6.0,
                               maxlatitude=53.8,
                               maxlongitude=7.5,
                               starttime=UTCDateTime("2015-01-01T00:00:00"),
                               includearrivals=True)

# 3. Initialize a hypoDD object. The working directory should contain the
#    binaries for ph2dt and hypoDD. You need the client argument to download
#    station metadata and waveforms* (*not implemented yet)
    myhypoDD=HypoDDObject(mycat,client,"./work")

# 4. Set values for ph2dt and hypoDD parameters (see manual for details), if
#    you don't want to use the default hypoDDutil values (you probably don't)
    myhypoDD.ph2dt_control.maxsep = 7.5
    myhypoDD.hypoDD_control.dist = 300
#   and so on. You can give values for all parameters, or for none.

# 5. Prepare the control files and input files
    myhypoDD.prepare_all()
# 6. Run ph2dt with the current configuration
    myhypoDD.run_ph2dt()
# 7. Run hypoDD with the current configuration
    myhypoDD.run_hypoDD()

# 8. Get the output in the form of a list of clusters (in the hypoDD sense)
#    a cluster is a catalog with some metadata, like whether hypoDD succeeded
#    with these events or how well connected* the cluster is (*not implemented
#    yet)
    clusters=myhypoDD.get_results()

    for cluster in clusters:
        print "hypoDD cluster ID: {}".format(cluster.hypoDD_id)
        print "cluster was relocated successfully: {}".format(
                                               cluster.successful_relocation
                                               )
        print cluster.catalog
        print
Example #10
def get_cat3(name="cat3.xml"):
    """
    Write a small catalog file with a few events that have multiple origins/magnitudes
    """
    if os.path.exists(name):
        return
    from obspy.clients.fdsn import Client

    client = Client("IRIS")
    # times
    t1 = obspy.UTCDateTime("2016-09-03")
    t2 = obspy.UTCDateTime("2016-09-04")
    cat = client.get_events(
        starttime=t1,
        endtime=t2,
        minmagnitude=5.0,
        includeallmagnitudes=True,
        includeallorigins=True,
    )
    cat.write(name, "quakeml")
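
get_cat3 only writes the file if it does not already exist; a caller reads it back, e.g.:

from obspy import read_events

get_cat3("cat3.xml")
cat3 = read_events("cat3.xml")
print(cat3)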
Example #11
def get_events(lonlat, starttime, endtime):
    cat_file = 'DATA/catalog' + str(starttime).replace(' ', '-') + '-' + str(endtime).replace(' ', '-') + '.xml'
    if os.path.exists(cat_file):
        catalog=read_events(cat_file)
    else:

        client = Client('ISC')
        kwargs = {'starttime': starttime, 'endtime': endtime, 
                  'latitude': lonlat[1], 'longitude': lonlat[0],
        # we can use distances from 15 degrees, see Levin et al.
                  'minradius': 15, 'maxradius': 90,
                  'minmagnitude': 5.5, 'maxmagnitude': 6.5}
        print "Following parameters for earthquake extraction will be used:"
        print 'starttime', starttime, 'endtime', endtime,'latitude', lonlat[1], 'longitude', lonlat[0],'minradius: 15', 'maxradius: 90','minmagnitude: 5.5', 'maxmagnitude : 6.5'
        catalog = client.get_events(**kwargs)
        catalog.write(cat_file,'QUAKEML')
        print "Catalog loaded"
        print "Run this process again using qsub"
        exit(0)
    return catalog 
Example #12
    def test_download_write(self):
        """
        Function to download quakeML files from a range of datacenters and \
        attempt to write miniseed files
        """
        import os
        from eqcorrscan.utils import sfile_util
        import obspy
        if int(obspy.__version__.split('.')[0]) >= 1:
            from obspy.clients.fdsn import Client
            from obspy import read_events
            from obspy.clients.fdsn.header import FDSNException
        else:
            from obspy.fdsn import Client
            from obspy import readEvents as read_events
            from obspy.fdsn.header import FDSNException
        import warnings

        event_list = [('GEONET', '2016p008122'),
                      ('NCEDC', '72572665'),
                      ('USGS', 'nc72597260')]
        for event_info in event_list:
            try:
                client = Client(event_info[0])
                if event_info[0] == 'GEONET':
                    data_stream = client.\
                        _download('http://quakeml.geonet.org.nz/' +
                                  'quakeml/1.2/' + event_info[1])
                    data_stream.seek(0, 0)
                    event = read_events(data_stream, format="quakeml")
                    data_stream.close()
                else:
                    event = client.get_events(eventid=event_info[1],
                                              includearrivals=True)
            except FDSNException:
                warnings.warn('FDSNException')
                continue
            test_Sfile_name = sfile_util.eventtosfile(event, 'test', 'L', '.',
                                                      'null', overwrite=True)
            os.remove(test_Sfile_name)
        return True
Example #13
 def setUpClass(cls):
     log = logging.getLogger(mag_calc.__name__)
     cls._log_handler = MockLoggingHandler(level='DEBUG')
     log.addHandler(cls._log_handler)
     cls.log_messages = cls._log_handler.messages
     client = Client("GEONET")
     cls.event = client.get_events(eventid="2019p498440")[0]
     origin_time = cls.event.preferred_origin().time
     bulk = [(p.waveform_id.network_code, p.waveform_id.station_code,
              p.waveform_id.location_code, p.waveform_id.channel_code,
              origin_time - 10, origin_time + 120) for p in cls.event.picks]
     cls.inventory = client.get_stations_bulk(bulk, level='response')
     cls.st = Stream()
     for _bulk in bulk:
         try:
             cls.st += client.get_waveforms(*_bulk)
         except IncompleteRead:
             print(f"Could not download {_bulk}")
     cls.available_stations = len(
         {p.waveform_id.station_code
          for p in cls.event.picks})
Example #14
 def test_filter_picks(self):
     """ Test various methods of filetring picks in a catalog."""
     from obspy.clients.fdsn import Client
     from eqcorrscan.utils.catalog_utils import filter_picks
     from obspy import UTCDateTime
     client = Client(str("NCEDC"))
     t1 = UTCDateTime(2004, 9, 28)
     t2 = t1 + 86400
     catalog = client.get_events(starttime=t1,
                                 endtime=t2,
                                 minmagnitude=3,
                                 minlatitude=35.7,
                                 maxlatitude=36.1,
                                 minlongitude=-120.6,
                                 maxlongitude=-120.2,
                                 includearrivals=True)
     stations = ['BMS', 'BAP', 'PAG', 'PAN', 'PBI', 'PKY', 'YEG', 'WOF']
     channels = ['SHZ', 'SHN', 'SHE', 'SH1', 'SH2']
     networks = ['NC']
     locations = ['']
     top_n_picks = 5
     filtered_catalog = filter_picks(catalog=catalog,
                                     stations=stations,
                                     channels=channels,
                                     networks=networks,
                                     locations=locations,
                                     top_n_picks=top_n_picks)
     for event in filtered_catalog:
         for pick in event.picks:
             self.assertTrue(pick.waveform_id.station_code in stations)
             self.assertTrue(pick.waveform_id.channel_code in channels)
             self.assertTrue(pick.waveform_id.network_code in networks)
             self.assertTrue(pick.waveform_id.location_code in locations)
     filtered_catalog = filter_picks(catalog=catalog,
                                     top_n_picks=top_n_picks)
     filtered_stations = []
     for event in filtered_catalog:
         for pick in event.picks:
             filtered_stations.append(pick.waveform_id.station_code)
     self.assertEqual(len(list(set(filtered_stations))), top_n_picks)
Example #15
def getEvents4Station(stLat, stLon, startTime, endTime, minRad, maxRad,
                      minMag):
    ''' This will collect a set of events for a station.
        input: station latitude, station longitude, 
               starting and ending times(UTC date time),
               minimum and maximum degrees distance from station, 
               minimum magnitude
        output: a catalog of events
    '''
    from obspy import UTCDateTime
    from obspy.clients.fdsn import Client

    #stLat = 34.945910
    #stLon = -106.457200
    #startTime = UTCDateTime("2016-01-01T00:00:00")
    #endTime = UTCDateTime("2017-04-24T00:00:00")

    client = Client("IRIS")
    EventCatalog = client.get_events(starttime=startTime,endtime=endTime,\
                                 latitude=stLat,longitude=stLon, \
                                 minradius=minRad, maxradius=maxRad, \
                                 minmagnitude=minMag)
    return EventCatalog
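
A usage sketch reusing the station coordinates from the commented-out values above; the radius and magnitude limits are illustrative:

from obspy import UTCDateTime

EventCatalog = getEvents4Station(stLat=34.945910, stLon=-106.457200,
                                 startTime=UTCDateTime("2016-01-01T00:00:00"),
                                 endTime=UTCDateTime("2017-04-24T00:00:00"),
                                 minRad=30, maxRad=90, minMag=6.0)
print(EventCatalog)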
Example #16
    def fetchMainshock(self):
        """
        Fetch event information.
        """
        if self.showProgress:
            print("Fetching mainshock information from data center...")

        datacenter = self.params.get("catalog")[0]
        if datacenter == "USGS":
            services = {
                'station': None,
                'event': "http://earthquake.usgs.gov/fdsnws/event/1",
                'dataselect': None
            }
            client = Client(datacenter, service_mappings=services, debug=False)
        else:
            client = Client(datacenter)
        catalog = client.get_events(eventid=self.params.get("mainshock"))
        event = catalog.events[0]

        event.write(self.params.get("files/mainshock"), format="QUAKEML")

        return
Example #17
    def setUpClass(cls):
        starttime = UTCDateTime(2019, 8, 12, 10)
        endtime = UTCDateTime(2019, 8, 13)
        client = Client("GEONET")
        catalog = client.get_events(starttime=starttime,
                                    endtime=endtime,
                                    latitude=-44.5,
                                    longitude=167.9,
                                    maxradius=0.2)
        catalog.events.sort(
            key=lambda e: (e.preferred_origin() or e.origins[0]).time)
        StationInfo = namedtuple("StationInfo",
                                 ["network", "station", "location"])
        picked_stations = [
            StationInfo(p.waveform_id.network_code, p.waveform_id.station_code,
                        p.waveform_id.location_code) for ev in catalog
            for p in ev.picks
        ]
        stations_to_download = [
            sta for sta, _ in Counter(picked_stations).most_common(5)
        ]
        streams = []
        for event in catalog[0:10]:  # Just get the first 10 events
            bulk = [(sta.network, sta.station, sta.location, "HH?",
                     event.preferred_origin().time - 10,
                     event.preferred_origin().time + 80)
                    for sta in stations_to_download]
            streams.append(client.get_waveforms_bulk(bulk))

        picked_stations = set(picked_stations)
        inv_bulk = [(sta.network, sta.station, sta.location, "HH?", starttime,
                     endtime) for sta in picked_stations]
        inventory = client.get_stations_bulk(inv_bulk, level="station")

        cls.streams = streams
        cls.catalog = catalog
        cls.inventory = inventory
Example #18
def extract_eq_info(starttime, endtime, minmagnitude=minmagnitude):
    client = Client("IRIS")
    starttime = UTCDateTime(starttime)
    endtime = UTCDateTime(endtime)
    cat = client.get_events(starttime=starttime,
                            endtime=endtime,
                            minmagnitude=minmagnitude,
                            catalog="ISC")
    (years, juldays, latitudes, longitudes, depths, magnitudes,
     magnitude_type, event_text, eq_time) = ([], [], [], [], [], [], [], [], [])

    for i in range(len(cat.events)):
        try:
            yr = cat.events[i].origins[0].time.year
            jd = cat.events[i].origins[0].time.julday
            lat = cat.events[i].origins[0].latitude
            lon = cat.events[i].origins[0].longitude
            dp = cat.events[i].origins[0].depth / 1000
            mg = cat.events[i].magnitudes[0].mag
            mg_type = cat.events[i].magnitudes[0].magnitude_type
            e_text = cat.events[i].event_descriptions[0].text
            e_time = str(cat.events[i].origins[0].time)
            x, y = merc(lon, lat)
            years.append(yr)
            juldays.append(jd)
            latitudes.append(lat)
            longitudes.append(lon)
            depths.append(dp)
            magnitudes.append(mg)
            magnitude_type.append(mg_type)
            event_text.append(e_text)
            eq_time.append(e_time)
        except Exception:
            pass

    return (years, juldays, latitudes, longitudes, depths, magnitudes,
            magnitude_type, event_text, eq_time)
Example #19
def get_catalog(starttime,
                endtime,
                base_url='IRIS',
                _catalog='NEIC PDE',
                **kwargs):
    '''Get a catalog for the specified time window.

    Parameters
    ----------
    starttime : str
        Start time.
    endtime : str
        End time.
    base_url : str, optional
        Base URL of the FDSN web service. Default is 'IRIS'.
    _catalog : str, optional
        Catalog name to request from the service. Default is 'NEIC PDE'.
    **kwargs
        Additional keyword arguments passed through to ``Client.get_events``.

    Returns
    -------
    catalog : `obspy.core.event.catalog.Catalog`
    '''
    client = Client(base_url)
    starttime = UTCDateTime(starttime)
    endtime = UTCDateTime(endtime)
    catalog = client.get_events(starttime=starttime,
                                endtime=endtime,
                                catalog=_catalog,
                                **kwargs)
    return catalog
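
Usage sketch; any extra FDSN filters pass straight through via **kwargs (the magnitude cut is illustrative):

catalog = get_catalog("2016-09-03", "2016-09-04",
                      base_url="IRIS", _catalog="NEIC PDE",
                      minmagnitude=5.0)
print(catalog)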
Example #20
 def test_download_various_methods(self):
     """
     Will download data from server and store in various databases,
     then create templates using the various methods.
     """
     client = Client('GEONET')
     # get the events
     catalog = client.get_events(eventid='2016p008194')
     # Select 3 channels to use and download
     sta_chans = [(pick.waveform_id.station_code,
                   pick.waveform_id.channel_code)
                  for pick in catalog[0].picks[0:3]]
     t1 = UTCDateTime(catalog[0].origins[0].time.date)
     t2 = t1 + 86400
     bulk = [('NZ', sta_chan[0], '*', sta_chan[1], t1, t2)
             for sta_chan in sta_chans]
     continuous_st = client.get_waveforms_bulk(bulk)
     continuous_st.merge(fill_value=0)
     # Test multi_template_gen
     templates = multi_template_gen(catalog, continuous_st, length=3)
     self.assertEqual(len(templates), 1)
     # Test without an event
     templates = multi_template_gen(Catalog(), continuous_st, length=3)
     self.assertEqual(len(templates), 0)
Example #21
from obspy import UTCDateTime
from obspy.clients.fdsn import Client

client = Client(base_url="http://arclink.ethz.ch", user='******', password='******')
starttime = UTCDateTime("2014-01-01")
endtime = UTCDateTime()
inv = client.get_stations(network="CH", station="SIOM", starttime=starttime, endtime=endtime, level="station")  # other level options: "network", "channel", "response"
print(type(inv)) #Inventory
print(inv)
network = inv[0]
print(network)
station = network[0]
print(station)
#inv.plot(projection="local")
centerlat = station.latitude
centerlong = station.longitude

#get specified event
cat = client.get_events(starttime=starttime, endtime=endtime, latitude=centerlat, longitude=centerlong, maxradius=1, minmagnitude=1)#, filename="sion_events.xml")
#print len(cat)
evtnum = cat.count()
print(type(cat)) #Catalog
print(cat) #add , CatalogObject.__str__(print_all=True) to print all events
#cat.plot() #add also resouces like: projection="local"
focaltime = []
hypolon = []
hypolat = []
hypodep = []
eqmag = []
for x in range(0, evtnum):
	event = cat[x]
	focaltime.append(event.origins[0].time)
	hypolon.append(event.origins[0].longitude)
	hypolat.append(event.origins[0].latitude)
Example #22
    def test_redirection_auth(self):
        """
        Tests the redirection of GET and POST requests using authentication.

        By default these should not redirect and an exception is raised.
        """
        # Clear the cache.
        Client._Client__service_discovery_cache.clear()

        # The error will already be raised during the initialization in most
        # cases.
        self.assertRaises(
            FDSNRedirectException,
            Client, "IRIS", service_mappings={
                "station": "http://ds.iris.edu/files/redirect/307/station/1",
                "dataselect":
                    "http://ds.iris.edu/files/redirect/307/dataselect/1",
                "event": "http://ds.iris.edu/files/redirect/307/event/1"},
            user="******", password="******",
            user_agent=USER_AGENT)

        # The force_redirect flag overwrites that behaviour.
        c_auth = Client("IRIS", service_mappings={
            "station":
                "http://ds.iris.edu/files/redirect/307/station/1",
            "dataselect":
                "http://ds.iris.edu/files/redirect/307/dataselect/1",
            "event":
                "http://ds.iris.edu/files/redirect/307/event/1"},
            user="******", password="******",
            user_agent=USER_AGENT, force_redirect=True)

        st = c_auth.get_waveforms(
            network="IU", station="ANMO", location="00", channel="BHZ",
            starttime=UTCDateTime("2010-02-27T06:30:00.000"),
            endtime=UTCDateTime("2010-02-27T06:30:01.000"))
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        inv = c_auth.get_stations(
            starttime=UTCDateTime("2000-01-01"),
            endtime=UTCDateTime("2001-01-01"),
            network="IU", station="ANMO", level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))

        cat = c_auth.get_events(starttime=UTCDateTime("2001-01-07T01:00:00"),
                                endtime=UTCDateTime("2001-01-07T01:05:00"),
                                catalog="ISC")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(cat)))

        # Also test the bulk requests which are done using POST requests.
        bulk = (("TA", "A25A", "", "BHZ",
                 UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")),
                ("TA", "A25A", "", "BHE",
                 UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")))
        st = c_auth.get_waveforms_bulk(bulk, quality="B", longestonly=False)
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        starttime = UTCDateTime(1990, 1, 1)
        endtime = UTCDateTime(1990, 1, 1) + 10
        bulk = [
            ["IU", "ANMO", "", "BHE", starttime, endtime],
            ["IU", "CCM", "", "BHZ", starttime, endtime],
        ]
        inv = c_auth.get_stations_bulk(bulk, level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))
Example #23
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulating service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(os.path.join(
                        self.datapath, "2014-01-07_iris_event.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_station.wadl"), "rb") as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_dataselect.wadl"), "rb") as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://example.com/beta/event_service/11"
        base_url_station = "http://example.org/beta2/station/7"
        base_url_ds = "http://example.edu/beta3/dataselect/8"

        # An exception will be raised if no actual WADLs are returned.
        # Catch warnings to avoid them being raised for the tests.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            c = Client(base_url=base_url, service_mappings={
                "event": base_url_event,
                "station": base_url_station,
                "dataselect": base_url_ds,
            })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning) or
                            "cannot deal with" in str(warning))

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D", UTCDateTime() - 100,
                            UTCDateTime())
        except:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
Example #24
def run_tutorial(plot=False,
                 multiplex=True,
                 return_streams=False,
                 cores=4,
                 verbose=False):
    """
    Run the tutorial.

    :return: detections
    """
    client = Client("GEONET", debug=verbose)
    cat = client.get_events(minlatitude=-40.98,
                            maxlatitude=-40.85,
                            minlongitude=175.4,
                            maxlongitude=175.5,
                            starttime=UTCDateTime(2016, 5, 1),
                            endtime=UTCDateTime(2016, 5, 20))
    print(f"Downloaded a catalog of {len(cat)} events")
    # This gives us a catalog of events - it takes a while to download all
    # the information, so give it a bit!
    # We will generate a five station, multi-channel detector.
    cat = filter_picks(catalog=cat, top_n_picks=5)
    stachans = list(
        set([(pick.waveform_id.station_code, pick.waveform_id.channel_code)
             for event in cat for pick in event.picks]))
    # In this tutorial we will only work on one cluster, defined spatially.
    # You can work on multiple clusters, or try the whole set.
    clusters = catalog_cluster(catalog=cat,
                               metric="distance",
                               thresh=2,
                               show=False)
    # We will work on the largest cluster
    cluster = sorted(clusters, key=lambda c: len(c))[-1]
    # This cluster contains 32 events; we will now download and trim the
    # waveforms.  Note that each channel must start at the same time and be the
    # same length for multiplexing.  If not multiplexing, EQcorrscan will
    # maintain the individual differences in time between channels and delay
    # the detection statistics by that amount before stacking and detection.
    client = Client('GEONET')
    design_set = []
    st = Stream()
    for event in cluster:
        print(f"Downloading for event {event.resource_id.id}")
        bulk_info = []
        t1 = event.origins[0].time
        t2 = t1 + 25.1  # Have to download extra data, otherwise GeoNet will
        # trim wherever suits.
        t1 -= 0.1
        for station, channel in stachans:
            try:
                st += client.get_waveforms('NZ', station, '*',
                                           channel[0:2] + '?', t1, t2)
            except IncompleteRead:
                print(f"Could not download for {station} {channel}")
    print(f"Downloaded {len(st)} channels")
    for event in cluster:
        t1 = event.origins[0].time
        t2 = t1 + 25
        design_set.append(st.copy().trim(t1, t2))
    # Construction of the detector will process the traces, then align them,
    # before multiplexing.
    print("Making detector")
    detector = subspace.Detector()
    detector.construct(streams=design_set,
                       lowcut=2.0,
                       highcut=9.0,
                       filt_order=4,
                       sampling_rate=20,
                       multiplex=multiplex,
                       name='Wairarapa1',
                       align=True,
                       reject=0.2,
                       shift_len=6,
                       plot=plot).partition(9)
    print("Constructed Detector")
    if plot:
        detector.plot()
    # We also want the continuous stream to detect in.
    t1 = UTCDateTime(2016, 5, 11, 19)
    t2 = UTCDateTime(2016, 5, 11, 20)
    # We are going to look in a single hour just to minimize cost, but you can
    # run for much longer.
    bulk_info = [('NZ', stachan[0], '*', stachan[1][0] + '?' + stachan[1][-1],
                  t1, t2) for stachan in detector.stachans]
    print("Downloading continuous data")
    st = client.get_waveforms_bulk(bulk_info)
    st.merge().detrend('simple').trim(starttime=t1, endtime=t2)
    # We set a very low threshold because the detector is not that great and we
    # haven't aligned it particularly well - however, at this threshold we make
    # two real detections.
    print("Computing detections")
    detections, det_streams = detector.detect(st=st,
                                              threshold=0.4,
                                              trig_int=2,
                                              extract_detections=True,
                                              cores=cores)
    if return_streams:
        return detections, det_streams
    else:
        return detections
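
Run as a script, the tutorial would typically be invoked like this; the download and detection steps hit GeoNet and can take several minutes:

if __name__ == '__main__':
    detections = run_tutorial(plot=False, cores=4)
    for detection in detections:
        print(detection)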
Example #25
def event_info(input_dics):
    """
    Get event(s) info for an event-based request.
    :param input_dics: dictionary of request options
    :return: (events, events_QML)
    """
    try:
        evlatmin = input_dics['evlatmin']
        evlatmax = input_dics['evlatmax']
        evlonmin = input_dics['evlonmin']
        evlonmax = input_dics['evlonmax']

        evlat = input_dics['evlat']
        evlon = input_dics['evlon']
        evradmin = input_dics['evradmin']
        evradmax = input_dics['evradmax']

        event_switch = 'fdsn'
        event_url = input_dics['event_catalog']
        if input_dics['read_catalog']:
            event_switch = 'local'
        event_fdsn_cat = None

        if event_url.lower() == 'gcmt_combo':
            event_switch = 'gcmt_combo'
        if event_url.lower() == 'neic_usgs':
            event_switch = 'neic_usgs'
        if event_url.lower() == 'isc':
            event_switch = 'isc_cat'

        print('\nEvent(s) are based on:\t%s' % input_dics['event_catalog'])

        if event_switch == 'fdsn':
            client_fdsn = Client_fdsn(base_url=event_url)
            events_QML = client_fdsn.get_events(
                minlatitude=evlatmin,
                maxlatitude=evlatmax,
                minlongitude=evlonmin,
                maxlongitude=evlonmax,
                latitude=evlat,
                longitude=evlon,
                minradius=evradmin,
                maxradius=evradmax,
                mindepth=input_dics['min_depth'],
                maxdepth=input_dics['max_depth'],
                starttime=input_dics['min_date'],
                endtime=input_dics['max_date'],
                minmagnitude=input_dics['min_mag'],
                maxmagnitude=input_dics['max_mag'],
                orderby='time-asc',
                catalog=event_fdsn_cat,
                magnitudetype=input_dics['mag_type'],
                includeallorigins=None,
                includeallmagnitudes=None,
                includearrivals=None,
                eventid=None,
                limit=None,
                offset=None,
                contributor=None,
                updatedafter=None)

        elif event_switch == 'gcmt_combo':
            events_QML = \
                gcmt_catalog(input_dics['min_date'],
                             input_dics['max_date'],
                             evlatmin, evlatmax, evlonmin, evlonmax,
                             evlat, evlon, evradmin, evradmax,
                             input_dics['min_depth'],
                             input_dics['max_depth'],
                             input_dics['min_mag'],
                             input_dics['max_mag'])

        elif event_switch == 'neic_usgs':
            events_QML = \
                neic_catalog_urllib(input_dics['min_date'],
                                    input_dics['max_date'],
                                    evlatmin, evlatmax, evlonmin, evlonmax,
                                    evlat, evlon, evradmin, evradmax,
                                    input_dics['min_depth'],
                                    input_dics['max_depth'],
                                    input_dics['min_mag'],
                                    input_dics['max_mag'])

        elif event_switch == 'isc_cat':
            events_QML = \
                isc_catalog(bot_lat=evlatmin, top_lat=evlatmax,
                            left_lon=evlonmin, right_lon=evlonmax,
                            ctr_lat=evlat, ctr_lon=evlon,
                            radius=evradmax,
                            start_time=input_dics['min_date'],
                            end_time=input_dics['max_date'],
                            min_dep=input_dics['min_depth'],
                            max_dep=input_dics['max_depth'],
                            min_mag=input_dics['min_mag'],
                            max_mag=input_dics['max_mag'],
                            mag_type=input_dics['mag_type'],
                            req_mag_agcy='Any',
                            rev_comp=input_dics['isc_rev_comp'])

        elif event_switch == 'local':
            events_QML = readEvents(input_dics['read_catalog'])

        else:
            sys.exit('[ERROR] %s is not supported' %
                     input_dics['event_catalog'])

        for i in range(len(events_QML)):
            if not hasattr(events_QML.events[i], 'preferred_mag'):
                events_QML.events[i].preferred_mag = \
                    events_QML.events[i].magnitudes[0].mag
                events_QML.events[i].preferred_mag_type = \
                    events_QML.events[i].magnitudes[0].magnitude_type
                events_QML.events[i].preferred_author = 'None'
            else:
                if not hasattr(events_QML.events[i], 'preferred_author'):
                    if events_QML.events[i].preferred_magnitude(
                    ).creation_info:
                        events_QML.events[i].preferred_author = \
                            events_QML.events[i].preferred_magnitude().creation_info.author
                    elif events_QML.events[i].magnitudes[0].creation_info:
                        events_QML.events[i].preferred_author = \
                            events_QML.events[i].magnitudes[0].creation_info.author
        # no matter if list was passed or requested, sort catalogue,
        # plot events and proceed
        events_QML = sort_catalogue(events_QML)
        events = qml_to_event_list(events_QML)

    except Exception as error:
        print(60 * '-')
        print('[WARNING] %s' % error)
        print(60 * '-')
        events = []
        events_QML = []

    for i in range(len(events)):
        events[i]['t1'] = events[i]['datetime'] - input_dics['preset']
        events[i]['t2'] = events[i]['datetime'] + input_dics['offset']

    return events, events_QML
Example #26
def fdsnws(base_url="http://arclink.ethz.ch:8080",
           endafter=40.,
           maxradius=.6,
           location='*',
           channel='HNZ,HNE,HNN,HGZ,HGE,HGN,HHZ,HHE,HHN,EHZ,EHE,EHN,SHZ,SHE,SHN',
           stations_base_url=None,
           waveforms_base_url=None,
           quality=None,
           minimumlength=None,
           longestonly=None,
           correction_method=remove_sensitivity,
           eventid=None,
           **get_events_options):
    
    
    # First import :
    from obspy.clients.fdsn import Client
    fdsnclient = Client(base_url)
    
    # eventid in URL case
    if eventid is None:
        eventid = 'smi:ch.ethz.sed/sc3a/2017epaqsp'
        print('Picks default eventid:', eventid)
    elif '#' in eventid:
        eventid = eventid.split('#')[-1]
        print('Picks eventid in URL format:', eventid)
    
    # Special clients systems
    stationsclient = fdsnclient
    waveformsclient = fdsnclient
    if stations_base_url:
        stationsclient = Client(stations_base_url)
        if not waveforms_base_url:
            waveformsclient = Client(stations_base_url)
    if waveforms_base_url:
        waveformsclient = Client(waveforms_base_url)
        if not stations_base_url:
            stationsclient = Client(waveforms_base_url)
    


    # Load event
    fdsnclient.get_events(eventid=eventid,format='sc3ml',filename='events.xml', **get_events_options)
    eventstreams = {'catalog': obspy.read_events('events.xml',format='sc3ml'),
                    'inventory': obspy.core.inventory.Inventory([],None),
                    'raw' : obspy.core.Stream()}
    if eventstreams['catalog'] is None:
        print('catalog is',eventstreams['catalog'])
    for output in ['catalog','inventory','raw']:
        eventstreams[output].output=output
    
    for event in eventstreams['catalog'].events :
        
        # Load stations
        t=event.preferred_origin().time
        try:
            inventory = stationsclient.get_stations(level='station',
                                                startbefore=t,
                                                endafter=t+endafter,
                                                latitude=event.preferred_origin().latitude,
                                                longitude=event.preferred_origin().longitude,
                                                maxradius=maxradius,
                                                location=location,
                                                channel=channel)
        except Exception:
            print('No station found for event:')
            print(event)
            print('Using client:')
            print(stationsclient)
            continue
        # Load waveforms
        addons = [location, channel] + [t,t+endafter]
        bulk = [tuple(station.split()[0].split('.')[:2]+addons) for station in inventory.get_contents()['stations']]
        try:
            waveforms = waveformsclient.get_waveforms_bulk(bulk,
                                                      attach_response=True,
                                                      quality=quality,
                                                      minimumlength=minimumlength,
                                                      longestonly=longestonly)
        except Exception:
            print('No waveform found for request:')
            print(bulk)
            print('Using client:')
            print(waveformsclient)
            continue
        # Improve waveforms attributes
        for trace in waveforms:
            station = inventory.select(network=trace.stats.network,
                                       station=trace.stats.station).networks[0].stations[0]
            trace.stats.coordinates = {'latitude':station.latitude,
                                       'longitude':station.longitude,
                                       'elevation':station.elevation}
            distance = obspy.geodetics.base.gps2dist_azimuth(station.latitude,
                                                             station.longitude,
                                                             event.preferred_origin().latitude,
                                                             event.preferred_origin().longitude)[0]
            distance = ((distance**2+(trace.stats.coordinates['elevation']*-1)**2.)**.5)
            distance = distance/len(eventstreams['catalog'].events)
            if not hasattr(trace.stats, 'distance'):
                trace.stats.distance = 0.
            trace.stats.distance += distance

        eventstreams['inventory'] += inventory
        eventstreams['raw'] += waveforms

    eventstreams['raw'].sort(keys=['distance'])

    if correction_method:
        eventstreams = correction_method(eventstreams)

    return eventstreams
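
A usage sketch for fdsnws; the default correction_method=remove_sensitivity refers to a helper outside this snippet, so pass correction_method=None to skip the correction step:

eventstreams = fdsnws(eventid='smi:ch.ethz.sed/sc3a/2017epaqsp',
                      correction_method=None)
print(eventstreams['catalog'])
print(eventstreams['raw'])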
Example #27
    def obtain_events(self,
                      catalogxmlloc,
                      catalogtxtloc,
                      minmagnitude=5.5,
                      maxmagnitude=9.5):

        ## Check for the station information
        if os.path.exists(self.inventorytxtfile):
            invent_df = pd.read_csv(self.inventorytxtfile,
                                    sep="|",
                                    keep_default_na=False,
                                    na_values=[""])
            total_stations = invent_df.shape[0]
            if invent_df.shape[0] == 0:
                self.logger.error("No data available, exiting...")
                sys.exit()
        else:
            self.logger.error("No data available, exiting...")
            sys.exit()

        tot_evnt_stns = 0
        if not self.inv:
            self.logger.info(
                "Reading station inventory to obtain events catalog")
            try:
                # Read the station inventory
                self.inv = read_inventory(self.inventoryfile,
                                          format="STATIONXML")
            except Exception as exception:
                self.logger.error("No available data", exc_info=True)
                sys.exit()
        # list all the events during the station active time
        self.staNamesNet, staLats, staLons = [], [], []
        count = 1
        for net in self.inv:
            for sta in net:
                network = net.code  #network name
                station = sta.code  #station name
                print("\n")
                self.logger.info(
                    f"{count}/{total_stations} Retrieving event info for {network}-{station}"
                )
                count += 1
                self.staNamesNet.append(f"{network}_{station}")

                sta_lat = sta.latitude  #station latitude
                staLats.append(sta_lat)

                sta_lon = sta.longitude  #station longitude
                staLons.append(sta_lon)

                sta_sdate = sta.start_date  #station start date
                sta_edate = sta.end_date  #station end date
                # sta_edate_str = sta_edate
                if not sta_edate:
                    sta_edate = UTC("2599-12-31T23:59:59")
                    # sta_edate_str = "2599-12-31T23:59:59"

                stime, etime = date2time(
                    sta_sdate, sta_edate)  #station start and end time in UTC

                catalogxml = catalogxmlloc + f'{network}-{station}-{sta_sdate.year}-{sta_edate.year}-{self.method}-{self.method}_events.xml'  #xml catalog
                # self.allcatalogxml.append(catalogxml)
                catalogtxt = catalogtxtloc + f'{network}-{station}-{sta_sdate.year}-{sta_edate.year}-events-info-{self.method}.txt'  #txt catalog
                if not os.path.exists(catalogxml) and not os.path.exists(
                        catalogtxt):
                    self.logger.info(
                        f"Obtaining catalog: {self.method}: {network}-{station}-{sta_sdate.year}-{sta_edate.year}"
                    )
                    kwargs = {
                        'starttime': stime,
                        'endtime': etime,
                        'latitude': sta_lat,
                        'longitude': sta_lon,
                        'minradius': self.minradius,
                        'maxradius': self.maxradius,
                        'minmagnitude': minmagnitude,
                        'maxmagnitude': maxmagnitude
                    }
                    client = Client('IRIS')

                    try:
                        catalog = client.get_events(**kwargs)
                    except Exception:
                        self.logger.warning(
                            "ConnectionResetError while obtaining the events from the client - IRIS"
                        )
                        continue
                    catalog.write(catalogxml, 'QUAKEML')  #writing xml catalog

                    tot_evnt_stns += len(catalog)

                    evtimes, evlats, evlons, evdps, evmgs, evmgtps = [], [], [], [], [], []
                    self.logger.info("Writing the event data into a text file")

                    with open(catalogtxt, 'w') as f:
                        f.write('evtime,evlat,evlon,evdp,evmg\n')
                        for cat in catalog:
                            try:
                                origin = cat.origins[0]
                                evtime = origin.time
                                evlat = origin.latitude
                                evlon = origin.longitude
                                evdp = origin.depth / 1000
                                evmg = cat.magnitudes[0].mag
                                try:
                                    evmgtp = cat.magnitudes[0].magnitude_type
                                except Exception:
                                    evmgtp = "Mww"
                                evtimes.append(str(evtime))
                                evlats.append(float(evlat))
                                evlons.append(float(evlon))
                                evdps.append(float(evdp))
                                evmgs.append(float(evmg))
                                evmgtps.append(str(evmgtp))
                                f.write(
                                    '{},{:.4f},{:.4f},{:.1f},{:.1f}\n'.format(
                                        evtime, evlat, evlon, evdp,
                                        evmg))  #writing txt catalog

                            except Exception as exception:
                                self.logger.warning(
                                    f"Unable to write for {evtime}")
                    self.logger.info(
                        "Finished writing the event data into a text and xml file"
                    )
                else:
                    self.logger.info(
                        f"{catalogxml.split('/')[-1]} and {catalogtxt.split('/')[-1]} already exists!"
                    )
Example #28
            '_' + str(eve.origins[0].time.hour).zfill(2) + '_' + str(eve.origins[0].time.minute).zfill(2) + '_' + str(int(round(1./f0,0))) + '.pdf', format='PDF', dpi=400)
    plt.clf()
    feve.close()
    num_lines = sum(1 for line in open(fstring))
    if num_lines <= 1:
        os.remove(fstring)
    return


def multifun(double):
    proceve(double[0], double[1])
    return


cat = client.get_events(starttime=stime,
                        endtime=etime,
                        minmagnitude=6.5,
                        maxdepth=50.)

stations = getstalist(sp, stime, net)
stations = ['HIA']

from multiprocessing import Pool

pool = Pool(20)

#cat = cat[:2]
for idx, eve in enumerate(cat):
    doubles = []
    print('One event: ' + str(idx) + ' of ' + str(len(cat)))
    for sta in stations:
        doubles.append([eve, sta])
Example #29
os.chdir('/Users/vidale/Documents/Research/BasinsLA/New_py')

min_mag = 3.5
min_lat = 33.75
max_lat = 34.2
min_lon = -118.5
max_lon = -117.75
t1 = UTCDateTime("1996-01-01T00:00:00")
t2 = UTCDateTime("2022-01-01T00:00:00")

fname_cat = 'LAB.quakeml2'

cat = client.get_events(starttime=t1,
                        endtime=t2,
                        minmagnitude=min_mag,
                        minlatitude=min_lat,
                        maxlatitude=max_lat,
                        minlongitude=min_lon,
                        maxlongitude=max_lon)
#ev_lon   = catalog[0].origins[0].longitude
#ev_lat   = catalog[0].origins[0].latitude
#ev_depth = catalog[0].origins[0].depth
#ev_t      = catalog[0].origins[0].time
print('event:', cat)
# print(cat.__str__(print_all=True))
cat.write(fname_cat, format='QUAKEML')
# os.system("pwd")

# plots don't work
#   File "/Users/vidale/opt/anaconda3/lib/python3.7/os.py", line 678, in __getitem__
#     raise KeyError(key) from None
Example #30
    def test_redirection(self):
        """
        Tests the redirection of GET and POST requests. We redirect
        everything if no authentication is used.

        IRIS runs three services to test it:
            http://ds.iris.edu/files/redirect/307/station/1
            http://ds.iris.edu/files/redirect/307/dataselect/1
            http://ds.iris.edu/files/redirect/307/event/1
        """
        c = Client("IRIS",
                   service_mappings={
                       "station":
                       "http://ds.iris.edu/files/redirect/307/station/1",
                       "dataselect":
                       "http://ds.iris.edu/files/redirect/307/dataselect/1",
                       "event": "http://ds.iris.edu/files/redirect/307/event/1"
                   },
                   user_agent=USER_AGENT)

        st = c.get_waveforms(network="IU",
                             station="ANMO",
                             location="00",
                             channel="BHZ",
                             starttime=UTCDateTime("2010-02-27T06:30:00.000"),
                             endtime=UTCDateTime("2010-02-27T06:30:01.000"))
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        inv = c.get_stations(starttime=UTCDateTime("2000-01-01"),
                             endtime=UTCDateTime("2001-01-01"),
                             network="IU",
                             station="ANMO",
                             level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))

        cat = c.get_events(starttime=UTCDateTime("2001-01-07T01:00:00"),
                           endtime=UTCDateTime("2001-01-07T01:05:00"),
                           catalog="ISC")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(cat)))

        # Also test the bulk requests which are done using POST requests.
        bulk = (("TA", "A25A", "", "BHZ", UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")),
                ("TA", "A25A", "", "BHE", UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")))
        st = c.get_waveforms_bulk(bulk, quality="B", longestonly=False)
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        starttime = UTCDateTime(1990, 1, 1)
        endtime = UTCDateTime(1990, 1, 1) + 10
        bulk = [
            ["IU", "ANMO", "", "BHE", starttime, endtime],
            ["IU", "CCM", "", "BHZ", starttime, endtime],
        ]
        inv = c.get_stations_bulk(bulk, level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))
Exemple #31
0
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulating service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_event.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_station.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_dataselect.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://example.com/beta/event_service/11"
        base_url_station = "http://example.org/beta2/station/7"
        base_url_ds = "http://example.edu/beta3/dataselect/8"

        # An exception will be raised if no actual WADLs are returned.
        # Catch warnings to avoid them being raised for the tests.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            c = Client(base_url=base_url,
                       service_mappings={
                           "event": base_url_event,
                           "station": base_url_station,
                           "dataselect": base_url_ds,
                       })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning)
                            or "cannot deal with" in str(warning))

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D",
                            UTCDateTime() - 100, UTCDateTime())
        except Exception:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except Exception:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except Exception:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
Exemple #32
0
def plotenvelopediffhist2d(comp,
                           ref,
                           limit=999999999,
                           bins=99,
                           mode='rel',
                           todo=[{
                               'location': 'EA',
                               'channel': 'Z',
                               'gm': 'Acc',
                               'unit': r'm/s$^2$'
                           }, {
                               'location': 'EV',
                               'channel': 'Z',
                               'gm': 'Vel',
                               'unit': r'm/s'
                           }, {
                               'location': 'ED',
                               'channel': 'Z',
                               'gm': 'Disp',
                               'unit': r'm'
                           }]):

    f = matplotlib.pyplot.figure(figsize=(12, 12))
    axes = f.subplots(3, 2)  #,sharex=True)

    locations = [
        numpy.asarray([str(tr.stats.location) for tr in comp]),
        numpy.asarray([str(tr.stats.location) for tr in ref])
    ]
    orientations = [
        numpy.asarray([str(tr.stats.channel[-2]) for tr in comp]),
        numpy.asarray([str(tr.stats.channel[-1]) for tr in ref])
    ]
    outliers = []
    for itd, td in enumerate(todo):

        mask = (locations[0] == td['location']) & (orientations[0]
                                                   == td['channel'])
        stream = [comp[m] for m in numpy.where(mask)[0]
                  ]  #comp.select(location=td['location'])

        mask = (locations[1] == td['location']) & (orientations[1]
                                                   == td['channel'])
        refstream = [ref[m] for m in numpy.where(mask)[0]
                     ]  #ref.select(location=td['location'])

        n = 0
        diffs = []
        env = []
        times = []

        ids = numpy.asarray([str(tr.id) for tr in refstream])
        starttimes = numpy.asarray(
            [tr.stats.starttime.timestamp for tr in refstream])
        endtimes = numpy.asarray(
            [tr.stats.endtime.timestamp for tr in refstream])

        for trace in stream:
            mask = (str(trace.id[:-1]) == ids) & (
                trace.stats.starttime.timestamp < endtimes) & (
                    trace.stats.endtime.timestamp > starttimes)
            reftrace = [refstream[m] for m in numpy.where(mask)[0]]

            if False:
                reftrace = select(refstream,
                                  endafter=trace.stats.starttime,
                                  startbefore=trace.stats.endtime,
                                  id=trace.id[:-1])

            for rtrace in reftrace:

                iref, itr, starttime, npts = overlap(rtrace, trace)
                mask = numpy.isnan(trace.data) | numpy.isnan(rtrace.data)
                mask = [not i for i in mask]
                iref = iref & mask
                itr = itr & mask
                [diffs.append(s) for s in trace.data[itr]]
                [env.append(s) for s in rtrace.data[iref]]
                [times.append(s) for s in rtrace.times("utcdatetime")[iref]]

            if len(diffs) >= limit:
                print('REACHED LIMIT!')
                break

        for i in [0, 1]:
            axes[itd, i].grid()
            axes[itd, i].set_yscale('log')
            axes[itd,
                 i].set_ylabel('%s scenvelope (%s)' % (td['gm'], td['unit']))
            if i == 1:
                d = 100. * numpy.asarray(diffs) / numpy.asarray(env)
                outliers.append(
                    numpy.asarray(times)[(d > 90.) &
                                         (numpy.asarray(env) > max(env) * .9)])
                h, x, y, im = axes[itd, i].hist2d(
                    d,
                    env,
                    bins=[
                        numpy.linspace(min(d), max(d), bins),
                        numpy.logspace(numpy.log10(min(env)),
                                       numpy.log10(max(env)), bins)
                    ],
                    norm=matplotlib.colors.LogNorm(),
                    normed=True)
                im.set_data(x, y, im.get_array() * 1. / sum(sum(h)))
                im.set_norm(matplotlib.colors.LogNorm())
                axes[itd, i].set_xlabel('%s diff (%s)' % (td['gm'], '%'))

            else:
                h, x, y, im = axes[itd, i].hist2d(
                    diffs,
                    env,
                    bins=[
                        numpy.linspace(min(diffs), max(diffs), bins),
                        numpy.logspace(numpy.log10(min(env)),
                                       numpy.log10(max(env)), bins)
                    ],
                    norm=matplotlib.colors.LogNorm(),
                    normed=True)

                im.set_data(x, y, im.get_array() * 1. / sum(sum(h)))
                im.set_norm(matplotlib.colors.LogNorm())
                axes[itd,
                     i].set_xlabel('%s diff (%s)' % (td['gm'], td['unit']))

            cb = matplotlib.pyplot.colorbar(im, ax=axes[itd, i])
            cb.ax.set_ylabel('Probability')
    f.tight_layout()
    #from obspy.clients.fdsn import Client
    client = Client("http://arclink.ethz.ch:8080")
    events = obspy.core.event.catalog.Catalog()
    for out in outliers:
        for o in out:
            for e in client.get_events(starttime=o - 120, endtime=o + 120):
                if e not in events:
                    events += e

    return f, events
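
# A hedged usage sketch (names are assumptions, not from the original):
# `comp` and `ref` are ObsPy Streams of co-registered envelope traces; the
# function returns the matplotlib figure and a Catalog of events found near
# the outlier times.
# fig, events = plotenvelopediffhist2d(comp_stream, ref_stream, bins=50)
# fig.savefig('envelope_diff_hist2d.png')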
Exemple #33
0
def main():

    print()
    print("##############################################")
    print("#        __                          _       #")
    print("#  _ __ / _|_ __  _   _     ___ __ _| | ___  #")
    print("# | '__| |_| '_ \| | | |   / __/ _` | |/ __| #")
    print("# | |  |  _| |_) | |_| |  | (_| (_| | | (__  #")
    print("# |_|  |_| | .__/ \__, |___\___\__,_|_|\___| #")
    print("#          |_|    |___/_____|                #")
    print("#                                            #")
    print("##############################################")
    print()

    # Run Input Parser
    args = arguments.get_calc_arguments()

    # Load Database
    db = stdb.io.load_db(fname=args.indb)

    # Construct station key loop
    allkeys = sorted(db.keys())

    # Extract key subset
    if len(args.stkeys) > 0:
        stkeys = []
        for skey in args.stkeys:
            stkeys.extend([s for s in allkeys if skey in s])
    else:
        stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Define path to see if it exists
        if args.phase in ['P', 'PP']:
            datapath = Path('P_DATA') / stkey
        elif args.phase in ['S', 'SKS']:
            datapath = Path('S_DATA') / stkey
        if not datapath.is_dir():
            print('Path to ' + str(datapath) + " doesn't exist - creating it")
            datapath.mkdir(parents=True)

        # Establish client for data
        if len(args.UserAuth) == 0:
            data_client = Client(args.Server)
        else:
            data_client = Client(args.Server, user=args.UserAuth[0],
                                 password=args.UserAuth[1])

        # Establish client for events
        event_client = Client()

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|===============================================|")
        print("|===============================================|")
        print("|                   {0:>8s}                    |".format(
            sta.station))
        print("|===============================================|")
        print("|===============================================|")
        print("|  Station: {0:>2s}.{1:5s}                            |".format(
            sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}  |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}          |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}          |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|-----------------------------------------------|")
        print("| Searching Possible events:                    |")
        print("|   Start: {0:19s}                  |".format(
            tstart.strftime("%Y-%m-%d %H:%M:%S")))
        print("|   End:   {0:19s}                  |".format(
            tend.strftime("%Y-%m-%d %H:%M:%S")))
        if args.maxmag is None:
            print("|   Mag:   >{0:3.1f}", format(args.minmag) +
                  "                                 |")
        else:
            msg = "|   Mag:   {0:3.1f}".format(args.minmag) + \
                " - {0:3.1f}".format(args.maxmag) + \
                "                            |"
            print(msg)

        print("| ...                                           |")

        # Get catalogue using deployment start and end
        cat = event_client.get_events(
            starttime=tstart, endtime=tend,
            minmagnitude=args.minmag, maxmagnitude=args.maxmag)

        # Total number of events in Catalogue
        nevK = 0
        nevtT = len(cat)
        print(
            "|  Found {0:5d}".format(nevtT) +
            " possible events                  |")
        ievs = range(0, nevtT)

        # Get Local Data Availabilty
        if len(args.localdata) > 0:
            print("|-----------------------------------------------|")
            print("| Cataloging Local Data...                      |")
            if args.useNet:
                stalcllist = utils.list_local_data_stn(
                    lcldrs=args.localdata, sta=sta.station,
                    net=sta.network, altnet=sta.altnet)
                print("|   {0:>2s}.{1:5s}: {2:6d}".format(
                    sta.network, sta.station, len(stalcllist)) +
                    " files                      |")
                print(stalcllist[0:10])
            else:
                stalcllist = utils.list_local_data_stn(
                    lcldrs=args.localdata, sta=sta.station)
                print("|   {0:5s}: {1:6d} files                " +
                      "        |".format(
                          sta.station, len(stalcllist)))
        else:
            stalcllist = []
        print("|===============================================|")

        # Select order of processing
        if args.reverse:
            ievs = range(0, nevtT)
        else:
            ievs = range(nevtT-1, -1, -1)

        # Read through catalogue
        for iev in ievs:

            # Extract event
            ev = cat[iev]

            # Initialize RF object with station info
            rfdata = RFData(sta)

            # Add event to rfdata object
            accept = rfdata.add_event(
                ev, gacmin=args.mindist, gacmax=args.maxdist,
                phase=args.phase, returned=True)

            # Define time stamp
            yr = str(rfdata.meta.time.year).zfill(4)
            jd = str(rfdata.meta.time.julday).zfill(3)
            hr = str(rfdata.meta.time.hour).zfill(2)

            # If event is accepted (data exists)
            if accept:

                # Display Event Info
                nevK = nevK + 1
                if args.reverse:
                    inum = iev + 1
                else:
                    inum = nevtT - iev + 1
                print(" ")
                print("**************************************************")
                print("* #{0:d} ({1:d}/{2:d}):  {3:13s} {4}".format(
                    nevK, inum, nevtT, rfdata.meta.time.strftime(
                        "%Y%m%d_%H%M%S"), stkey))
                if args.verb:
                    print("*   Phase: {}".format(args.phase))
                    print("*   Origin Time: " +
                          rfdata.meta.time.strftime("%Y-%m-%d %H:%M:%S"))
                    print(
                        "*   Lat: {0:6.2f};        Lon: {1:7.2f}".format(
                            rfdata.meta.lat, rfdata.meta.lon))
                    print(
                        "*   Dep: {0:6.2f} km;     Mag: {1:3.1f}".format(
                            rfdata.meta.dep, rfdata.meta.mag))
                    print("*   Dist: {0:7.2f} km;".format(rfdata.meta.epi_dist) +
                          "   Epi dist: {0:6.2f} deg\n".format(rfdata.meta.gac) +
                          "*   Baz:  {0:6.2f} deg;".format(rfdata.meta.baz) +
                          "   Az: {0:6.2f} deg".format(rfdata.meta.az))

                # Event Folder
                timekey = rfdata.meta.time.strftime("%Y%m%d_%H%M%S")
                evtdir = datapath / timekey
                RFfile = evtdir / 'RF_Data.pkl'
                ZNEfile = evtdir / 'ZNE_Data.pkl'
                metafile = evtdir / 'Meta_Data.pkl'
                stafile = evtdir / 'Station_Data.pkl'

                # Check if RF data already exist and overwrite has been set
                if evtdir.exists():
                    if RFfile.exists():
                        if not args.ovr:
                            continue

                # Get data
                has_data = rfdata.download_data(
                    client=data_client, dts=args.dts, stdata=stalcllist,
                    ndval=args.ndval, new_sr=args.new_sampling_rate,
                    returned=True, verbose=args.verb)

                if not has_data:
                    continue

                # Create Folder if it doesn't exist
                if not evtdir.exists():
                    evtdir.mkdir()

                # Save ZNE Traces
                pickle.dump(rfdata.data, open(ZNEfile, "wb"))

                # Rotate from ZNE to 'align' ('ZRT', 'LQT', or 'PVH')
                rfdata.rotate(vp=args.vp, vs=args.vs, align=args.align)

                # Calculate snr over dt_snr seconds
                rfdata.calc_snr(
                    dt=args.dt_snr, fmin=args.fmin, fmax=args.fmax)
                if args.verb:
                    print("* SNR: {}".format(rfdata.meta.snr))

                # Make sure no processing happens for NaNs
                if np.isnan(rfdata.meta.snr):
                    if args.verb:
                        print("* SNR NaN...Skipping")
                    print("**************************************************")
                    continue

                # Deconvolve data
                rfdata.deconvolve(
                    vp=args.vp, vs=args.vs,
                    align=args.align, method=args.method,
                    gfilt=args.gfilt, wlevel=args.wlevel,
                    pre_filt=args.pre_filt)

                # Get cross-correlation QC
                rfdata.calc_cc()
                if args.verb:
                    print("* CC: {}".format(rfdata.meta.cc))

                # Convert to Stream
                rfstream = rfdata.to_stream()

                # Save event meta data
                pickle.dump(rfdata.meta, open(metafile, "wb"))

                # Save Station Data
                pickle.dump(rfdata.sta, open(stafile, "wb"))

                # Save RF Traces
                pickle.dump(rfstream, open(RFfile, "wb"))

                # Update
                if args.verb:
                    print("* Wrote Output Files to: ")
                    print("*     "+str(evtdir))
                print("**************************************************")
Exemple #34
0
def mktemplates(network_code='GEONET',
                publicIDs=['2016p008122', '2016p008353', '2016p008155',
                           '2016p008194']):
    """Functional wrapper to make templates"""

    from collections import Counter
    from eqcorrscan.core import template_gen

    # This import section copes with namespace changes between obspy versions
    import obspy
    if int(obspy.__version__.split('.')[0]) >= 1:
        from obspy.clients.fdsn import Client
        from obspy import read_events
    else:
        from obspy.fdsn import Client
        from obspy import readEvents as read_events
    from obspy.core.event import Catalog

    # We want to download some QuakeML files from the New Zealand GeoNet
    # network. GeoNet currently doesn't support FDSN event queries, so we
    # work around this by downloading QuakeML from their quakeml.geonet site.

    client = Client(network_code)
    # We want to download a few events from an earthquake sequence; these are
    # identified by publicID numbers, given as arguments

    catalog = Catalog()
    for publicID in publicIDs:
        if network_code == 'GEONET':
            data_stream = client._download('http://quakeml.geonet.org.nz/' +
                                           'quakeml/1.2/' + publicID)
            data_stream.seek(0, 0)
            catalog += read_events(data_stream, format="quakeml")
            data_stream.close()
        else:
            catalog += client.get_events(eventid=publicID,
                                         includearrivals=True)

    # Let's plot the catalog to see what we have
    catalog.plot(projection='local', resolution='h')

    # We don't need all the picks; let's take the information from the
    # five most used stations
    all_picks = []
    for event in catalog:
        all_picks += [(pick.waveform_id.station_code) for pick in event.picks]
    all_picks = Counter(all_picks).most_common(5)
    all_picks = [pick[0] for pick in all_picks]

    for event in catalog:
        if len(event.picks) == 0:
            raise IOError('No picks found')
        event.picks = [pick for pick in event.picks
                       if pick.waveform_id.station_code in all_picks]

    # Now we can generate the templates
    templates = template_gen.from_client(catalog=catalog,
                                         client_id=network_code,
                                         lowcut=2.0, highcut=9.0,
                                         samp_rate=20.0, filt_order=4,
                                         length=3.0, prepick=0.15,
                                         swin='all', debug=1, plot=True)

    # We now have a series of templates! Using ObsPy's Stream.write() method
    # we can save these to disk for later use.  We will do that now for use
    # in the following tutorials.
    for i, template in enumerate(templates):
        template.write('tutorial_template_' + str(i) + '.ms', format='MSEED')
        # Note that this will warn you about data types.  As we don't care
        # at the moment, whatever obspy chooses is fine.
    return
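
# A minimal usage sketch (an assumption, not part of the original tutorial):
# build the GeoNet templates defined above, then read one back from disk.
from obspy import read

if __name__ == '__main__':
    mktemplates()
    print(read('tutorial_template_0.ms'))
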
import numpy as np
import numpy.matlib as mat
#import scipy as Sci
#import scipy.linalg


#---PARAMETERIZATION---#
plt.style.use('ggplot')
plt.rcParams['figure.figsize'] = 12, 8

#---DATA_FROM_FDSN---#
#get specified event
client = Client(base_url="http://arclink.ethz.ch")
starttime = UTCDateTime("2016-05-20")
endtime = UTCDateTime("2016-05-22")
cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=2, limit=5, mindepth=5)
print(type(cat)) #Catalog
print(cat)
cat.plot(outfile='output/py_eq_utc20160521_ml3.0_event.png')  # options such as projection="local" can also be passed

#get stations with the event
evt = cat[1]
print(type(evt))
print(evt)
origin = evt.origins[0]
otime = origin.time
print(type(origin))
t = origin.time
inv = client.get_stations(longitude=origin.longitude, latitude=origin.latitude,
                          maxradius=0.2, starttime=t, endtime=t + 100,
                          channel="HH?", network="CH", level="station")
print(type(inv))
print(inv)
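
# A hedged follow-up (not in the original snippet): fetch waveforms for the
# first station of the inventory around the origin time found above.
sta0 = inv[0][0]
st = client.get_waveforms(network="CH", station=sta0.code, location="*",
                          channel="HH?", starttime=otime, endtime=otime + 120)
print(st)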
Exemple #37
0
def fdsnws(
        base_url="http://arclink.ethz.ch:8080",
        endafter=40.,
        maxradius=.6,
        location='*',
        channel='HNZ,HNE,HNN,HGZ,HGE,HGN,HHZ,HHE,HHN,EHZ,EHE,EHN,SHZ,SHE,SHN',
        stations_base_url=None,
        waveforms_base_url=None,
        quality=None,
        minimumlength=None,
        longestonly=None,
        correction_method=remove_sensitivity,
        eventid=None,
        **get_events_options):

    # First import :
    from obspy.clients.fdsn import Client
    fdsnclient = Client(base_url)

    # eventid in URL case
    if eventid is None:
        eventid = 'smi:ch.ethz.sed/sc3a/2017epaqsp'
        print('Picks default eventid:', eventid)
    elif '#' in eventid:
        eventid = eventid.split('#')[-1]
        print('Picks eventid in URL format:', eventid)

    # Special clients systems
    stationsclient = fdsnclient
    waveformsclient = fdsnclient
    if stations_base_url:
        stationsclient = Client(stations_base_url)
        if not waveforms_base_url:
            waveformsclient = Client(stations_base_url)
    if waveforms_base_url:
        waveformsclient = Client(waveforms_base_url)
        if not stations_base_url:
            stationsclient = Client(waveforms_base_url)

    # Load event
    fdsnclient.get_events(eventid=eventid,
                          format='sc3ml',
                          filename='events.xml',
                          **get_events_options)
    eventstreams = {
        'catalog': obspy.read_events('events.xml', format='sc3ml'),
        'inventory': obspy.core.inventory.Inventory([], None),
        'raw': obspy.core.Stream()
    }
    if eventstreams['catalog'] is None:
        print('catalog is', eventstreams['catalog'])
    for output in ['catalog', 'inventory', 'raw']:
        eventstreams[output].output = output

    for event in eventstreams['catalog'].events:

        # Load stations
        t = event.preferred_origin().time
        try:
            inventory = stationsclient.get_stations(
                level='station',
                startbefore=t,
                endafter=t + endafter,
                latitude=event.preferred_origin().latitude,
                longitude=event.preferred_origin().longitude,
                maxradius=maxradius,
                location=location,
                channel=channel)
        except Exception:
            print('No station found for event:')
            print(event)
            print('Using client:')
            print(stationsclient)
            continue
        # Load waveforms
        addons = [location, channel] + [t, t + endafter]
        bulk = [
            tuple(station.split()[0].split('.')[:2] + addons)
            for station in inventory.get_contents()['stations']
        ]
        try:
            waveforms = waveformsclient.get_waveforms_bulk(
                bulk,
                attach_response=True,
                quality=quality,
                minimumlength=minimumlength,
                longestonly=longestonly)
        except Exception:
            print('No waveform found for request:')
            print(bulk)
            print('Using client:')
            print(waveformsclient)
            continue
        # Improve waveforms attributes
        for trace in waveforms:
            station = inventory.select(
                network=trace.stats.network,
                station=trace.stats.station).networks[0].stations[0]
            trace.stats.coordinates = {
                'latitude': station.latitude,
                'longitude': station.longitude,
                'elevation': station.elevation
            }
            distance = obspy.geodetics.base.gps2dist_azimuth(
                station.latitude, station.longitude,
                event.preferred_origin().latitude,
                event.preferred_origin().longitude)[0]
            distance = ((distance**2 +
                         (trace.stats.coordinates['elevation'] * -1)**2.)**.5)
            distance = distance / len(eventstreams['catalog'].events)
            if not hasattr(trace.stats, 'distance'):
                trace.stats.distance = 0.
            trace.stats.distance += distance

        eventstreams['inventory'] += inventory
        eventstreams['raw'] += waveforms

    eventstreams['raw'].sort(keys=['distance'])

    if correction_method:
        eventstreams = correction_method(eventstreams)

    return eventstreams
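
# A hedged usage sketch (the eventid is the function's own default; the plot
# call is an assumption): fetch and sort streams for one event, then plot a
# record section using the distances attached above.
# eventstreams = fdsnws(eventid='smi:ch.ethz.sed/sc3a/2017epaqsp')
# eventstreams['raw'].plot(type='section')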
Exemple #38
0
def run(analysis_start, analysis_len, template_creation_start,
        template_creation_len, write_streams, intermediate_party_output,
        final_party_output, intermediate_stream_output, final_stream_output):
    client = Client("http://service.geonet.org.nz")
    # for template creation
    day_len = 86400
    t2 = template_creation_start + (template_creation_len * day_len)
    catalog = client.get_events(starttime=template_creation_start,
                                endtime=t2,
                                minmagnitude=2.5,
                                minlatitude=-37.95936,
                                maxlatitude=-36.84226,
                                minlongitude=176.63818,
                                maxlongitude=177.80548)

    # Get rid of duplicately picked arrivals.
    for event in catalog:
        counted_stations = Counter(p.waveform_id.get_seed_string()
                                   for p in event.picks)
        _picks = []
        for seed_id, n_picks in counted_stations.items():
            nslc_picks = [
                p for p in event.picks
                if p.waveform_id.get_seed_string() == seed_id
            ]
            if n_picks == 1:
                _picks.append(nslc_picks[0])
            else:
                print("Multiple picks for {0}".format(seed_id))
                nslc_picks.sort(key=lambda p: p.time)
                _picks.append(nslc_picks[0])
        event.picks = _picks

    catalog = filter_picks(catalog=catalog,
                           evaluation_mode="manual",
                           top_n_picks=20)

    tribe = Tribe().construct(method="from_client",
                              lowcut=2.0,
                              highcut=15.0,
                              samp_rate=50.0,
                              length=6.0,
                              filt_order=4,
                              prepick=0.5,
                              client_id=client,
                              catalog=catalog,
                              data_pad=20.,
                              process_len=day_len,
                              min_snr=5.0,
                              parallel=False)
    print(tribe)
    print(tribe[0])

    tribe.templates = [
        t for t in tribe if len({tr.stats.station
                                 for tr in t.st}) >= 5
    ]
    print(tribe)
    if write_streams is True:
        for day in range(1, analysis_len):
            _party, st = tribe.client_detect(
                client=client,
                starttime=analysis_start + (day - 1) * day_len,
                endtime=analysis_start + day * day_len,
                threshold=9.,
                threshold_type="MAD",
                trig_int=2.0,
                plot=False,
                return_stream=True)
            _party.write(intermediate_party_output +
                         "/Detections_day_{0}".format(day))
            st = st.split()  # Required for writing to miniseed
            st.write(intermediate_stream_output + "/{0}.ms".format(day),
                     format="MSEED")
        reform_party(final_party_output, intermediate_party_output)
        reform_stream(final_stream_output, intermediate_stream_output)

    else:
        for day in range(1, analysis_len):
            _party = tribe.client_detect(
                client=client,
                starttime=analysis_start + (day - 1) * day_len,
                endtime=analysis_start + day * day_len,
                threshold=9.,
                threshold_type="MAD",
                trig_int=2.0,
                plot=False,
                return_stream=False)
            _party.write(intermediate_party_output +
                         "/Detections_day_{0}".format(day))
        reform_party(final_party_output, intermediate_party_output)
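
# A hedged usage sketch (all argument values are assumptions, not from the
# source): one day of template creation, then two days of detection.
# run(analysis_start=UTCDateTime(2019, 1, 5), analysis_len=3,
#     template_creation_start=UTCDateTime(2019, 1, 1),
#     template_creation_len=1, write_streams=False,
#     intermediate_party_output="parties_tmp",
#     final_party_output="party_final",
#     intermediate_stream_output="streams_tmp",
#     final_stream_output="streams_final")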
Exemple #39
0
    def test_redirection_auth(self):
        """
        Tests the redirection of GET and POST requests using authentication.

        By default these should not redirect and an exception is raised.
        """
        # Clear the cache.
        Client._Client__service_discovery_cache.clear()

        # The error will already be raised during the initialization in most
        # cases.
        self.assertRaises(
            FDSNRedirectException,
            Client,
            "IRIS",
            service_mappings={
                "station": "http://ds.iris.edu/files/redirect/307/station/1",
                "dataselect":
                "http://ds.iris.edu/files/redirect/307/dataselect/1",
                "event": "http://ds.iris.edu/files/redirect/307/event/1"
            },
            user="******",
            password="******",
            user_agent=USER_AGENT)

        # The force_redirect flag overwrites that behaviour.
        c_auth = Client(
            "IRIS",
            service_mappings={
                "station": "http://ds.iris.edu/files/redirect/307/station/1",
                "dataselect":
                "http://ds.iris.edu/files/redirect/307/dataselect/1",
                "event": "http://ds.iris.edu/files/redirect/307/event/1"
            },
            user="******",
            password="******",
            user_agent=USER_AGENT,
            force_redirect=True)

        st = c_auth.get_waveforms(
            network="IU",
            station="ANMO",
            location="00",
            channel="BHZ",
            starttime=UTCDateTime("2010-02-27T06:30:00.000"),
            endtime=UTCDateTime("2010-02-27T06:30:01.000"))
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        inv = c_auth.get_stations(starttime=UTCDateTime("2000-01-01"),
                                  endtime=UTCDateTime("2001-01-01"),
                                  network="IU",
                                  station="ANMO",
                                  level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))

        cat = c_auth.get_events(starttime=UTCDateTime("2001-01-07T01:00:00"),
                                endtime=UTCDateTime("2001-01-07T01:05:00"),
                                catalog="ISC")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(cat)))

        # Also test the bulk requests which are done using POST requests.
        bulk = (("TA", "A25A", "", "BHZ", UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")),
                ("TA", "A25A", "", "BHE", UTCDateTime("2010-03-25T00:00:00"),
                 UTCDateTime("2010-03-25T00:00:01")))
        st = c_auth.get_waveforms_bulk(bulk, quality="B", longestonly=False)
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(st)))

        starttime = UTCDateTime(1990, 1, 1)
        endtime = UTCDateTime(1990, 1, 1) + 10
        bulk = [
            ["IU", "ANMO", "", "BHE", starttime, endtime],
            ["IU", "CCM", "", "BHZ", starttime, endtime],
        ]
        inv = c_auth.get_stations_bulk(bulk, level="network")
        # Just make sure something is being downloaded.
        self.assertTrue(bool(len(inv.networks)))
Exemple #40
0
def main():

    # Run Input Parser
    args = arguments.get_arguments_calc_auto()

    # Load Database
    # stdb=0.1.4
    try:
        db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)

    # stdb=0.1.3
    except Exception:
        db = stdb.io.load_db(fname=args.indb)

        # Construct station key loop
        allkeys = sorted(db.keys())

        # Extract key subset
        if len(args.stkeys) > 0:
            stkeys = []
            for skey in args.stkeys:
                stkeys.extend([s for s in allkeys if skey in s])
        else:
            stkeys = sorted(db.keys())

    # Loop over station keys
    for stkey in list(stkeys):

        # Extract station information from dictionary
        sta = db[stkey]

        # Output directory
        datapath = Path('DATA') / stkey
        if not datapath.is_dir():
            datapath.mkdir(parents=True)

        # Establish client
        if len(args.UserAuth) == 0:
            data_client = Client(args.Server)
        else:
            data_client = Client(args.Server,
                                 user=args.UserAuth[0],
                                 password=args.UserAuth[1])

        # Establish client for events
        event_client = Client()

        # Get catalogue search start time
        if args.startT is None:
            tstart = sta.startdate
        else:
            tstart = args.startT

        # Get catalogue search end time
        if args.endT is None:
            tend = sta.enddate
        else:
            tend = args.endT
        if tstart > sta.enddate or tend < sta.startdate:
            continue

        # Temporary print locations
        tlocs = sta.location
        if len(tlocs) == 0:
            tlocs = ['']
        for il in range(0, len(tlocs)):
            if len(tlocs[il]) == 0:
                tlocs[il] = "--"
        sta.location = tlocs

        # Update Display
        print(" ")
        print(" ")
        print("|" + "=" * 50 + "|")
        print("|                   {0:>8s}                       |".format(
            sta.station))
        print("|" + "=" * 50 + "|")
        print("|  Station: {0:>2s}.{1:5s}                               |".
              format(sta.network, sta.station))
        print("|      Channel: {0:2s}; Locations: {1:15s}     |".format(
            sta.channel, ",".join(tlocs)))
        print("|      Lon: {0:7.2f}; Lat: {1:6.2f}                   |".format(
            sta.longitude, sta.latitude))
        print("|      Start time: {0:19s}             |".format(
            sta.startdate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|      End time:   {0:19s}             |".format(
            sta.enddate.strftime("%Y-%m-%d %H:%M:%S")))
        print("|" + "-" * 50 + "|")
        print("| Searching Possible events:                       |")
        print("|   Start: {0:19s}                     |".format(
            tstart.strftime("%Y-%m-%d %H:%M:%S")))
        print("|   End:   {0:19s}                     |".format(
            tend.strftime("%Y-%m-%d %H:%M:%S")))
        if args.maxmag is None:
            print("|   Mag:   >{0:3.1f}".format(args.minmag) +
                  "                                     |")
        else:
            msg = "|   Mag:   {0:3.1f}".format(args.minmag) + \
                " - {0:3.1f}".format(args.maxmag) + \
                "                           |"
            print(msg)

        print("| ...                                              |")

        # Get catalogue using deployment start and end
        cat = event_client.get_events(starttime=tstart,
                                      endtime=tend,
                                      minmagnitude=args.minmag,
                                      maxmagnitude=args.maxmag)

        # Total number of events in Catalogue
        nevK = 0
        nevtT = len(cat)
        print("|  Found {0:5d}".format(nevtT) +
              " possible events                     |")
        ievs = range(0, nevtT)

        # Get Local Data Availabilty
        if len(args.localdata) > 0:
            print("|" + "-" * 50 + "|")
            print("| Cataloging Local Data...                         |")
            if args.useNet:
                stalcllist = utils.list_local_data_stn(lcldrs=args.localdata,
                                                       sta=sta.station,
                                                       net=sta.network,
                                                       dtype=args.dtype,
                                                       altnet=sta.altnet)
                print("|   {0:>2s}.{1:5s}: {2:6d} files              ".format(
                    sta.network, sta.station, len(stalcllist)))
            else:
                stalcllist = utils.list_local_data_stn(lcldrs=args.localdata,
                                                       dtype=args.dtype,
                                                       sta=sta.station)
                print("|   {0:5s}: {1:6d} files                      ".format(
                    sta.station, len(stalcllist)))
        else:
            stalcllist = []
        print("|" + "=" * 50 + "|")

        # Select order of processing
        if args.reverse:
            ievs = range(0, nevtT)
        else:
            ievs = range(nevtT - 1, -1, -1)

        # Read through catalogue
        for iev in ievs:

            # Extract event
            ev = cat[iev]

            # Initialize Split object with station info
            split = Split(sta)

            # Add event to split object
            accept = split.add_event(ev,
                                     gacmin=args.mindist,
                                     gacmax=args.maxdist,
                                     phase=args.phase,
                                     returned=True)

            # Define time stamp
            yr = str(split.meta.time.year).zfill(4)
            jd = str(split.meta.time.julday).zfill(3)
            hr = str(split.meta.time.hour).zfill(2)

            # If event is accepted (data exists)
            if accept:

                # Display Event Info
                nevK = nevK + 1
                if args.reverse:
                    inum = iev + 1
                else:
                    inum = nevtT - iev + 1
                print(" ")
                print("|" + "*" * 50 + "|")
                print("* #{0:d} ({1:d}/{2:d}):  {3:13s} {4}".format(
                    nevK, inum, nevtT,
                    split.meta.time.strftime("%Y%m%d_%H%M%S"), stkey))
                if args.verb:
                    print("*   Phase: {}".format(args.phase))
                    print("*   Origin Time: " +
                          split.meta.time.strftime("%Y-%m-%d %H:%M:%S"))
                    print("*   Lat: {0:6.2f};        Lon: {1:7.2f}".format(
                        split.meta.lat, split.meta.lon))
                    print("*   Dep: {0:6.2f} km;     Mag: {1:3.1f}".format(
                        split.meta.dep, split.meta.mag))
                    print(
                        "*   Dist: {0:7.2f} km;".format(split.meta.epi_dist) +
                        "   Epi dist: {0:6.2f} deg\n".format(split.meta.gac) +
                        "*   Baz:  {0:6.2f} deg;".format(split.meta.baz) +
                        "   Az: {0:6.2f} deg".format(split.meta.az))

                # Event Folder
                timekey = split.meta.time.strftime("%Y%m%d_%H%M%S")
                datadir = datapath / timekey
                ZNEfile = datadir / 'ZNE_data.pkl'
                LQTfile = datadir / 'LQT_data.pkl'
                metafile = datadir / 'Meta_data.pkl'
                stafile = datadir / 'Station_data.pkl'
                splitfile = datadir / 'Split_results_auto.pkl'

                # Check if split results already exist and overwrite has been set
                if datadir.exists():
                    if splitfile.exists():
                        if not args.ovr:
                            continue

                if args.recalc:
                    if np.sum([
                            file.exists()
                            for file in [ZNEfile, metafile, stafile]
                    ]) < 3:
                        continue
                    sta = pickle.load(open(stafile, "rb"))
                    split = Split(sta)
                    meta = pickle.load(open(metafile, "rb"))
                    split.meta = meta
                    dataZNE = pickle.load(open(ZNEfile, "rb"))
                    split.dataZNE = dataZNE

                    # Rotate from ZNE to 'LQT'
                    split.rotate(align='LQT')

                    # Filter rotated traces
                    split.dataLQT.filter('bandpass',
                                         freqmin=args.fmin,
                                         freqmax=args.fmax)

                    # Calculate snr over dt_snr seconds
                    split.calc_snr()

                    # Save LQT Traces
                    pickle.dump(split.dataLQT, open(LQTfile, "wb"))

                else:

                    # Get data
                    has_data = split.download_data(
                        client=data_client,
                        dts=args.dts,
                        stdata=stalcllist,
                        dtype=args.dtype,
                        ndval=args.ndval,
                        new_sr=args.new_sampling_rate,
                        returned=True,
                        verbose=args.verb)

                    if not has_data:
                        continue

                    # Rotate from ZNE to 'LQT'
                    split.rotate(align='LQT')

                    # Filter rotated traces
                    split.dataLQT.filter('bandpass',
                                         freqmin=args.fmin,
                                         freqmax=args.fmax)

                    # Calculate snr over dt_snr seconds
                    split.calc_snr()

                    # If SNR lower than user-specified threshold, continue
                    if split.meta.snrq < args.msnr:
                        if args.verb:
                            print("* SNRQ < {0:.1f}, continuing".format(
                                args.msnr))
                            print("*" * 50)
                        continue

                    # Make sure no processing happens for NaNs
                    if np.isnan(split.meta.snrq):
                        if args.verb:
                            print("* SNR NaN, continuing")
                            print("*" * 50)
                        continue

                    # Create Folder if it doesn't exist
                    if not datadir.exists():
                        datadir.mkdir(parents=True)

                    # Save ZNE Traces
                    pickle.dump(split.dataZNE, open(ZNEfile, "wb"))

                    # Save LQT Traces
                    pickle.dump(split.dataLQT, open(LQTfile, "wb"))

                if args.verb:
                    print("* SNRQ: {}".format(split.meta.snrq))
                    print("* SNRT: {}".format(split.meta.snrt))

                if args.calc or args.recalc:

                    # Analyze
                    split.analyze(verbose=args.verb)

                    # Continue if problem with analysis
                    if split.RC_res.edtt is None or split.SC_res.edtt is None:
                        if args.verb:
                            print("* !!! DOF Error. --> Skipping...")
                            print("*" * 50)
                        continue

                    # Determine if Null and Quality of estimate
                    split.is_null(args.snrTlim, verbose=args.verb)
                    split.get_quality(verbose=args.verb)

                # Display results
                if args.verb:
                    split.display_meta()
                    if args.calc or args.recalc:
                        split.display_results()
                        split.display_null_quality()

                # Save event meta data
                pickle.dump(split.meta, open(metafile, "wb"))

                # Save Station Data
                pickle.dump(split.sta, open(stafile, "wb"))

                if args.calc or args.recalc:
                    # Save Split Data
                    file = open(splitfile, "wb")
                    pickle.dump(split.SC_res, file)
                    pickle.dump(split.RC_res, file)
                    pickle.dump(split.null, file)
                    pickle.dump(split.quality, file)
                    file.close()

                    # Initialize diagnostic figure and plot it
                    if args.diagplot:
                        dplot = DiagPlot(split)
                        dplot.plot_diagnostic()
                        plt.figure(dplot.axes[0].number)
                        plt.show()
class Fetch:
    def __init__(self, network=None, station=None, level='channel',
                 channel='BH*', starttime=None, endtime=None,
                 minlongitude=None, maxlongitude=None,
                 minlatitude=None, maxlatitude=None,
                 mindepth=None, maxdepth=None, clientname="IRIS",
                 vmodel="ak135", station_autoselect=False):
        '''Note that network and station can be a list of inputs, like "AK,TA,AT"'''

        self.client = Client(clientname)
        self.clientname = clientname

        self.network = network
        self.station = station
        self.level = level
        self.channel = channel

        if endtime == 'today':
            endtime = str(datetime.datetime.today())

        self.starttime = UTCDateTime(starttime)
        self.endtime = UTCDateTime(endtime)

        self.minlatitude = minlatitude
        self.minlongitude = minlongitude
        self.maxlatitude = maxlatitude
        self.maxlongitude = maxlongitude

        #Quake catalog object
        self.quake_cat = None
        self.inventory = None

        #For ray calculation
        self.vmodel = TauPyModel(model=vmodel)

        #EK added 03/2019
        self.mindepth = mindepth
        self.maxdepth = maxdepth

        self.station_autoselect_flag = station_autoselect

    def fetchInventory(self):
        '''Get an obspy inventory containing all the station information'''

        if self.station != 'None':
            self.inventory = self.client.get_stations(network=self.network,station=self.station,level=self.level,\
             channel=self.channel,starttime=self.starttime,endtime=self.endtime,minlongitude=self.minlongitude,\
             minlatitude=self.minlatitude,maxlongitude=self.maxlongitude,maxlatitude=self.maxlatitude)
            print(self.inventory)
        else:
            self.inventory = self.client.get_stations(network=self.network,station=None,level=self.level,\
             channel=self.channel,starttime=self.starttime,endtime=self.endtime,minlongitude=self.minlongitude,
             minlatitude=self.minlatitude,maxlongitude=self.maxlongitude,maxlatitude=self.maxlatitude)

    def fetchEvents(self,
                    centercoords=None,
                    minradius=None,
                    maxradius=None,
                    minmag=6,
                    mindepth=None,
                    maxdepth=None,
                    maxmag=None,
                    tofile=None,
                    display=True):
        '''Get an obspy quake catalog containing the event information that was requested. If centercoords and min/max radius
		are set, then the program will use those to fetch. If not, it will use the user-supplied box coordinates. User supplied dates and
		times are also used'''
        self.maxmag = maxmag
        self.minmag = minmag
        self.minradius = minradius
        self.maxradius = maxradius
        self.centercoords = centercoords

        if centercoords:

            print("\nGathering earthquakes using center/radius info\n")

            self.quake_cat = self.client.get_events(starttime=self.starttime,endtime=self.endtime,latitude=self.centercoords[0],\
             longitude=self.centercoords[1],minradius=self.minradius,maxradius=self.maxradius,minmagnitude=minmag,mindepth=self.mindepth,maxdepth=self.maxdepth,maxmagnitude=maxmag)

        else:

            print("\nGathering earthquakes within bounding box\n")
            #                        print(self.minlongitude,self.maxlongitude,self.minlatitude,self.maxlatitude)

            self.quake_cat = self.client.get_events(
                starttime=self.starttime,
                endtime=self.endtime,
                minlatitude=self.minlatitude,
                maxlatitude=self.maxlatitude,
                minlongitude=self.minlongitude,
                maxlongitude=self.maxlongitude,
                minmagnitude=self.minmag,
                maxmagnitude=self.maxmag,
                mindepth=self.mindepth,
                maxdepth=self.maxdepth)

        if display:

            print("---------------------------------")
            print("Got the following events")
            print("---------------------------------")
            print(self.quake_cat.__str__(print_all=True))
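
    # Usage sketch for the two fetch modes (values are illustrative
    # assumptions, not from the original source):
    #   f.fetchEvents(minmag=6.0)                                   # bounding-box mode
    #   f.fetchEvents(centercoords=[35.0, -106.0], maxradius=30.0)  # center/radius mode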

    def writeEvents(self, centercoords=None):
        '''Write event information to file, which can be loaded as a pandas dataframe.
		Specify centercoords as a list [lat,lon] and the time of the first-arriving (P) phase
		will be reported'''

        ofname = 'Events_%s_%s_%s_%s_%s_%s_mag_%s-%s_depth_%s-%s_km.dat' %(self.starttime,self.endtime,self.minlatitude,\
         self.minlongitude,self.maxlatitude,self.maxlongitude,self.minmag,self.maxmag,self.mindepth,self.maxdepth)

        outfile = open(ofname, 'w')

        if self.quake_cat is None:

            print("Need to call fetchEvents first")
            sys.exit(1)

        if centercoords is None:

            for event in self.quake_cat:

                time = event.origins[0].time
                lat = event.origins[0].latitude
                lon = event.origins[0].longitude
                dep = event.origins[0].depth / 1000.
                mag = event.magnitudes[0].mag

                if self.station_autoselect_flag:

                    cnt = 0

                    for network in self.inventory:
                        for station in network:

                            stlat = station.latitude
                            stlon = station.longitude

                            ddeg = locations2degrees(lat, lon, stlat, stlon)
                            distance_m, az, baz = gps2dist_azimuth(
                                lat, lon, stlat, stlon)

                            theta = np.arctan2(distance_m, dep * 1000.)

                            if theta <= np.pi / 4:

                                arrivals = self.vmodel.get_travel_times(
                                    source_depth_in_km=dep,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])

                                if len(arrivals) > 0:
                                    cnt = cnt + 1
                    if cnt > 0:
                        outfile.write("%s %s %s %s %s\n" %
                                      (lon, lat, dep, mag, time))
                else:
                    outfile.write("%s %s %s %s %s\n" %
                                  (lon, lat, dep, mag, time))

        #haven't added the SWW here, so in this case all events will be written to the file; might change in the future if needed

        #In this case, we write the time of the first arriving phase at the stations

        else:

            try:
                clon = centercoords[1]
                clat = centercoords[0]
            except (TypeError, IndexError):
                print("centercoords needs to be entered as a list [lat,lon]")
                sys.exit(1)

            for event in self.quake_cat:

                time = event.origins[0].time
                lat = event.origins[0].latitude
                lon = event.origins[0].longitude
                dep = event.origins[0].depth / 1000.0

                try:

                    cdist = locations2degrees(lat, lon, clat, clon)
                    arrivals = self.vmodel.get_travel_times(source_depth_in_km=dep,\
                    distance_in_degree=cdist,phase_list=["p","P"])
                except:
                    continue

                if len(arrivals) > 0:
                    first_phase = arrivals[0].name
                    first_phase_time = time + arrivals[0].time

                else:
                    first_phase = 'NaN'
                    first_phase_time = "NaN"

                mag = event.magnitudes[0].mag

                outfile.write("%s %s %s %s %s %s %s %s\n" %
                              (lon, lat, dep, mag, time, first_phase_time,
                               first_phase, cdist))

        outfile.close()

    def writeStations(self):
        '''Write station information to file, which can be loaded as a pandas dataframe'''

        ofname = 'Stations_%s_%s_%s_%s_%s_%s_mag_%s-%s_depth_%s-%s_km.dat' %(self.starttime,self.endtime,self.minlatitude,\
         self.minlongitude,self.maxlatitude,self.maxlongitude,self.minmag,self.maxmag,self.mindepth,self.maxdepth)

        outfile = open(ofname, 'w')

        try:

            for network in self.inventory:

                netname = network.code

                for station in network:

                    code = station.code
                    lat = station.latitude
                    lon = station.longitude
                    ele = station.elevation
                    stdate = station.start_date

                    if self.station_autoselect_flag:
                        #EK added 04/2019 to write only stations that we will later download
                        cnt = 0.

                        for event in self.quake_cat:

                            time = event.origins[0].time
                            evlat = event.origins[0].latitude
                            evlon = event.origins[0].longitude
                            dep = event.origins[0].depth / 1000.
                            mag = event.magnitudes[0].mag

                            ddeg = locations2degrees(evlat, evlon, lat, lon)
                            distance_m, az, baz = gps2dist_azimuth(
                                evlat, evlon, lat, lon)

                            theta = np.arctan2(distance_m, dep * 1000.)

                            if theta <= np.pi / 4:

                                arrivals = self.vmodel.get_travel_times(
                                    source_depth_in_km=dep,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])

                                if len(arrivals) > 0:
                                    cnt = cnt + 1
                        if cnt > 0:

                            outfile.write(
                                "%s %s %s %s %s %s\n" %
                                (lon, lat, ele, netname, code, stdate))
                    else:
                        outfile.write("%s %s %s %s %s %s\n" %
                                      (lon, lat, ele, netname, code, stdate))

            outfile.close()

        except:

            print("Need to run fetchInventory before writing stations")
            sys.exit(1)

    def writeRays(self, catalog):
        '''Write station-event information to file, which can be loaded as a pandas dataframe'''

        #Either we want to look at data that has already been downloaded and investigate the station-event pairs, or
        #just make station-event pairs based on what's in the inventory and event catalogs
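
        # A minimal sketch (an assumption, not the original implementation):
        # pair each event in the supplied catalog with each station in
        # self.inventory and write one line per ray; assumes fetchInventory
        # has been run and locations2degrees is imported at module level.
        outfile = open('Rays.dat', 'w')
        for event in catalog:
            evlat = event.origins[0].latitude
            evlon = event.origins[0].longitude
            for network in self.inventory:
                for station in network:
                    ddeg = locations2degrees(evlat, evlon,
                                             station.latitude, station.longitude)
                    outfile.write("%s %s %s %s %s\n" %
                                  (evlon, evlat, station.longitude,
                                   station.latitude, ddeg))
        outfile.close()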

    def GetData(self,stationdirpath='stations',datadirpath='waveforms',req_type='continuous',\
     chunklength=86400,tracelen=20000, vmodel='ak135'):
        '''Call obspy mass downloader to get waveform data. Chunklength refers to the trace length option
		for a continuous download, tracelen is for an event-based request'''

        #Currently set up to download one day's worth of data in the continuous mode, 20000 seconds
        #in the event-based mode (matching the chunklength and tracelen defaults above)

        self.stationdirpath = stationdirpath
        self.datadirpath = datadirpath

        from obspy.clients.fdsn.mass_downloader import RectangularDomain, CircularDomain,\
        Restrictions, MassDownloader

        if req_type == 'continuous':

            #Get data from all stations within this domain

            domain = RectangularDomain(minlatitude=self.minlatitude,maxlatitude=self.maxlatitude,\
             minlongitude=self.minlongitude,maxlongitude=self.maxlongitude)

            #Download data in daily segments - may want to change

            restrictions = Restrictions(\
                                    starttime=self.starttime,endtime=self.endtime,\
                                    chunklength_in_sec=chunklength,\
                                    channel=self.channel,station=self.station,location="",\
                                    reject_channels_with_gaps=False,\
                                    minimum_length=0.0,minimum_interstation_distance_in_m=100.0)

            #Call mass downloader to get the waveform information

            mdl = MassDownloader(providers=[self.clientname])

            mdl.download(domain,
                         restrictions,
                         mseed_storage=datadirpath,
                         stationxml_storage=stationdirpath)

        elif req_type == 'event':

            if self.quake_cat is None:

                print(
                    "Stop: Must call fetchEvents first to get event catalog to download from"
                )
                sys.exit(1)

            #Add option for non-continuous download - event/station pairing for example

            #Get data for all stations in this domain

            domain = RectangularDomain(minlatitude=self.minlatitude,maxlatitude=self.maxlatitude,\
             minlongitude=self.minlongitude,maxlongitude=self.maxlongitude)

            for event in self.quake_cat:
                cnt = 0.
                print("Downloading data for event %s" % event)

                #For each event, download the waveforms at all stations requested

                origin_time = event.origins[0].time

                vel_model = TauPyModel(model=vmodel)

                #case where we only want to download data for some station-event pairs
                stations_to_exclude = []

                if self.station_autoselect_flag:

                    stations_to_download = []
                    evlat = event.origins[0].latitude
                    evlon = event.origins[0].longitude

                    #EK changes added 04/2019
                    evdep = event.origins[0].depth

                    for network in self.inventory:

                        for station in network:

                            stlat = station.latitude
                            stlon = station.longitude

                            #EK 04/2019
                            #this downloads data within Short Wave Window (SWW), a cone under the station bounded by an angle, here we chose 45 deg
                            #calculate distance between eq and station and azimuth

                            ddeg = locations2degrees(evlat, evlon, stlat,
                                                     stlon)
                            distance_m, az, baz = gps2dist_azimuth(
                                evlat, evlon, stlat, stlon)

                            #calculate proxy for incident angle

                            theta = np.arctan2(distance_m, evdep)
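
                            # Illustrative check (not in the original): an
                            # event 100 km deep with a station 50 km away
                            # epicentrally gives
                            # theta = arctan2(50000, 100000) ~ 26.6 deg <= 45 deg,
                            # so that station lies inside the SWW cone.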

                            if theta <= np.pi / 4:

                                #find if station has needed arrival

                                arrivals = vel_model.get_travel_times(
                                    source_depth_in_km=evdep / 1000.,
                                    distance_in_degree=ddeg,
                                    phase_list=["s", "S"])
                                if len(arrivals) > 0:

                                    #get stations you want to download

                                    stations_to_download.append(station.code)
                                    print(station.code,
                                          'angle = %.2f' % np.rad2deg(theta))
                                    print(arrivals)
                                    cnt = cnt + 1
                                else:
                                    stations_to_exclude.append(station.code)
                            else:

                                if station.code not in stations_to_exclude:
                                    stations_to_exclude.append(station.code)

                    print(
                        "\n-------------\n%g event-station pairs found in SWW\n-------------\n"
                        % cnt)
                    print(
                        "\n-------------\nSelecting just the following stations for download\n-------------\n"
                    )
                    print(stations_to_download)

                    #this approach doesn't work, use exclude_stations flag later
                    #restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                    #reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                    #channel=self.channel,location="",network=self.network,station=stations_to_download)

                #case where we have single network

                if self.network:

                    restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                     reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                     channel=self.channel,location="",network=self.network,exclude_stations=stations_to_exclude)

                #Case where we want all networks within a region (assumes that we also want all stations unless we have built
                # a stations to exclude list)

                else:

                    restrictions = Restrictions(starttime=origin_time,endtime=origin_time + tracelen,\
                     reject_channels_with_gaps=False, minimum_length=0.95, minimum_interstation_distance_in_m=10E3,\
                     channel=self.channel,exclude_stations=stations_to_exclude)

                mdl = MassDownloader(providers=[self.clientname])

                mdl.download(domain, restrictions, mseed_storage=datadirpath,\
                 stationxml_storage=stationdirpath)

    def Set_datapaths(self,
                      waveforms_path="waveforms",
                      station_path="stations"):
        '''Set the directory names where downloaded data can be found'''

        self.stationdirpath = station_path
        self.datadirpath = waveforms_path

    def CorrectResponse(self, resptype='displacement'):
        '''Correct downloaded data for instrument response'''

        if resptype == 'displacement':
            outtype = "DISP"
        elif resptype == 'velocity':
            outtype = "VEL"
        elif resptype == 'acceleration':
            outtype = "ACC"
        else:
            print(
                "User input correction unit not valid: use displacement, velocity or acceleration"
            )
            sys.exit(1)

        #get the station data
        station_path = '%s/*.xml' % self.stationdirpath
        stations = glob.glob(station_path)

        for station in stations:

            inv = op.read_inventory(station)

            stationname = station.split('/')[1][:-4]

            waveforms_path = '%s/%s*.mseed' % (self.datadirpath, stationname)
            waveforms = glob.glob(waveforms_path)

            stream = op.Stream()
            added_waveforms = []

            for waveform in waveforms:

                print("Adding waveform %s to stream" % waveform)

                try:
                    st = op.read(waveform, format='mseed')
                    stream += st[0]
                    added_waveforms.append(waveform)
                except:
                    print("Could not add %s to stream" % waveform)
                    continue

            print("\nCorrecting responses in stream\n")

            stream.remove_response(inventory=inv, output=outtype)

            #write corrected waveforms to mseed output

            i = 0
            for trace in stream:
                outname = '%s_%s.mseed' % (added_waveforms[i][:-6], outtype)
                trace.write(outname, format='mseed')
                i += 1
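
# A minimal end-to-end sketch using the Fetch class above (all parameter
# values are illustrative assumptions, not from the original source):
#
#   f = Fetch(network="IU", channel="BH*",
#             starttime="2015-01-01", endtime="2015-03-01",
#             minlongitude=-120, maxlongitude=-100,
#             minlatitude=30, maxlatitude=45)
#   f.fetchInventory()
#   f.fetchEvents(minmag=6.0)
#   f.GetData(req_type='event', tracelen=2000)
#   f.CorrectResponse(resptype='velocity')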
Exemple #42
0
#%% Get catalog
chan_type = 'BHZ' # e.g., BHZ
start_buff = 10   # Pre-event buffer
end_buff   = 3600 # Time after OT recorded
min_mag = 4.5     # Threshold to find event in IRIS catalog
etime = 'XXXX-XX-XXTXX:XX:00' # event must be unique and occur within 1 minute after this time
#etime = '2010-03-24T03:06:00' # event has to be unique and follow time by less than 1 minute
min_dist = 0      # Minimum distance loaded, degrees
max_dist = 180    # Maximum distance loaded, degrees
verbose = 0       # 1 prints networks and stations
refine = 0        # Use refined location

client = Client('IRIS')
t = UTCDateTime(etime)
catalog = client.get_events(starttime = t, endtime = t + 60, minmagnitude = min_mag)
print('IRIS epicenter:',catalog)

if refine == 1:
	# refined time and hypocenter
	ev_lon   = XXX.XXX  # South Sandwich Islands
	ev_lat   = XXX.XXX
	ev_depth = XX.X
	t        = UTCDateTime('XXXX-XX-XXTXX:XX:XX.XX')
#	t        = UTCDateTime('XXXX-XX-XXTXX:XX:XX.XX')


	#  Overwrite IRIS location in catalog
	catalog[0].origins[0].longitude = ev_lon
	catalog[0].origins[0].latitude = ev_lat
	catalog[0].origins[0].depth = ev_depth
dir='Data'
if not os.path.exists(dir):
    os.makedirs(dir)
dir= dir+'/'+name
if not os.path.exists(dir):
    os.makedirs(dir)
dirresp=dir+'/Responsefiles'
if not os.path.exists(dirresp):
    os.makedirs(dirresp)

dir=dir+'/Originals'
if not os.path.exists(dir):
    os.makedirs(dir)   
   
# save event catalog
cat=irisclient.get_events(latitude=latitude, longitude=longitude, maxradius=maxrad, starttime=starttime,endtime=endtime,minmagnitude=minmag)
evtlatitude=cat[0].origins[0]['latitude']
evtlongitude=cat[0].origins[0]['longitude']
evtdepth=cat[0].origins[0]['depth']/1.e3 # convert to km from m
evstarttime=cat[0].origins[0].time

eventtime=cat[0].origins[0].time+lengthoftrace
if len(cat)>1:
    print('more than one event is selected')
    sys.exit()

# Select what stations are present at the time
inventory = irisclient.get_stations(starttime=evstarttime, endtime=eventtime)  # channel filtering (e.g. channel='BH*') did not seem to work here
count = 0
inventory.get_response
#!/bin/env python

from obspy import UTCDateTime
from obspy.clients.fdsn import Client
from obspy.geodetics import locations2degrees
from getPwaveArrival import getPwaveArrival
import numpy as np


f = open("Output.txt", 'w')
client = Client("IRIS")
startTime = UTCDateTime("2016-01-01T00:00:00")
endTime = UTCDateTime("2017-05-26T00:00:00")
minMag = 7.0
EventCatalog = client.get_events(starttime=startTime,endtime=endTime,\
        minmagnitude=minMag)
staLat = 34.945910
staLon = -106.457200
#EventCatalog
print(EventCatalog.count())
for event in EventCatalog:
    print(event.origins)
    evLat=event.origins[0]['latitude']
    evLon=event.origins[0]['longitude']
    evDepth=event.origins[0]['depth']/1000.0
    evYear=event.origins[0].time.year
    print(evYear)
    
#    DegDist = locations2degrees(staLat,staLon,evLat,evLon)
#    pTime = getPwaveArrival(evDepth,DegDist)
#    dataStart = event.origins[0]['time']+pTime-10
Exemple #45
0
t = UTCDateTime("2011-03-11T05:46:23")  # Tohoku
st = client.get_waveforms("II", "PFO", "*", "LHZ", t + 10 * 60, t + 30 * 60)
print(st)
st.plot()
# -

# - again, waveform data is returned as a Stream object
# - for all custom processing workflows it does not matter if the data originates from a local file or from a web service
#
# #### Event Metadata
#
# The FDSN client can also be used to request event metadata:

t = UTCDateTime("2011-03-11T05:46:23")  # Tohoku
catalog = client.get_events(starttime=t - 100,
                            endtime=t + 24 * 3600,
                            minmagnitude=7)
print(catalog)
catalog.plot()

# Requests can have a wide range of constraints (see [ObsPy Documentation](http://docs.obspy.org/packages/autogen/obspy.fdsn.client.Client.get_events.html)); a circular search is sketched after the list below:
#
# - time range
# - geographical (lonlat-box, circular by distance)
# - depth range
# - magnitude range, type
# - contributing agency
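
# As a concrete sketch of the circular constraint (the coordinates below are
# approximate assumptions for the Tohoku epicenter):

catalog_circular = client.get_events(starttime=t - 100, endtime=t + 24 * 3600,
                                     latitude=38.3, longitude=142.4,
                                     maxradius=10, minmagnitude=6)
print(catalog_circular)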

# #### Station Metadata
#
# Finally, the FDSN client can be used to request station metadata. Stations can be looked up using a wide range of constraints (see [ObsPy documentation](http://docs.obspy.org/packages/autogen/obspy.fdsn.client.Client.get_stations.html)):
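
# A hedged sketch of such a station request (the network/station codes are
# assumptions):

inventory = client.get_stations(network="TA", station="A*",
                                starttime=t, endtime=t + 3600,
                                level="station")
print(inventory)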
        return ('mo')
    elif depth < 20.0:
        return ('yo')
    elif depth < 30.0:
        return ('go')
    elif depth < 40.0:
        return ('co')
    else:
        return ('bo')

#---DATA_FROM_FDSN---#
#get specified event
client = Client(base_url = "http://arclink.ethz.ch")
starttime = UTCDateTime("2016-01-01")
endtime = UTCDateTime()
cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=1)#, filename="requested_events.xml"
evtnum = cat.count()
print(type(cat)) #Catalog
print(cat) #add , CatalogObject.__str__(print_all=True) to print all events
#cat.plot() #add also resouces like: projection="local"
focaltime = []
hypolon = []
hypolat = []
hypodep = []
eqmag = []
for x in range(0, evtnum):
	event = cat[x]
	focaltime.append(event.origins[0].time)
	hypolon.append(event.origins[0].longitude)
	hypolat.append(event.origins[0].latitude)
	hypodep.append(event.origins[0].depth/1000)
def data_request(client_name, cat_client_name, start, end, minmag, net=None, scode="*", channels="*", minlat=None,
                 maxlat=None,minlon=None,maxlon=None, station_minlat=None,
                 station_maxlat=None, station_minlon=None, station_maxlon=None, mindepth=None, maxdepth=None, 
                 radialcenterlat=None, radialcenterlon=None, minrad=None, maxrad=None,
                 station_radialcenterlat=None, station_radialcenterlon=None, station_minrad=None, station_maxrad=None,
                 azimuth=None, baz=False, t_before_first_arrival=1, t_after_first_arrival=9, savefile=False, file_format='SAC'):
	"""
	Searches a given database for seismic data. Restrictions in terms of starttime, endtime, network etc. can be made.
	If data is found it returns a stream variable, with the waveforms, an inventory with all station and network information
	and a catalog with the event information.

	:param client_name: Name of desired fdsn client, for a list of all clients see: 
		                https://docs.obspy.org/tutorial/code_snippets/retrieving_data_from_datacenters.html
	:type  client_name:  string

	:param cat_client_name: Name of Event catalog

	:type  cat_client_name: string

	:param start, end: starttime, endtime
	:type : UTCDateTime

	:param minmag: Minimum magnitude of event
	:type  minmag: float

	:param net: Network code for which to search data for
	:type  net: string

	:param scode: Station code for which to search data for
	:type  scode: string

	:param channels: Used channels of stations 
	:type  channels: string

	:param minlat, maxlat, minlon, maxlon: Coordinate-window of interest
	:type : float

	:param mindepth, maxdepth: depth information of event in km
	:type : float

	:param radialcenterlat, radialcenterlon: Centercoordinates of a radialsearch, if radialsearch=True
	:type : float

	:param minrad, maxrad: Minimum and maximum radii for radialsearch
	:type : float

	:param azimuth: Desired range of azimuths of event, station couples in deg as a list [minimum azimuth, maximum azimuth]
	:type  azimuth: list

	:param baz: Desired range of back-azimuths of event, station couples in deg as a list [minimum back azimuth, maximum back azimuth]
	:type  baz: list

	:param t_before_first_arrival, t_after_first_arrival: Length of the seismograms; starting point, minutes before 1st arrival and
															minutes after 1st arrival.
	:type  t_before_first_arrival, t_after_first_arrival: float, int
	
	:param savefile: if True, Stream, Inventory and Catalog will be saved locally, in the current directory.
	:type  savefile: bool

	:param file_format: File format of the data, for supported formats see: https://docs.obspy.org/packages/autogen/obspy.core.stream.Stream.write.html#obspy.core.stream.Stream.write
	:type  file_format: string
	
	returns

	:param: list_of_stream, Inventory, Catalog
	:type: list, obspy, obspy 



	### Example 1 ###

	from obspy import UTCDateTime
	from sipy.util.data_request import data_request

	start = UTCDateTime(2010,1,1,0,0)
	end = UTCDateTime(2010,12,31,0,0)
	minmag = 8
	station = '034A'
	list_of_stream, inventory, cat = data_request('IRIS', 'IRIS', start, end, minmag, net='TA', scode=station)
	
	st = list_of_stream[0]
	st = st.select(channel='BHZ')
	st.normalize()
	inv = inventory[0]

	st.plot()
	inv.plot()
	cat.plot()

	### Example 2 ###

	from obspy import UTCDateTime
	from sipy.util.data_request import data_request

	start = UTCDateTime(2010,1,1,0,0)
	end = UTCDateTime(2010,12,31,0,0)
	minmag = 8
	station = '034A'
	client = 'IRIS'
	cat_client = 'globalcmt'
	list_of_stream, inventory, cat = data_request(client, cat_client, start, end, minmag, net='TA', scode=station)
	
	st = list_of_stream[0]
	st = st.select(channel='BHZ')
	st.normalize()
	inv = inventory[0]

	st.plot()
	inv.plot()
	cat.plot()

	"""

	data =[]
	stream = Stream()
	streamall = []
	

	#build in different approach for catalog search, using urllib

	if cat_client_name == 'globalcmt':
		catalog = request_gcmt(starttime=start, endtime=end, minmagnitude=minmag, mindepth=mindepth, maxdepth=maxdepth, minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon)
		client = Client(client_name)
	else:	
		client = Client(client_name)
		try:
			catalog = client.get_events(starttime=start, endtime=end, minmagnitude=minmag, mindepth=mindepth, maxdepth=maxdepth, latitude=radialcenterlat, longitude=radialcenterlon, minradius=minrad, maxradius=maxrad,minlatitude=minlat, maxlatitude=maxlat, minlongitude=minlon, maxlongitude=maxlon)

		except:
			print("No events found for given parameters.")
			return

	print("Following events found: \n")
	print(catalog)
	m = TauPyModel(model="ak135")
	Plist = ["P", "Pdiff", "p"]
	for event in catalog:
		print("\n")
		print("########################################")
		print("Looking for available data for event: \n")
		print(event.short_str())
		print("\n")

		origin_t = event.origins[0].time
		station_stime = UTCDateTime(origin_t - 3600*24)
		station_etime = UTCDateTime(origin_t + 3600*24)

		try:
			inventory = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
			print("Inventory found.")
		except:
			print("No Inventory found for given parameters")
			return
		
		for network in inventory:

			elat = event.origins[0].latitude
			elon = event.origins[0].longitude
			depth = event.origins[0].depth/1000.

			array_fits = True
			if azimuth or baz:
				cog=center_of_gravity(network)
				slat = cog['latitude']
				slon = cog['longitude']			
				epidist = locations2degrees(slat,slon,elat,elon)
				arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
							                        phase_list=Plist)

				P_arrival_time = arrivaltime[0]

				Ptime = P_arrival_time.time
				tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
				tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)


				center = geometrical_center(inventory)
				clat = center['latitude']
				clon = center['longitude']
				if azimuth:
					print("Looking for events in the azimuth range of %f to %f" % (azimuth[0], azimuth[1]) )
					center_az = gps2dist_azimuth(clat, clon, elat, elon)[1]
					if center_az > azimuth[1] or center_az < azimuth[0]:
						print("Geometrical center of Array out of azimuth bounds, \nchecking if single stations fit")
						array_fits = False

				elif baz:
					print("Looking for events in the back azimuth range of %f to %f" %(baz[0], baz[1]))
					center_baz = gps2dist_azimuth(clat, clon, elat, elon)[2]
					if center_baz > baz[1] or center_baz < baz[0]:
						print("Geometrical center of Array out of back azimuth bounds, \nchecking if single stations fit")
						array_fits = False

			# If array fits to azimuth/back azimuth or no azimuth/back azimuth is given
			no_of_stations = 0
			if array_fits:

				for station in network:

					epidist = locations2degrees(station.latitude,station.longitude,elat,elon)
					arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
								                        phase_list=Plist)

					P_arrival_time = arrivaltime[0]

					Ptime = P_arrival_time.time
					tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
					tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)

					try:
						streamreq = client.get_waveforms(network=network.code, station=station.code, location='*', channel=channels, starttime=tstart, endtime=tend, attach_response=True)
						no_of_stations += 1
						print("Downloaded data for %i of %i available stations!" % (no_of_stations, network.selected_number_of_stations), end='\r' )
						sys.stdout.flush()
						stream 		   += streamreq
						try:
							if inventory_used:
								inventory_used 	+= client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
									
						except:
								inventory_used 	 = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
					except:
						continue


			# If not checking each station individually.
			else:
				for station in network:
					epidist = locations2degrees(station.latitude,station.longitude,elat,elon)
					arrivaltime = m.get_travel_times(source_depth_in_km=depth, distance_in_degree=epidist,
								                        phase_list=Plist)


					P_arrival_time = arrivaltime[0]

					Ptime = P_arrival_time.time
					tstart = UTCDateTime(event.origins[0].time + Ptime - t_before_first_arrival * 60)
					tend = UTCDateTime(event.origins[0].time + Ptime + t_after_first_arrival * 60)

					fit = False
					if azimuth:
						stat_az = gps2dist_azimuth(station.latitude, station.longitude, elat, elon)[1]
						if azimuth[0] <= stat_az <= azimuth[1]: fit = True
					elif baz:
						stat_baz = gps2dist_azimuth(station.latitude, station.longitude, elat, elon)[2]
						if baz[0] <= stat_baz <= baz[1]: fit = True
					if fit:
						try:
							streamreq = client.get_waveforms(network = network.code, station = station.code, location='*', channel = channels, starttime = tstart, endtime = tend, attach_response = True)
							no_of_stations += 1
							print("Downloaded data for %i of %i available stations!" % (no_of_stations, network.selected_number_of_stations), end='\r' )
							sys.stdout.flush()
							stream 		+= streamreq
							try:
								if inventory_used:
									inventory_used 	+= client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
							except:
									inventory_used 	 = client.get_stations(network=net, station=scode, level="station", starttime=station_stime, endtime=station_etime,
			 								minlatitude=station_minlat, maxlatitude=station_maxlat, minlongitude=station_minlon, maxlongitude=station_maxlon,
			 								latitude=station_radialcenterlat, longitude=station_radialcenterlon, minradius=station_minrad, maxradius=station_maxrad)
						except:

							continue

		try:
			if invall:
				invall += inventory
		except:
			invall 		= inventory

		attach_network_to_traces(stream, inventory)
		attach_coordinates_to_traces(stream, inventory, event)
		streamall.append(stream)
		stream = Stream()

	if savefile:
		stname = str(origin_t).split('.')[0] + ".MSEED"
		invname = stname + "_inv.xml"
		catname = stname + "_cat.xml"
		# 'stream' is reset after each event, so write the collected per-event
		# streams rather than the (now empty) working stream; invall holds the
		# inventories merged over all events
		for i, st_ev in enumerate(streamall):
			if len(st_ev) == 0:
				continue
			st_ev.write("%d_%s" % (i, stname), format=file_format)
		invall.write(invname, format="STATIONXML")
		catalog.write(catname, format="QUAKEML")

	plt.ion()
	#invall.plot()
	#catalog.plot()
	plt.ioff()
	inventory = invall
	list_of_stream = streamall
	return(list_of_stream, inventory, catalog)
Exemple #48
0
def event(baseurl='IRIS',
          playback=None,
          days=1,
          qml=None,
          fin=None,
          tin=None,
          country=None,
          **kwargs):

    try:
        client = Client(baseurl)
        print("Using " + baseurl + "...", file=sys.stderr)
    except:
        print("fdsn client failed", file=sys.stderr)
        from obspy.clients.fdsn.header import URL_MAPPINGS
        for key in sorted(URL_MAPPINGS.keys()):
            print("{0:<7} {1}".format(key, URL_MAPPINGS[key]), file=sys.stderr)

        sys.exit()

    try:
        kwargs['starttime'] = UTCDateTime(kwargs['starttime'])
        kwargs['endtime'] = UTCDateTime(kwargs['endtime'])
    except:
        kwargs['starttime'] = UTCDateTime() - float(days) * 24 * 60 * 60
        kwargs['endtime'] = UTCDateTime()

    print('kwargs:', kwargs, file=sys.stderr)
    try:
        cat = client.get_events(**kwargs)
    except:
        print('No response.', file=sys.stderr)
        sys.exit()

    if country is not None:
        ok = False
        limit = len(cat.events)
        kwargs['offset'] = len(cat.events)
        while not ok:
            ok = True
            remove = []
            kwargs['limit'] = 0
            for i, e in enumerate(cat.events):
                lalo = [
                    e.preferred_origin().latitude,
                    e.preferred_origin().longitude
                ]
                gcode = geocoder.osm(lalo, method='reverse').json
                if gcode['country_code'].lower() not in country.lower():
                    kwargs['limit'] += 1
                    ok = False
                    remove += [e]
                    print('removing %d (%s, %s): %s (requesting %d after %d)' %
                          (i, lalo[0], lalo[1], gcode['country_code'],
                           kwargs['limit'], kwargs['offset']),
                          file=sys.stderr)
            if not ok:
                for e in remove:
                    cat.events.remove(e)
                if len(cat.events) >= limit:
                    print('Clean stable catalog of %d events' %
                          len(cat.events),
                          file=sys.stderr)
                    break
                print('kwargs:', kwargs, file=sys.stderr)
                try:
                    tmp = client.get_events(**kwargs)
                except:
                    print('No more events than %d' % len(cat.events),
                          file=sys.stderr)
                    break
                cat += tmp
                kwargs['offset'] += len(tmp.events)
    for e in cat.events:
        print("Event \"%s\":\t%s" % (str(e.resource_id), e.short_str()),
              file=sys.stderr)

    if qml is not None:
        cat.write(qml, format='SC3ML')

    if fin is not None:
        with open(fin, 'w') as f:
            f.write('\n'.join([str(e.resource_id) for e in cat.events]) + '\n')
    if tin is not None:
        with open(tin, 'w') as f:
            for e in cat.events:
                o = e.preferred_origin_id.get_referred_object()
                f.write('%s %s\n' %
                        ((o.time - 60 * 3 / 9).strftime("%Y-%m-%dT%H:%M:%S"),
                         (o.time + 60 * 6 / 9).strftime("%Y-%m-%dT%H:%M:%S")))

    if playback is not None:
        if 'evid' in playback:
            for e in cat.events:
                print(playback % ("\"" + str(e.resource_id) + "\""))
        else:
            for e in cat.events:
                o = e.preferred_origin_id.get_referred_object()
                print(
                    playback %
                    ((o.time - 60 * 3 / 9).strftime("\"%Y-%m-%d %H:%M:%S\""),
                     (o.time + 60 * 6 / 9).strftime("\"%Y-%m-%d %H:%M:%S\"")))
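
# Usage sketch (parameter values are illustrative assumptions): fetch one week
# of M>=5 events and write their resource IDs to a file; extra keyword
# arguments are passed straight through to client.get_events().
#
#   event(baseurl='IRIS', days=7, fin='event_ids.txt', minmagnitude=5)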