示例#1
0
文件: util.py 项目: 717524640/obspyck
def get_event_info(starttime, endtime, streams):
    """
    Fetch events from the NERIES FDSN service and compute theoretical
    phase arrival times at every stream's station.

    :param starttime: Start of the time window of interest (UTCDateTime).
    :param endtime: End of the time window of interest (UTCDateTime).
    :param streams: Iterable of obspy Streams; the first trace of each
        must carry ``stats.station`` and ``stats.coordinates``.
    :returns: Tuple ``(events, arrivals, error_message)``.  On success
        ``error_message`` is ``None``; on any failure ``events`` and
        ``arrivals`` are ``None`` and ``error_message`` describes the
        problem.
    """
    events = []
    arrivals = {}
    try:
        client = FDSNClient("NERIES")
        # Start the query 20 minutes early so events that originated just
        # before the window but whose phases arrive inside it are found.
        events = client.get_events(starttime=starttime - 20 * 60,
                                   endtime=endtime)
        # Iterate over a reversed copy so events without arrivals can be
        # removed from the original list while iterating.
        for ev in events[::-1]:
            has_arrivals = False
            origin = ev.origins[0]
            origin_time = origin.time
            lon1 = origin.longitude
            lat1 = origin.latitude
            # origin.depth is in meters; the travel time tables expect km.
            depth = abs(origin.depth / 1e3)
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    # Convert relative travel time to absolute arrival time.
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                # BUG FIX: the original did ``events[:] = events[:-1]``,
                # which always drops the *last* event regardless of which
                # event actually lacked arrivals.  Remove the event that
                # was inspected instead.
                events.remove(ev)
    except Exception as e:
        msg = ("Problem while fetching events or determining theoretical "
               "phases: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
示例#2
0
文件: util.py 项目: 717524640/obspyck
def get_event_info(starttime, endtime, streams):
    """
    Fetch events from the NERIES FDSN service and compute theoretical
    phase arrival times at every stream's station.

    :param starttime: Start of the time window of interest (UTCDateTime).
    :param endtime: End of the time window of interest (UTCDateTime).
    :param streams: Iterable of obspy Streams; the first trace of each
        must carry ``stats.station`` and ``stats.coordinates``.
    :returns: Tuple ``(events, arrivals, error_message)``.  On success
        ``error_message`` is ``None``; on any failure ``events`` and
        ``arrivals`` are ``None`` and ``error_message`` describes the
        problem.
    """
    events = []
    arrivals = {}
    try:
        client = FDSNClient("NERIES")
        # Start the query 20 minutes early so events that originated just
        # before the window but whose phases arrive inside it are found.
        events = client.get_events(starttime=starttime - 20 * 60,
                                   endtime=endtime)
        # Iterate over a reversed copy so events without arrivals can be
        # removed from the original list while iterating.
        for ev in events[::-1]:
            has_arrivals = False
            origin = ev.origins[0]
            origin_time = origin.time
            lon1 = origin.longitude
            lat1 = origin.latitude
            # origin.depth is in meters; the travel time tables expect km.
            depth = abs(origin.depth / 1e3)
            for st in streams:
                sta = st[0].stats.station
                lon2 = st[0].stats.coordinates['longitude']
                lat2 = st[0].stats.coordinates['latitude']
                dist = locations2degrees(lat1, lon1, lat2, lon2)
                tts = getTravelTimes(dist, depth)
                list_ = arrivals.setdefault(sta, [])
                for tt in tts:
                    # Convert relative travel time to absolute arrival time.
                    tt['time'] = origin_time + tt['time']
                    if starttime < tt['time'] < endtime:
                        has_arrivals = True
                        list_.append(tt)
            if not has_arrivals:
                # BUG FIX: the original did ``events[:] = events[:-1]``,
                # which always drops the *last* event regardless of which
                # event actually lacked arrivals.  Remove the event that
                # was inspected instead.
                events.remove(ev)
    except Exception as e:
        msg = ("Problem while fetching events or determining theoretical "
               "phases: %s: %s" % (e.__class__.__name__, str(e)))
        return None, None, msg
    return events, arrivals, None
示例#3
0
def get_events():
    """Return the event catalog, preferring the cached QuakeML file.

    Falls back to querying the NERIES FDSN service with ``event_kwargs``,
    sorts the events chronologically and caches them to ``evname``.
    """
    print('Read event file')
    try:
        return readEvents(evname)
    except Exception:
        # Cache file missing or unreadable -- fetch a fresh catalog below.
        # (Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.)
        pass
    client = FSDNClient('NERIES')
    events = client.get_events(**event_kwargs)
    # Catalog has no guaranteed order; sort by first-origin time.
    events.events.sort(key=lambda e: e.origins[0].time)
    events.write(evname, 'QUAKEML')
    return events
示例#4
0
 def get_events(self):
     """
     Fetch an updated list of events spanning the current stream's
     time range, for use in the plot.
     """
     # Take the stream's extent under the lock so it cannot change
     # while we read it.
     with self.lock:
         window_start = min(tr.stats.starttime for tr in self.stream)
         window_end = max(tr.stats.endtime for tr in self.stream)
     client = Client("NERIES")
     return client.get_events(starttime=window_start, endtime=window_end,
                              minmagnitude=self.args.events)
示例#5
0
def get_events():
    """Return the event catalog, preferring the cached QuakeML file.

    Falls back to querying the NERIES FDSN service with ``event_kwargs``,
    sorts the events chronologically and caches them to ``evname``.
    """
    print('Read event file')
    try:
        return readEvents(evname)
    except Exception:
        # Cache file missing or unreadable -- fetch a fresh catalog below.
        # (Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.)
        pass
    client = FSDNClient('NERIES')
    events = client.get_events(**event_kwargs)
    # Catalog has no guaranteed order; sort by first-origin time.
    events.events.sort(key=lambda e: e.origins[0].time)
    events.write(evname, 'QUAKEML')
    return events
 def get_events(self):
     """
     Fetch an updated list of events spanning the current stream's
     time range, for use in the plot.
     """
     # Take the stream's extent under the lock so it cannot change
     # while we read it.
     with self.lock:
         window_start = min(tr.stats.starttime for tr in self.stream)
         window_end = max(tr.stats.endtime for tr in self.stream)
     client = Client("EMSC")
     return client.get_events(starttime=window_start, endtime=window_end,
                              minmagnitude=self.args.events)
示例#7
0
def get_events():
    """Return the event catalog, preferring the cached QuakeML file.

    Falls back to querying the FDSN service for M>=6 events 20-100
    degrees from (lat, lon) between t1 and t2, and caches the result
    to ``evname``.
    """
    try:
        return readEvents(evname)
    except Exception:
        # Cache file missing or unreadable -- fetch a fresh catalog below.
        # (Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.)
        pass
    client = Client()
    events = client.get_events(starttime=t1, endtime=t2, latitude=lat,
                               longitude=lon, minradius=20, maxradius=100,
                               minmagnitude=6.)
    events.write(evname, 'QUAKEML')
    return events
示例#8
0
def get_IRIS_WebServices_Catalog():
    '''Fetch the ISC catalog of M>=4 events for 2014-01-01..2014-05-25
    from the IRIS web services and print the response.'''
    window_start = obspy.core.utcdatetime.UTCDateTime('2014-01-01')
    window_end = obspy.core.utcdatetime.UTCDateTime('2014-05-25')

    iris = Client('IRIS')
    result = iris.get_events(catalog='ISC',
                             minmagnitude=4,
                             starttime=window_start,
                             endtime=window_end)
    print('catalog', result)
示例#9
0
def test_gmt_catalog():
    """Run an FDSN event query for the 2011-03 M>=8.9 window and check
    the first returned origin against known values (coordinates match
    the 2011-03-11 Japan mainshock)."""
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Override the parsed defaults for this test case.
    input_dics['min_date'] = UTCDateTime('2011-03-01')
    input_dics['max_date'] = UTCDateTime('2011-03-20')
    input_dics['min_mag'] = 8.9

    # Collect the query parameters in one place before issuing the request.
    query = dict(
        minlatitude=input_dics['evlatmin'],
        maxlatitude=input_dics['evlatmax'],
        minlongitude=input_dics['evlonmin'],
        maxlongitude=input_dics['evlonmax'],
        latitude=input_dics['evlat'],
        longitude=input_dics['evlon'],
        maxradius=input_dics['evradmax'],
        minradius=input_dics['evradmin'],
        mindepth=input_dics['min_depth'],
        maxdepth=input_dics['max_depth'],
        starttime=input_dics['min_date'],
        endtime=input_dics['max_date'],
        minmagnitude=input_dics['min_mag'],
        maxmagnitude=input_dics['max_mag'],
        orderby='time',
        catalog=None,
        magnitudetype=input_dics['mag_type'])

    client_fdsn = Client_fdsn(base_url=input_dics['event_catalog'])
    events_QML = client_fdsn.get_events(**query)

    origin = events_QML[0].preferred_origin()
    assert origin.latitude == 38.2963
    assert origin.longitude == 142.498
    assert origin.depth == 19700.0
示例#10
0
def test_gmt_catalog():
    """Run an FDSN event query for the 2011-03 M>=8.9 window and check
    the first returned origin against known values (coordinates match
    the 2011-03-11 Japan mainshock)."""
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Override the parsed defaults for this test case.
    input_dics['min_date'] = UTCDateTime('2011-03-01')
    input_dics['max_date'] = UTCDateTime('2011-03-20')
    input_dics['min_mag'] = 8.9

    # Collect the query parameters in one place before issuing the request.
    query = dict(
        minlatitude=input_dics['evlatmin'],
        maxlatitude=input_dics['evlatmax'],
        minlongitude=input_dics['evlonmin'],
        maxlongitude=input_dics['evlonmax'],
        latitude=input_dics['evlat'],
        longitude=input_dics['evlon'],
        maxradius=input_dics['evradmax'],
        minradius=input_dics['evradmin'],
        mindepth=input_dics['min_depth'],
        maxdepth=input_dics['max_depth'],
        starttime=input_dics['min_date'],
        endtime=input_dics['max_date'],
        minmagnitude=input_dics['min_mag'],
        maxmagnitude=input_dics['max_mag'],
        orderby='time',
        catalog=input_dics['event_catalog'],
        magnitudetype=input_dics['mag_type'])

    client_fdsn = Client_fdsn(base_url=input_dics['event_url'])
    events_QML = client_fdsn.get_events(**query)

    origin = events_QML[0].preferred_origin()
    assert origin.latitude == 38.2963
    assert origin.longitude == 142.498
    assert origin.depth == 19700.0
示例#11
0
#minmag=0.0
#maxmag=4.0

#tmin=UTCDateTime("2013-11-01 00:00:00")
#tmin=UTCDateTime(iyear+"-"+imm+"-"+idd+" "+ihh+":"+imi+":"+ise)
#print tmin

#tmax=UTCDateTime("2013-12-01 00:00:00")
#tmax=UTCDateTime(fyear+"-"+fmm+"-"+fdd+" "+fhh+":"+fmi+":"+fse)
#print tmax

# Query the FDSN event service for events between tmin and tmax, within
# rmin..rmax degrees of (lat, lon) and magnitude mmin..mmax, including
# phase arrivals.  All these variables are defined elsewhere in the script.
catalog=fdsn.get_events(starttime=tmin,
			endtime=tmax,
			includearrivals=True,
			minmagnitude=mmin,
			maxmagnitude=mmax,
			latitude=lat,
			longitude=lon,
			minradius=rmin,
			maxradius=rmax)

#catalog=fdsn.get_events(starttime=t0, endtime=t1, includearrivals=True,
#			includepicks=True, format="catalog")

#print catalog.__str__(print_all=True)

#cat2=catalog.filter("magnitude <= minmag")
#print cat2.__str__(print_all=True)

# Output file for one line per event, written by the loop below.
# NOTE(review): opened without a context manager; the close is not
# visible here -- consider ``with open(...)``.
f=open("evlist.txt","w")
for event in catalog:
示例#12
0
        os.makedirs(path)
        return True
    else:
        print path + ' exists'
        return False

if __name__ == '__main__':
    # Fetch all ISC-catalog events with M >= 5.5 during the XF network
    # deployment window, plus the network's station metadata with full
    # instrument responses, then iterate over the events.
    client = Client("IRIS")
    network = "XF"
    starttime = UTCDateTime("2003-06-01")
    endtime = UTCDateTime("2003-11-01")
    # endtime = UTCDateTime("1999-05-19")
    # endtime = UTCDateTime("1998-12-04")
    # endtime = UTCDateTime("1998-12-05")

    events = client.get_events(starttime=starttime, endtime=endtime,
                               minmagnitude=5.5, catalog="ISC")
    # events.plot()

    stations = client.get_stations(network=network, station="H*",
                                   starttime=starttime, endtime=endtime,
                                   level="response")
    # stations.plot()
    # stations.plot(projection=u'local', resolution=u'f')
    default_dir = os.getcwd() + '/data_60_120/'
    for event in events:
        # Use the first (not necessarily preferred) origin of each event.
        origin = event.origins[0]
        print origin
        event_latitude = origin.latitude
        event_longitude = origin.longitude
        # NOTE(review): the trailing comment says km, but obspy origins
        # carry depth in meters -- confirm downstream usage.
        event_depth = origin.depth  # km
        event_time = origin.time
示例#13
0
文件: teste.py 项目: mprocha/scripts
# Query an FDSN server at UnB for one day of events and print an
# "id author event-type" line per event (Python 2 script).
from obspy.core.event import readEvents

fdsn=Client(base_url="http://datasisint.unb.br:8080")

# Day-of-year timestamps: 2015 day 324 to day 325 (a one-day window).
tmin = UTCDateTime("2015-324")
tmax = UTCDateTime("2015-325")
# The magnitude/radius/location limits below are currently unused: the
# get_events call passes only the time window (see the commented-out
# keyword list after it).
mmin=1
mmax=9
rmin=3
rmax=10
lat=-11.6
lon=-56.7

#print str(tmin)+" "+str(tmax)+" "+str(mmin)+" "+str(mmax)+" "+str(rmin)+" "+str(rmax)+" "+str(lat)+" "+str(lon)

catalog=fdsn.get_events(starttime=tmin, endtime=tmax)

#, includearrivals=True, minmagnitude=str(mmin), maxmagnitude=str(mmax), latitude=str(lat), longitude=str(lon), minradius=str(rmin), maxradius=str(rmax))

# For each event: strip the SeisComP resource-id prefix to get a short id
# and normalize the event type to a dash-separated token.
for event in catalog:
        evpref=event.preferred_origin()
	author=evpref.creation_info.author
        #evid=evpref.resource_id
        evid=event.resource_id.id.replace("smi:scs/0.6/","")
        evtype=str(event.event_type).replace(" ","-")
	 

        print str(evid)+" "+author+" "+evtype
#print catalog

示例#14
0
def get_cat(data_center=None, **kwargs):
    '''
    Fetch an event catalog from an FDSN data center and normalize
    inconsistent "quarry blast" event-type spellings in the raw QuakeML
    before parsing it.

    :type data_center: str, optional
    :param data_center: FDSN data center key, e.g. 'IRIS'.  Defaults to
        'USGS' when not given.
    :param kwargs: Passed straight through to ``Client.get_events``; see
        the obspy FDSN client documentation for the full list (starttime,
        endtime, min/maxlatitude, min/maxlongitude, latitude/longitude
        with min/maxradius, min/maxdepth, min/maxmagnitude,
        magnitudetype, orderby, catalog, contributor, limit, offset,
        eventid, updatedafter, includearrivals, ...).
    :returns: the parsed catalog with normalized event-type strings.
    '''
    #get the catalog
    if data_center is None:
        data_center = 'USGS'

    client = Client(data_center)
    sio = StringIO()
    # Passing filename= makes get_events write the raw QuakeML into the
    # buffer instead of returning a parsed object, so the text can be
    # patched before obspy reads it.
    client.get_events(filename=sio, **kwargs)

    # Inconsistent spellings (keys) and the canonical value to replace
    # them with.
    rep = {
        "quarry_blast": "quarry blast",
        "quarry": "quarry blast",
        "quarry blast_blast": "quarry blast"
    }

    # Build a single regex matching any of the variants.
    # (items() instead of the Python-2-only iteritems() -- works on both.)
    rep = dict((re.escape(k), v) for k, v in rep.items())
    pattern = re.compile("|".join(rep.keys()))

    # BUG FIX: read the buffer via getvalue() rather than the internal
    # ``.buf`` attribute -- in the Python 2 StringIO module writes are
    # accumulated in ``buflist`` and only consolidated into ``buf`` by
    # getvalue(), so ``.buf`` could silently return partial data.
    sio2 = StringIO()
    sio2.write(pattern.sub(lambda m: rep[re.escape(m.group(0))],
                           sio.getvalue()))

    #read the catalog from this StringIO object
    sio2.seek(0)
    cat = readEvents(sio2)
    return cat
示例#15
0
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.

        Service discovery is simulated with mocked WADL responses; each
        request type is then checked to hit its custom-mapped base URL.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulation service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(os.path.join(
                        self.datapath, "2014-01-07_iris_event.wadl"),
                        "rb") as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_station.wadl"), "rb") as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(os.path.join(
                        self.datapath,
                        "2014-01-07_iris_dataselect.wadl"), "rb") as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://other_url.com/beta/event_service/11"
        base_url_station = "http://some_url.com/beta2/station/7"
        base_url_ds = "http://new.com/beta3/dataselect/8"

        # An exception will be raised if not actual WADLs are returned.
        # Catch warnings to avoid them being raised for the tests.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            c = Client(base_url=base_url, service_mappings={
                "event": base_url_event,
                "station": base_url_station,
                "dataselect": base_url_ds,
            })
        for warning in w:
            self.assertTrue("Could not parse" in str(warning) or
                            "cannot deal with" in str(warning))

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D", UTCDateTime() - 100,
                            UTCDateTime())
        # The mocked 404 makes the request fail; only the requested URL
        # matters.  Narrowed from a bare ``except:`` so that
        # KeyboardInterrupt/SystemExit still propagate.
        except Exception:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except Exception:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except Exception:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
示例#16
0
#convert start date from datetime object to a UTCDateTime object
start = UTCDateTime(date_start)

# set minimum magnitude threshold to search for.
min_mag = 2.0

# Geographic bounding box for the event search (degrees).
minlat = -39.1 #minlatitude
maxlat = -37.0 # maxlatitude
minlon = 143.5 #minlongitude
maxlon = 147.0 #maxlongitude

# NOTE(review): the query passes date_start (the original datetime), not
# the UTCDateTime ``start`` computed above -- confirm which is intended.
cat = client.get_events(starttime=date_start, 
                        endtime=date_end, 
                        minlatitude=minlat,
                        maxlatitude=maxlat,
                        minlongitude=minlon,
                        maxlongitude=maxlon,
                        minmagnitude=min_mag)

#print(cat)

# Plot the catalog map and print every event, not just the summary.
cat.plot()
print(cat.__str__(print_all=True))

# Network / station codes used further below.
net = 'AU' 

stat = 'TOO'

date_start = UTCDateTime("2003-10-18T10:29:26.580000Z")
示例#17
0
@author: boland
"""

from obspy.fdsn.header import URL_MAPPINGS
from obspy import UTCDateTime
from obspy.fdsn import Client

# create example start and end times for event search
starttime = UTCDateTime('2014-01-01T00:00.000')
endtime = UTCDateTime('2015-01-01T00:00.000')

endtime = UTCDateTime('2014-02-01T00:00.000')

# create list of possible servers to find earthquake events
server_list = []
for key in sorted(URL_MAPPINGS.keys()):
    server_list.append(key)

for server in server_list:
    print server
    client = Client(server)
    try:
        cat = client.get_events(starttime=starttime,
                                endtime=endtime,
                                minmagnitude=4)  #, catalog="ISC")
        print cat
        cat.plot()
    except:
        continue

print "done"
示例#18
0
#convert start date from datetime object to a UTCDateTime object
start = UTCDateTime(date_start)

# set minimum magnitude threshold to search for.
min_mag = 2.0

# Geographic bounding box for the event search (degrees).
minlat = -39.1  #minlatitude
maxlat = -37.0  # maxlatitude
minlon = 143.5  #minlongitude
maxlon = 147.0  #maxlongitude

# NOTE(review): the query passes date_start (the original datetime), not
# the UTCDateTime ``start`` computed above -- confirm which is intended.
cat = client.get_events(starttime=date_start,
                        endtime=date_end,
                        minlatitude=minlat,
                        maxlatitude=maxlat,
                        minlongitude=minlon,
                        maxlongitude=maxlon,
                        minmagnitude=min_mag)

#print(cat)

# Plot the catalog map and print every event, not just the summary.
cat.plot()
print(cat.__str__(print_all=True))

# Network / station codes used further below.
net = 'AU'

stat = 'TOO'

date_start = UTCDateTime("2003-10-18T10:29:26.580000Z")
示例#19
0
    def test_download_urls_for_custom_mapping(self, download_url_mock):
        """
        Tests the downloading of data with custom mappings.

        Service discovery is simulated with mocked WADL responses; each
        request type is then checked to hit its custom-mapped base URL.
        """
        base_url = "http://example.com"

        # More extensive mock setup simulation service discovery.
        def custom_side_effects(*args, **kwargs):
            if "version" in args[0]:
                return 200, "1.0.200"
            elif "event" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_event.wadl")) as fh:
                    return 200, fh.read()
            elif "station" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_station.wadl")) as fh:
                    return 200, fh.read()
            elif "dataselect" in args[0]:
                with open(
                        os.path.join(self.datapath,
                                     "2014-01-07_iris_dataselect.wadl")) as fh:
                    return 200, fh.read()
            return 404, None

        download_url_mock.side_effect = custom_side_effects

        # Some custom urls
        base_url_event = "http://other_url.com/beta/event_service/11"
        base_url_station = "http://some_url.com/beta2/station/7"
        base_url_ds = "http://new.com/beta3/dataselect/8"
        # An exception will be raised if not actual WADLs are returned.
        c = Client(base_url=base_url,
                   service_mappings={
                       "event": base_url_event,
                       "station": base_url_station,
                       "dataselect": base_url_ds,
                   })

        # Test the dataselect downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_waveforms("A", "B", "C", "D",
                            UTCDateTime() - 100, UTCDateTime())
        # The mocked 404 makes the request fail; only the requested URL
        # matters.  Narrowed from a bare ``except:`` so that
        # KeyboardInterrupt/SystemExit still propagate.
        except Exception:
            pass
        self.assertTrue(
            base_url_ds in download_url_mock.call_args_list[0][0][0])

        # Test the station downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_stations()
        except Exception:
            pass
        self.assertTrue(
            base_url_station in download_url_mock.call_args_list[0][0][0])

        # Test the event downloading.
        download_url_mock.reset_mock()
        download_url_mock.side_effect = None
        download_url_mock.return_value = 404, None
        try:
            c.get_events()
        except Exception:
            pass
        self.assertTrue(
            base_url_event in download_url_mock.call_args_list[0][0][0])
示例#20
0
#!/usr/bin/env python

#This speeds up the process of fetching seismic data: make a global catalog of events to access. This takes a very long time to run,
#but creates a file of up-to-date earthquake information that can be accessed by the trace fetch program

from obspy.fdsn import Client
from obspy import UTCDateTime
import datetime

today = datetime.date.today()

# Download every M>=5.0 event from 1970 through today straight to disk:
# passing filename= makes get_events write the raw response to the file
# instead of returning a parsed catalog.
client = Client('IRIS')
t1 = UTCDateTime(1970, 1, 1)
t2 = UTCDateTime(today.year, today.month, today.day)
client.get_events(starttime=t1,
                  endtime=t2,
                  filename='global_quake_cat.dat',
                  minmagnitude=5.0)  #all global events in the catalog

#write the parsed version of this file,ready to manipulate.
# NOTE(review): outfile is opened without a context manager and its
# close is not visible here -- consider ``with open(...)``.
outfile = open('globalquake_parsed.dat', 'w')
infile = open('global_quake_cat.dat', 'r')
lines = infile.readlines()
infile.close()
# Accumulators filled by the parsing loop below.
depths = []
lats = []
lons = []
times = []
mags = []
for i, j in enumerate(lines):
    if j.strip() == '<depth>':
        depth = float(
示例#21
0
def get_cat(data_center=None, **kwargs):
    '''
    Fetch an event catalog from an FDSN data center and normalize
    inconsistent "quarry blast" event-type spellings in the raw QuakeML
    before parsing it.

    :type data_center: str, optional
    :param data_center: FDSN data center key, e.g. 'IRIS'.  Defaults to
        'USGS' when not given.
    :param kwargs: Passed straight through to ``Client.get_events``; see
        the obspy FDSN client documentation for the full list (starttime,
        endtime, min/maxlatitude, min/maxlongitude, latitude/longitude
        with min/maxradius, min/maxdepth, min/maxmagnitude,
        magnitudetype, orderby, catalog, contributor, limit, offset,
        eventid, updatedafter, includearrivals, ...).
    :returns: the parsed catalog with normalized event-type strings.
    '''
    #get the catalog
    if data_center is None:
        data_center = 'USGS'

    client = Client(data_center)
    sio = StringIO()
    # Passing filename= makes get_events write the raw QuakeML into the
    # buffer instead of returning a parsed object, so the text can be
    # patched before obspy reads it.
    client.get_events(filename=sio, **kwargs)

    # Inconsistent spellings (keys) and the canonical value to replace
    # them with.
    rep = {
        "quarry_blast": "quarry blast",
        "quarry": "quarry blast",
        "quarry blast_blast": "quarry blast"
    }

    # Build a single regex matching any of the variants.
    # (items() instead of the Python-2-only iteritems() -- works on both.)
    rep = dict((re.escape(k), v) for k, v in rep.items())
    pattern = re.compile("|".join(rep.keys()))

    # BUG FIX: read the buffer via getvalue() rather than the internal
    # ``.buf`` attribute -- in the Python 2 StringIO module writes are
    # accumulated in ``buflist`` and only consolidated into ``buf`` by
    # getvalue(), so ``.buf`` could silently return partial data.
    sio2 = StringIO()
    sio2.write(pattern.sub(lambda m: rep[re.escape(m.group(0))],
                           sio.getvalue()))

    #read the catalog from this StringIO object
    sio2.seek(0)
    cat = readEvents(sio2)
    return cat
示例#22
0
# enter the information for catalogue 1 search
# Bounding box for the event search (degrees): (minlat, maxlat, minlon, maxlon).
minlat, maxlat, minlon, maxlon = (-40.0, -12.5, 113.0, 154.0) 

# Events from every queried catalogue are merged into this single list.
event_list = []

# Query each data center in client_list (defined elsewhere) for the same
# time window / bounding box and collect the events; failures for one
# center are printed and do not stop the others.
for c in client_list:
    print "Processing events from the {} catalogue ... \n".format(c)


    try:
        client = Client(c)

        catalogue = client.get_events(starttime=t1, endtime=t2,
                                           minlatitude=minlat,
                                           maxlatitude=maxlat,
                                           minlongitude=minlon, 
                                           maxlongitude=maxlon)

            
        for i in catalogue:
            event_list.append(i)
            
    except Exception as error: 
        print error
        

print event_list
 
 
示例#23
0
def get_quakes(startcoors,endcoors,minmag=5.0):
    '''Get earthquakes within a region around the start and end coordinates.

    These will be plotted on the section, with x distance along the profile.

    :param startcoors: (lon, lat) of the profile start point
    :param endcoors: (lon, lat) of the profile end point
    :param minmag: minimum event magnitude to request (default 5.0)
    :returns: tuple ``(Xdistances_quakes, Ydepths_quakes, maxdist, topoX,
        topoY)`` — along-profile distance and depth (km, negative down) of
        each quake within 50 km of the line, the profile length, and the
        topographic section arrays.

    Side effects: queries the USGS FDSN service, shells out to the external
    ``gmt`` tools, and writes/removes several scratch files in the working
    directory.  Relies on the module-level ``topofile`` grid.
    '''

    client = Client('USGS')

    #Only get intermediate depth quakes, or deeper
    mindepth = 60
    boxHW = 0.5  # half-width (degrees) of the search box around the endpoints

    startlon = startcoors[0] - boxHW
    startlat = startcoors[1] + boxHW

    endlon = endcoors[0] + boxHW
    endlat = endcoors[1] - boxHW

    minlon = min(startlon,endlon)
    maxlon = max(startlon,endlon)

    minlat = min(startlat,endlat)
    maxlat = max(startlat,endlat)

    starttime = '1970-01-01'
    endtime = str(datetime.datetime.today()).split(' ')[0]

    print('---------------------\nUsing Obspy to get quakes\n---------------------')

    quakecat = client.get_events(starttime=UTCDateTime(starttime), endtime=UTCDateTime(endtime), minlongitude=minlon, maxlongitude=maxlon, minlatitude=minlat, maxlatitude=maxlat, minmagnitude=minmag,mindepth=mindepth)

    #Get the moment tensors for these events, if they exist
    #Currenlty not working
    #quakes,mts = cat2list(quakecat)
    #focmecs = [row[4:] for row in mts]

    #Write lon/lat/depth of every event; 'with' guarantees the handle is
    #closed even if an origin is malformed (the original leaked it on error).
    with open('quakedata.dat','w') as quakefile:
        for event in quakecat:
            evlon = event.origins[0].longitude
            evlat = event.origins[0].latitude
            evdep = event.origins[0].depth
            quakefile.write('%s %s %s\n' %(evlon,evlat,evdep))

    #Work out the distance from each quake to the profile line, and write to a file
    #Create the section coordinates. Should make some sort of auto-decision about the spacing
    sectionname = 'tmp_toposection.dat'

    print('---------------------\nMaking section through topography\n---------------------')

    os.system('gmt project -C%g/%g -E%g/%g -G10 -Q > %s' %(minlon,minlat,maxlon,maxlat,sectionname))
    os.system('gmt grdtrack %s -G%s > gridsectiontopo.dat' %(sectionname,topofile))

    #Open the topo file and extract the longest distance. This will be used to scale the quake locations
    topoX = []
    topoY = []

    with open('gridsectiontopo.dat','r') as infile:
        for line in infile:
            vals = line.split()
            topoX.append(float(vals[2]))
            topoY.append(float(vals[3]))

    maxdist = topoX[-1]
    topoX = np.array(topoX)
    topoY = np.array(topoY)

    print('---------------------\nGetting quake distances\n---------------------')

    #Make a file containing quakelon, quakelat, dist, and dist along profile
    os.system('gmt mapproject quakedata.dat -Lgridsectiontopo.dat/k > quake_dist.dat')

    #Reorder the columns and do another grdtrack to get distance along the profile
    os.system("awk '{print $5,$6,$1,$2,$3,$4}' quake_dist.dat > quaketmp.dat")
    os.system("rm quake_dist.dat")

    #Now, calculate distance along the profile from the start point
    os.system('gmt mapproject quaketmp.dat -G%g/%g/k > quake_points.dat' %(minlon,minlat))
    os.system('rm quaketmp.dat')

    #Now, open the newly created file and pull the distance data
    Xdistances_quakes = []
    Ydepths_quakes = []

    with open('quake_points.dat','r') as infile1:
        for line in infile1:
            vals = line.split(' ')
            try:
                evlon = float(vals[0].split('\t')[-1])
                evlat = float(vals[1])
                evdep = float(vals[2])
                evdist = float(vals[3].split('\t')[-2])
                evdistalong = float(vals[3].split('\t')[-1])

                #Only keep if the distance between the event and the profile line is less then 50km
                if evdist <= 50:
                    Xdistances_quakes.append(evdistalong)
                    Ydepths_quakes.append(-evdep/1000.0)

            #Some events have no depth, producing unparseable rows; skip them.
            #Narrowed from the original bare 'except', which also hid real bugs.
            except (ValueError, TypeError, IndexError):
                continue

    os.system('rm quake_points.dat')

    return Xdistances_quakes, Ydepths_quakes, maxdist, topoX, topoY
示例#24
0
database_name = '/storage/MASTERS/CONFIGURATIONS/S_NETWORK/INPUT/DATABASES/timeline.db'

# enter the information for catalogue 1 search
minlat, maxlat, minlon, maxlon = (-40.0, -12.5, 113.0, 154.0)

event_list = []

# Pool events from every FDSN data centre in client_list; a failing centre
# is reported but does not stop the remaining queries.
# NOTE: client_list, Client, t1, t2 and Catalog are defined earlier in the file.
for c in client_list:
    # Single-argument print(...) behaves identically on Python 2 and 3,
    # unlike the original bare print statement.
    print("Processing events from the {} catalogue ... \n".format(c))

    try:
        client = Client(c)

        catalogue = client.get_events(starttime=t1,
                                      endtime=t2,
                                      minlatitude=minlat,
                                      maxlatitude=maxlat,
                                      minlongitude=minlon,
                                      maxlongitude=maxlon)

        for i in catalogue:
            event_list.append(i)

    except Exception as error:
        # Best effort: report and move on to the next data centre.
        print(error)

print(event_list)

# Combine the pooled events into a single Catalog, but only if anything
# was actually retrieved.
if len(event_list) > 0:
    final_catalogue = Catalog(events=event_list)

    print(final_catalogue)