Code Example #1
File: eventmaps.py  Project: vunnithan/shakeevents
import numpy as np
import matplotlib
matplotlib.use('agg')  # non-interactive backend for headless plotting
import matplotlib.pyplot as plt
from datetime import timedelta
from obspy import UTCDateTime
from obspy.core.event import Catalog
from obspy.clients.fdsn import Client

iris = Client("IRIS")
t2 = UTCDateTime.now()
t1 = t2 - timedelta(days=30)

cat = Catalog()
cat2 = Catalog()

try:
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
                           latitude=YOUR_LATITUDE,    # placeholder: your station latitude
                           longitude=YOUR_LONGITUDE,  # placeholder: your station longitude
                           maxradius=15)
except Exception:  # e.g. FDSNNoDataException when no events match
    pass

try:
    cat2 += iris.get_events(starttime=t1, endtime=t2, minmagnitude=6)
except Exception:  # e.g. FDSNNoDataException when no events match
    pass
print(cat.__str__(print_all=True))
print(cat2.__str__(print_all=True))

cat.write('evtlocal30days.xml', format='QUAKEML')
cat2.write('evtmajor30days.xml', format='QUAKEML')
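
The two QuakeML files written above can be read back into Catalog objects with obspy.read_events; a minimal sketch:

import obspy

local_cat = obspy.read_events('evtlocal30days.xml')  # local events from the last 30 days
major_cat = obspy.read_events('evtmajor30days.xml')  # M >= 6 events from the last 30 days
print(len(local_cat), len(major_cat))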
Code Example #2
from obspy.clients.fdsn.client import Client
import obspy

client = Client('IRIS')
starttime = obspy.core.utcdatetime.UTCDateTime('2011-12-01')
endtime = obspy.core.utcdatetime.UTCDateTime('2011-12-31')
cat = client.get_events(starttime=starttime,
                        endtime=endtime,
                        minmagnitude=5.5,
                        catalog='ISC',
                        magnitudetype='mb')
print(cat)  # a bare "cat" only displays the catalog in an interactive session
Code Example #3
 def test_to_pyrocko_events(self):
     from obspy.clients.fdsn.client import Client
     client = Client('IRIS')
     cat = client.get_events(eventid=609301)
     events = cat.to_pyrocko_events()
     self.assertEqual(len(events), len(cat))
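
Note that to_pyrocko_events() is not a native ObsPy Catalog method; pyrocko grafts it onto ObsPy classes through its compatibility layer, so the test above presumably relies on a setup along these lines:

from pyrocko import obspy_compat

obspy_compat.plant()  # monkey-patches ObsPy classes, adding to_pyrocko_events() and related converters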
Code Example #4
File: extract.py  Project: nikosT/FDSNWS_2_FMNEAR
            sds = Client_SDS(cfg.sds_url)

        logging.info("Retrieving event's info...")

        # get event info:
        # get_events returns a Catalog object, and the first element
        # (an Event object) is retrieved; the query looks for events
        # within 1 second of the input event time, and if two events
        # fall in that window only the first one is used
        event = fdsn.get_events(
            starttime=date,
            #endtime=date+1,
            minmagnitude=cfg.mag_thres,
            magnitudetype="MLh",
            includeallorigins=False,
            includeallmagnitudes=False,
            includearrivals=True,
            orderby="time-asc",
            limit=1)[0]

        # the requested time window
        # of which start time is set 'timewindow_start' minutes before the Origin
        # and end time is set 'timewindow_end' minutes after the Origin
        starttime = event.origins[0].time - cfg.timewindow_start  #sec
        endtime = event.origins[0].time + cfg.timewindow_end  #sec

        # select those picks whose stations
        # have not been excluded;
        # picks/stations are sorted in ascending order of pick time
        picks=filter(lambda pick: not pick.waveform_id['station_code'] \
Code Example #5
def preprocess():
    """
    Preprocess the MSEED files in the input directories specified in
    the input file.

    Assumes module-level context: an MPI communicator (comm, rank, size),
    a configuration object cfg, and the helpers find_files, PrepStream
    and get_event_filter.
    """


    # Create output directory, if necessary

    outdir = os.path.join('data','processed')
     
    if rank == 0 and not os.path.exists(outdir):
        os.mkdir(outdir)
    if rank == 0 and cfg.verbose:
        print(cfg.__dict__)
    
    comm.Barrier()

    event_filter = None
    local_cat = Catalog()  # initialize here so it exists even when no local catalog is requested below

    if cfg.gcmt_exclude:

        if rank == 0:
            c = Client()
            cata = c.get_events(starttime=UTCDateTime(cfg.gcmt_begin),
                endtime=UTCDateTime(cfg.gcmt_end),catalog='GCMT',
                minmagnitude=5.6)
    
            event_filter = get_event_filter(cata,cfg.Fs_new[-1],
                t0=UTCDateTime(cfg.gcmt_begin),
                t1=UTCDateTime(cfg.gcmt_end))

        
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        event_filter = comm.bcast(event_filter,root=0)
    
    if cfg.event_exclude_local_cat:

        local_cat = Catalog()
        
        if rank == 0:
            c = Client()
            local_cat.extend(c.get_events(
                    starttime=UTCDateTime(cfg.event_exclude_local_cat_begin),
                    endtime=UTCDateTime(cfg.event_exclude_local_cat_end),
                    #catalog=catalog,
                    minmagnitude=cfg.event_exclude_local_cat_minmag,
                    latitude=cfg.event_exclude_local_cat_lat,
                    longitude=cfg.event_exclude_local_cat_lon,
                    maxradius=cfg.event_exclude_local_cat_radius))
            print(len(local_cat),"events in local earthquake catalog.")
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        local_cat = comm.bcast(local_cat,root=0)

    # Create own output directory, if necessary
    rankdir = os.path.join(outdir,
        'rank_%g' %rank)
    if not os.path.exists(rankdir):
        os.mkdir(rankdir)

    
    #- Find input files
    
    content = find_files(cfg.input_dirs,
        cfg.input_format)
    if rank==0:
        print(len(content), "files found") 
    #print(content)

    # processing report file
    sys.stdout.flush()
    output_file = os.path.join(rankdir,
        'processing_report_rank%g.txt' %rank)
    
    if os.path.exists(output_file):
        ofid = open(output_file,'a')
        print('UPDATING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)
    else:
        ofid = open(output_file,'w')
        print('PROCESSING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)


    # select input files for this rank    
    content = content[rank::size]
    if cfg.testrun: # Only 3 files randomly selected
        indices = randint(0,len(content),3)
        content = [content[j] for j in indices]

    # Loop over input files
    for filepath in content:
        
        print('-------------------------------------',file=ofid)
        print('Attempting to process:',file=ofid)
        print(os.path.basename(filepath),file=ofid)
        
        try:
            prstr = PrepStream(filepath,ofid)
        except Exception:
            print('** Problem opening file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        if len(prstr.stream) == 0:
            print('** No data in file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue
        
        try:
            prstr.prepare(cfg)
        except Exception:
           print('** Problems preparing stream: ',file=ofid)
           print('** %s' %filepath,file=ofid)
           continue
            
        try:
            prstr.process(cfg,event_filter,local_cat)
        except Exception:
            print('** Problems processing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        try:
            prstr.write(rankdir,cfg)
        except Exception:
            print('** Problems writing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)

        ofid.flush()
        
    ofid.close()

    print("Rank %g has completed processing." 
        %rank,file=None)
    
    
    try:
        os.system('mv '+rankdir+'/* '+outdir)
    except Exception:
        pass

    os.system('rmdir '+rankdir)
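
The function above references several names it never defines; a minimal sketch of the module-level setup it appears to assume (inferred from usage, so treat the details as assumptions):

import os
import sys
import time
from mpi4py import MPI
from numpy.random import randint
from obspy import UTCDateTime
from obspy.core.event import Catalog
from obspy.clients.fdsn import Client

comm = MPI.COMM_WORLD   # communicator shared by all ranks
rank = comm.Get_rank()  # rank of this process
size = comm.Get_size()  # total number of ranks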
Code Example #6
File: plotevents.py  Project: iannesbitt/eqplots
# snippet from plotevents.py; assumes iris = Client("IRIS") plus DURATION, p2,
# YOUR_LATITUDE, YOUR_LONGITUDE, LOCAL_MAG, LOCAL_RADIUS, TitleCase and
# CreationInfo are defined earlier in the file
t2 = UTCDateTime.now()
t2str = t2.strftime('%Y-%m-%d %H:%M UTC')
t1 = t2 - timedelta(days=DURATION)

cat = Catalog()
nrcat = Catalog()
cat2 = Catalog()

####### LOCAL ########

try:
    print('%s%% - Getting local earthquakes within %s degrees from IRIS...' %
          (p2, LOCAL_RADIUS))
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
                           latitude=YOUR_LATITUDE,
                           longitude=YOUR_LONGITUDE,
                           minmagnitude=LOCAL_MAG,
                           maxradius=LOCAL_RADIUS)
    for evt in cat:
        evt['event_descriptions'][0]['text'] = TitleCase(
            evt['event_descriptions'][0]['text'].split('/')[0])
        try:
            evt['magnitudes'][0]['creation_info']['author'] = evt['origins'][
                0]['creation_info']['author'].split(',')[0]
        except TypeError as e:
            au = evt['origins'][0]['creation_info']['author'].split(',')[0]
            tm = evt['origins'][0]['time']
            evt['magnitudes'][0]['creation_info'] = CreationInfo(author=au,
                                                                 time=tm)
            print(
                "%s%% - Warning: manually assigned a quake CreationInfo (author: %s, time: %s)"
Code Example #7
class Downloader:

    def __init__(self,df,station,outdir):

        self.station = station
        self.data = df
        self.out = outdir
        # self.summary = [] # list to hold all tr_ids

        # create the station output directory if it does not already exist
        try:
            os.mkdir('{}/{}'.format(self.out,station))
        except FileExistsError:
            print('It already exists, Hooray! Less work for me!')

        #self.outfile = open('/Users/ja17375/Shear_Wave_Splitting/Data/SAC_files/{}/{}_downloaded_streams_Jacks_Split.txt'.format(station,station),'w+')

        self.attempts = 0 # counter for attempted downloads
        self.fdsnx = 0 # counter for attempts that hit an FDSNNoDataException
        self.dwn = 0 # counter for events that were downloaded
        self.ex = 0 # counter for events that already exist on disk and therefore weren't downloaded
        self.ts = 0 # counter for events whose traces are too short
        self.fdsnclient_evt = Client('IRIS') # separate client for events (hopefully to get around the "no event available" bug)
        self.fdsnclient = Client('IRIS')
#       Download Station Data

    def download_station_data(self):
        """
        Download or read important station data and make sure it is right
        """
        try:
            stat = self.fdsnclient.get_stations(channel='BH?', station=self.station)
            self.network = stat.networks[0].code
            self.stla = stat.networks[0].stations[0].latitude
            self.stlo = stat.networks[0].stations[0].longitude
            # print(self.network)
            return True
        except FDSNNoDataException:
            return False

    def set_event_data(self,i,sep):
        """
        Download event information so we can get more accurate start times
        """
        self.evla = self.data.EVLA[i]
        self.evlo = self.data.EVLO[i]
        if sep is False:

            self.date = self.data.DATE[i]
            if 'TIME' in self.data.columns:
                self.time = self.data.TIME[i]
            else:
                self.time = '0000'

            datetime = str(self.date) + "T" + self.time # combined date and time inputs for conversion to a UTCDateTime object
            self.start = obspy.core.UTCDateTime(datetime)

            try:
                if 'TIME' in self.data.columns:
                    end = self.start + 60
                else:
                    # no TIME given, so we need to search over the whole day
                    end = self.start + 86400
                print('Search starts {} , ends at {}'.format(self.start,end))
                # query the catalogue to get a more accurate origin time;
                # note: the original passed endtime=self.start+86400 in both
                # branches, ignoring the 60 s window computed above, and used
                # minmag (the raw FDSN alias) instead of ObsPy's minmagnitude
                cat = self.fdsnclient_evt.get_events(starttime=self.start,endtime=end,latitude=self.evla,longitude=self.evlo,maxradius=0.25,minmagnitude=5.5)
                if len(cat) > 1:
                    print("WARNING: MORE THAN ONE EVENT OCCURS WITHIN 5km Search!!")
                    print('Selecting Event with the largest magnitude')
                    # Select biggest magnitude
                    max_mag = max(c.magnitudes[0].mag for c in cat)  # also avoids shadowing the method argument i
                    cat = cat.filter('magnitude >= {}'.format(max_mag))
                    print(cat)

                self.time = '{:02d}{:02d}{:02d}'.format(cat[0].origins[0].time.hour,cat[0].origins[0].time.minute,cat[0].origins[0].time.second)
                self.start.minute = cat[0].origins[0].time.minute
                self.start.hour = cat[0].origins[0].time.hour
                print(self.time)

                self.start.second = cat[0].origins[0].time.second

                # Lines commented out as they are only needed if TIME is provided as hhmm (for Deng's events there is
                # no TIME provided so we just have to use the event time downloaded)
                # if self.start.minute != cat[0].origins[0].time.minute:
                #     self.time = self.time[:2] + str(cat[0].origins[0].time.minute) # Time is hhmm so we subtract the old minute value and add the new one

                dep = cat[0].origins[0].depth
                if dep is not None:
                    self.evdp = dep/1000.0 # divide by 1000 to convert depth from [m] to [km]
                else:
                    self.evdp = 10.0 # hard-code depth to 10.0 km if evdp cannot be found
            except FDSNNoDataException:
                print("No Event Data Available")
                self.evdp = 0
            except FDSNException:
                print("FDSNException for get_events")
                # pass
        elif sep is True:
            self.start = obspy.core.UTCDateTime('{}'.format(self.data.DATE[i])) #iso8601=True
            self.date = '{:04d}{:03d}'.format(self.start.year,self.start.julday)
            self.time = '{:02d}{:02d}{:02d}'.format(self.start.hour,self.start.minute,self.start.second)
            self.evdp = self.data.EVDP[i]

    def download_traces(self,ch):
        """
        Function that downloads the traces for a given event and station
        """
        # if len(self.time) is 6:
        print('Start: {}. self.time: {}'.format(self.start,self.time))
        tr_id = "{}/{}/{}_{}_{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,ch)
        # elif len(self.time) is 4:
            # tr_id = "{}/{}/{}_{}_{}{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,self.start.second,ch)
        # print("Looking for :", tr_id)


        if ch == 'BHE':
            self.attempts += 1 # Counts the number of traces that downloads are attempted for

        if os.path.isfile(tr_id):
            print("{} exists. It was not downloaded".format(tr_id)) # file already exists, skip download

            if ch == 'BHE':
                out_id = '_'.join(tr_id.split('_')[0:-1])
                self.outfile.write('{}_\n'.format(out_id))
                # self.summary.append(out_id)
                self.ex += 1
        else:
            # print("It doesnt exists. Download attempted")
            st = obspy.core.stream.Stream() # Initialises our stream variable

            if self.network == 'BK': # note: "is" compares identity; == is the correct string comparison
                download_client = obspy.clients.fdsn.Client('NCEDC')
            else:
                download_client = obspy.clients.fdsn.Client('IRIS')
            try:
                st = download_client.get_waveforms(self.network,self.station,'??',ch,self.start,self.start + 3000,attach_response=True)
                # print(st)
                if len(st) > 3:
                    print("WARNING: More than three traces downloaded for event ", tr_id)
                elif len(st) < 3:
                    self.ts += 1

                dist_client = iris.Client() # iris web service client (obspy.clients.iris), used to calculate event-station distance
                print('STLA {} STLO {} EVLA {} EVLO {}'.format(self.stla,self.stlo,self.evla,self.evlo))
                self.d = dist_client.distaz(stalat=self.stla,stalon=self.stlo,evtlat=self.evla,evtlon=self.evlo)
                print('Source-Receiver distance is {}'.format(self.d['distance']))
                # keep events in the 85-145 degree distance window; the original
                # condition ">= 85.0 or >= 145.0" reduces to ">= 85.0", so an
                # 85-145 degree window is presumably what was intended
                if 85.0 <= self.d['distance'] <= 145.0:
                    if st[0].stats.endtime - st[0].stats.starttime >= 2000:
                        # print('Record length is {}, which is ok'.format(st[0].stats.endtime - st[0].stats.starttime))
                        self.write_st(st,tr_id)

                        if ch == 'BHE':
                            self.dwn += 1
                            out_id = '_'.join(tr_id.split('_')[0:-1])
                            self.outfile.write('{}_\n'.format(out_id))
                            # self.summary.append(out_id)
                    else:
                        print('Record length is {}, which is too short'.format(st[0].stats.endtime - st[0].stats.starttime))
                        if ch == 'BHE':
                            self.ts += 1
                else:
                    print("Source-Receiver distance is outside the 85-145 degree window")
                    if ch == 'BHE':
                        self.ts += 1
            except FDSNException:
                print('No Data Exception??')
                if ch == 'BHE':
                    self.fdsnx += 1

    def write_st(self,st,tr_id):
        """
        Write the stream to a SAC file, populating the SAC headers with
        origin, station and event parameters.
        """
        # print('Writing {}'.format(tr_id))
        st[0].write('holder.sac', format='SAC',) # Writes traces as SAC files
        #st.plot()
        st_2 = obspy.core.read('holder.sac')
        #sac = AttribDict() # Creates a dictionary sacd to contain all the header information I want.
        ## Set origin times
        st_2[0].stats.sac.nzyear = self.start.year
        st_2[0].stats.sac.nzjday = self.start.julday
        st_2[0].stats.sac.nzhour = self.start.hour
        st_2[0].stats.sac.nzmin = self.start.minute
        st_2[0].stats.sac.nzsec = self.start.second
        st_2[0].stats.sac.nzmsec = self.start.microsecond
        ## Station Paramters
        st_2[0].stats.sac.stla = self.stla
        st_2[0].stats.sac.stlo = self.stlo
        ## Event Paramters
        st_2[0].stats.sac.evla = self.evla # event latitude
        st_2[0].stats.sac.evlo = self.evlo # event longitude
        st_2[0].stats.sac.evdp = self.evdp # event depth (km)
        st_2[0].stats.sac.kstnm = '{:>8}'.format(self.station)
        # print('stla = {}, stlo = {}, evla = {}, evlo = {}'.format(stla,stlo,evla,evlo))


        st_2[0].stats.sac.gcarc = self.d['distance'] # great-circle distance in degrees, from the distaz result dictionary
        st_2[0].stats.sac.dist = self.d['distancemeters']/1000 # distance in kilometers
        st_2[0].stats.sac.baz = self.d['backazimuth'] # backazimuth (receiver to source)
        st_2[0].stats.sac.az = self.d['azimuth'] # azimuth (source to receiver)
        st_2[0].write(tr_id, format='SAC',byteorder=1)
Code Example #8
import obspy
from obspy.clients.fdsn.client import Client
import matplotlib.pyplot as plt
import numpy as np

starttime   = obspy.core.utcdatetime.UTCDateTime('2001-01-01')
endtime     = obspy.core.utcdatetime.UTCDateTime('2008-01-01')
client=Client('IRIS')
cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=6.5,
                        minlatitude=25., maxlatitude=40., minlongitude=65.,
                        maxlongitude=75., catalog='ISC', magnitudetype='MS')


i = 1
evlo = cat[i].origins[0].longitude; evla = cat[i].origins[0].latitude
otime   = cat[i].origins[0].time
stla    = 34.945910; stlo   = -106.457200
dist, az, baz=obspy.geodetics.gps2dist_azimuth(evla, evlo, stla, stlo) # distance is in m
t0 = 3600.*2.

print(evlo, evla)
print(cat[i].event_descriptions[0])
print(otime)
print('Mw = ', cat[i].magnitudes[0].mag)
st = client.get_waveforms(network='IU', station='ANMO', location='00', channel='LHZ',
                            starttime=otime, endtime=otime+t0, attach_response=True)
pre_filt = (0.001, 0.005, 1, 100.0)
st.detrend()
st.remove_response(pre_filt=pre_filt, taper_fraction=0.1)
st.filter(type='bandpass', freqmin=0.01, freqmax=0.05, corners=4)

tr1=st[0].copy()
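
A quick way to eyeball the result at this point, not part of the original snippet, is ObsPy's built-in preview plot:

tr1.plot()  # plots the instrument-corrected, band-passed vertical trace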
Code Example #9
File: rfbase.py  Project: marscfeng/surfpy
 def get_events(self, startdate, enddate, add2dbase=True, gcmt=False, Mmin=5.5, Mmax=None,
         minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, latitude=None, longitude=None,\
         minradius=None, maxradius=None, mindepth=None, maxdepth=None, magnitudetype=None, outquakeml=None):
     """Get earthquake catalog from IRIS server
     =======================================================================================================
     ::: input parameters :::
     startdate, enddate  - start/end date for searching
     Mmin, Mmax          - minimum/maximum magnitude for searching                
     minlatitude         - Limit to events with a latitude larger than the specified minimum.
     maxlatitude         - Limit to events with a latitude smaller than the specified maximum.
     minlongitude        - Limit to events with a longitude larger than the specified minimum.
     maxlongitude        - Limit to events with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
      longitude           - Specify the longitude to be used for a radius search.
     minradius           - Limit to events within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to events within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     mindepth            - Limit to events with depth, in kilometers, larger than the specified minimum.
     maxdepth            - Limit to events with depth, in kilometers, smaller than the specified maximum.
     magnitudetype       - Specify a magnitude type to use for testing the minimum and maximum limits.
     =======================================================================================================
     """
     starttime   = obspy.core.utcdatetime.UTCDateTime(startdate)
     endtime     = obspy.core.utcdatetime.UTCDateTime(enddate)
     if not gcmt:
         client  = Client('IRIS')
         try:
             catISC      = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=Mmin, maxmagnitude=Mmax, catalog='ISC',
                             minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                             latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, mindepth=mindepth,
                             maxdepth=maxdepth, magnitudetype=magnitudetype)
              endtimeISC  = catISC[0].origins[0].time
          except Exception:  # no ISC events returned for this window
              catISC      = obspy.core.event.Catalog()
              endtimeISC  = starttime
          if endtime.julday - endtimeISC.julday > 1:  # note: julday comparison assumes both times fall in the same year
             try:
                 catPDE  = client.get_events(starttime=endtimeISC, endtime=endtime, minmagnitude=Mmin, maxmagnitude=Mmax, catalog='NEIC PDE',
                             minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                             latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, mindepth=mindepth,
                             maxdepth=maxdepth, magnitudetype=magnitudetype)
                 catalog = catISC+catPDE
              except Exception:
                 catalog = catISC
         else:
             catalog     = catISC
         outcatalog      = obspy.core.event.Catalog()
         # check magnitude
         for event in catalog:
             if event.magnitudes[0].mag < Mmin:
                 continue
             outcatalog.append(event)
     else:
         # Updated the URL on Jul 25th, 2020
         gcmt_url_old    = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/jan76_dec17.ndk'
         gcmt_new        = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/NEW_MONTHLY'
         if starttime.year < 2005:
             print('--- Loading catalog: '+gcmt_url_old)
             cat_old     = obspy.read_events(gcmt_url_old)
              if Mmax is not None:
                  cat_old = cat_old.filter("magnitude <= %g" %Mmax)
              if maxlongitude is not None:
                  cat_old = cat_old.filter("longitude <= %g" %maxlongitude)
              if minlongitude is not None:
                  cat_old = cat_old.filter("longitude >= %g" %minlongitude)
              if maxlatitude is not None:
                  cat_old = cat_old.filter("latitude <= %g" %maxlatitude)
              if minlatitude is not None:
                  cat_old = cat_old.filter("latitude >= %g" %minlatitude)
              if maxdepth is not None:
                  cat_old = cat_old.filter("depth <= %g" %(maxdepth*1000.))
              if mindepth is not None:
                  cat_old = cat_old.filter("depth >= %g" %(mindepth*1000.))
             temp_stime  = obspy.core.utcdatetime.UTCDateTime('2018-01-01')
             outcatalog  = cat_old.filter("magnitude >= %g" %Mmin, "time >= %s" %str(starttime), "time <= %s" %str(endtime) )
         else:
             outcatalog      = obspy.core.event.Catalog()
             temp_stime      = copy.deepcopy(starttime)
             temp_stime.day  = 1
         while (temp_stime < endtime):
             year            = temp_stime.year
             month           = temp_stime.month
             yearstr         = str(int(year))[2:]
             monstr          = monthdict[month]
             monstr          = monstr.lower()
             if year==2005 and month==6:
                 monstr      = 'june'
             if year==2005 and month==7:
                 monstr      = 'july'
             if year==2005 and month==9:
                 monstr      = 'sept'
             gcmt_url_new    = gcmt_new+'/'+str(int(year))+'/'+monstr+yearstr+'.ndk'
             try:
                 cat_new     = obspy.read_events(gcmt_url_new, format='ndk')
                 print('--- Loading catalog: '+gcmt_url_new)
              except Exception:  # monthly ndk file not (yet) available
                 print('--- Link not found: '+gcmt_url_new)
                 break
             cat_new         = cat_new.filter("magnitude >= %g" %Mmin, "time >= %s" %str(starttime), "time <= %s" %str(endtime) )
              if Mmax is not None:
                  cat_new     = cat_new.filter("magnitude <= %g" %Mmax)
              if maxlongitude is not None:
                  cat_new     = cat_new.filter("longitude <= %g" %maxlongitude)
              if minlongitude is not None:
                  cat_new     = cat_new.filter("longitude >= %g" %minlongitude)
              if maxlatitude is not None:
                  cat_new     = cat_new.filter("latitude <= %g" %maxlatitude)
              if minlatitude is not None:
                  cat_new     = cat_new.filter("latitude >= %g" %minlatitude)
              if maxdepth is not None:
                  cat_new     = cat_new.filter("depth <= %g" %(maxdepth*1000.))
              if mindepth is not None:
                  cat_new     = cat_new.filter("depth >= %g" %(mindepth*1000.))
             outcatalog      += cat_new
              try:
                  temp_stime.month    += 1
              except ValueError:  # month rolled past 12; advance to January of the next year
                  temp_stime.year     += 1
                  temp_stime.month    = 1
      try:
          self.cat    += outcatalog
      except AttributeError:  # self.cat does not exist yet
          self.cat    = outcatalog
     if add2dbase:
         self.add_quakeml(outcatalog)
     if outquakeml is not None:
         self.cat.write(outquakeml, format='quakeml')
     return
Code Example #10
import obspy
from obspy.clients.fdsn.client import Client

client = Client('IRIS')
catISC = client.get_events(minmagnitude=3.0,
                           catalog='ISC',
                           starttime=obspy.UTCDateTime('19910101'),
                           minlatitude=52.,
                           maxlatitude=73.,
                           minlongitude=-170.,
                           maxlongitude=-120.)
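
For a quick look at what came back, the Catalog can be summarized and mapped directly; a minimal sketch (Catalog.plot requires cartopy or basemap):

print(catISC)                    # one-line-per-event summary
catISC.plot(projection='local')  # map view of the retrieved epicenters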
Code Example #11
File: test_obspy_compat.py  Project: emolch/pyrocko
 def test_to_pyrocko_events(self):
     from obspy.clients.fdsn.client import Client
     client = Client('IRIS')
     cat = client.get_events(eventid=609301)
     events = cat.to_pyrocko_events()
     self.assertEqual(len(events), len(cat))
Code Example #12
import os
from obspy import UTCDateTime
from obspy.clients.fdsn.client import Client

working_dir = '/Users/ljyi/Desktop/SYS6018/final_project'
os.chdir(working_dir)

# =========================== Data Exploration 1. =============================
# prepare variables for data downloading
client = Client('USGS')
start_time = UTCDateTime("1990-01-01T00:00:00")
end_time = UTCDateTime("1991-01-01T00:00:00")  # "2018-11-01T00:00:00"

# set start time and end time to collect data
t1 = start_time
t2 = end_time

# # download earthquake data
#cat = client.get_events(starttime=t1, endtime=t2, minmagnitude=5.5)
cat = client.get_events(starttime=t1, endtime=t2, minmagnitude=4.5)

# save data into an easier-to-use format
grid_size = 5
lat_list = []
lon_list = []
mag_list = []
depth_list = []
grid_list = []
time_diff_list = []
for an_event in cat:
    lat = an_event.origins[0].latitude
    lon = an_event.origins[0].longitude
    depth = an_event.origins[0].depth * 0.001  # convert depth from m to km
    mag = an_event.magnitudes[0].mag
    origin_time = an_event.origins[0].time
Code Example #13
File: bug_get_events.py  Project: NoiseCIEI/NoisePy
from obspy.clients.fdsn.client import Client
import obspy

client = Client('IRIS')
starttime = obspy.core.utcdatetime.UTCDateTime('2011-12-01')
endtime = obspy.core.utcdatetime.UTCDateTime('2011-12-31')
cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=5.5,
                        catalog='ISC', magnitudetype='mb')
print(cat)  # display the catalog summary
Code Example #14
File: deep_quake_Alaska.py  Project: junxie01/pyMCinv
from obspy.clients.fdsn.client import Client
import numpy as np
import timeit
import matplotlib.pyplot as plt
import obspy

startdate = '1991-01-01'
enddate = '2015-02-01'
starttime = obspy.core.utcdatetime.UTCDateTime(startdate)
endtime = obspy.core.utcdatetime.UTCDateTime(enddate)

client = Client('IRIS')
cat = client.get_events(catalog='NEIC PDE', minlatitude=55, maxlatitude=65, minlongitude=-170, maxlongitude=-140, \
                        mindepth=70., starttime=starttime, endtime=endtime)
Code Example #15
File: main.py  Project: outdoorpet/compare_catalogues
    def get_catalogues(self):

        isc_catalogue = Client(self.sel_dlg_ret[0])
        oth_catalogue = Client(self.sel_dlg_ret[1])

        t_start = self.sel_dlg_ret[2]
        t_end = self.sel_dlg_ret[3]

        print('\nRequesting Earthquake Catalogues from Remote Servers.....')

        self.isc_cat = isc_catalogue.get_events(starttime=t_start,
                                                endtime=t_end)
        self.oth_cat = oth_catalogue.get_events(starttime=t_start,
                                                endtime=t_end)

        # create empty data frame
        self.isc_df = pd.DataFrame(data=None,
                                   columns=[
                                       'isc_ind', 'event_id', 'qtime', 'lat',
                                       'lon', 'depth', 'mag'
                                   ])
        self.oth_df = pd.DataFrame(data=None,
                                   columns=[
                                       'oth_ind', 'event_id', 'qtime', 'lat',
                                       'lon', 'depth', 'mag'
                                   ])

        # iterate through the events in oth cat
        for _i, event in enumerate(self.oth_cat):
            print "\r     Parsing event from Local Cat", _i + 1, ' of ', len(
                self.oth_cat), ' ....',
            sys.stdout.flush()

            # Get quake origin info
            origin_info = event.preferred_origin() or event.origins[0]

            try:
                mag_info = event.preferred_magnitude() or event.magnitudes[0]
                magnitude = mag_info.mag
            except IndexError:
                # No magnitude for event
                magnitude = None

            self.oth_df.loc[_i] = [
                int(_i),
                str(event.resource_id.id),
                int(origin_info.time.timestamp), origin_info.latitude,
                origin_info.longitude, origin_info.depth, magnitude
            ]

        # Convert the depth field to Km from m
        self.oth_df.loc[:, 'depth'] *= (1.0 / 1000)

        print('\n')
        # iterate through the events in isc cat
        for _i, event in enumerate(self.isc_cat):
            print "\r     Parsing event from ISC Cat", _i + 1, ' of ', len(
                self.isc_cat), ' ....',
            sys.stdout.flush()
            # Get quake origin info
            origin_info = event.preferred_origin() or event.origins[0]
            try:
                mag_info = event.preferred_magnitude() or event.magnitudes[0]
                magnitude = mag_info.mag
            except IndexError:
                # No magnitude for event
                magnitude = None
            self.isc_df.loc[_i] = [
                int(_i),
                str(event.resource_id.id),
                int(origin_info.time.timestamp), origin_info.latitude,
                origin_info.longitude, origin_info.depth, magnitude
            ]

        # Convert the depth field to Km from m
        self.isc_df.loc[:, 'depth'] *= (1.0 / 1000)

        # =====================Finding matching events =======================

        print('\nFinding Matching Events.....')
        progressDialog = QtGui.QProgressDialog("Finding Matching Events",
                                               "Cancel", 0, len(self.oth_df))

        global match_index
        global length_oth_df
        match_index = 0
        length_oth_df = len(self.oth_df)

        def get_isc_match(row):
            global match_index
            global length_oth_df
            progressDialog.setValue(match_index)
            print "\r     Matching event from Local Cat", match_index, ' of ', length_oth_df, ' ....',
            sys.stdout.flush()
            temp = self.isc_df_drop.apply(lambda x: abs(x - row),
                                          axis=1)  # Pandas DF
            # NaNs are treated as small
            smallest_temp = temp.nsmallest(2,
                                           columns=['lat', 'lon', 'qtime'
                                                    ]).iloc[0]  # Pandas Series

            distance_diff = degrees2kilometers(
                math.sqrt(
                    abs(smallest_temp['lat'])**2 +
                    abs(smallest_temp['lon'])**2))

            isc_index = smallest_temp.name

            if smallest_temp['qtime'] <= 15 and \
                    (abs(smallest_temp['lon']) <= 1 or np.isnan(smallest_temp['lon'])) and \
                    (abs(smallest_temp['lat']) <= 1 or np.isnan(smallest_temp['lat'])):
                ret_s = pd.Series([
                    isc_index, self.isc_df.loc[isc_index, 'event_id'],
                    self.isc_df.loc[isc_index, 'qtime'],
                    self.isc_df.loc[isc_index,
                                    'lat'], self.isc_df.loc[isc_index, 'lon'],
                    self.isc_df.loc[isc_index, 'depth'],
                    self.isc_df.loc[isc_index, 'mag'], smallest_temp['qtime'],
                    distance_diff, smallest_temp['depth'], smallest_temp['mag']
                ],
                                  index=[
                                      'isc_ind', 'event_id_match',
                                      'qtime_match', 'lat_match', 'lon_match',
                                      'depth_match', 'mag_match', 'qtime_diff',
                                      'dist_diff', 'depth_diff', 'mag_diff'
                                  ])
            else:
                ret_s = pd.Series([None, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                                  index=[
                                      'isc_ind', 'event_id_match',
                                      'qtime_match', 'lat_match', 'lon_match',
                                      'depth_match', 'mag_match', 'qtime_diff',
                                      'dist_diff', 'depth_diff', 'mag_diff'
                                  ])

            match_index += 1
            return ret_s

        # Drop the event_id column (strings) from the data frame to apply vectorised function
        self.oth_df_drop = self.oth_df.drop('event_id', axis=1)
        self.isc_df_drop = self.isc_df.drop('event_id', axis=1)
        self.matched_df = pd.concat(
            (self.oth_df, self.oth_df_drop.apply(get_isc_match, axis=1)),
            axis=1)

        # drop a row from the matched df if isc_ind in matched df is NaN
        # (I.e. there was no matching earthquake in isc cat)
        self.matched_df.dropna(subset=['isc_ind'], inplace=True)
        self.matched_df.reset_index(drop=True, inplace=True)

        # find isc events not matched and oth events not matched
        self.isc_not_matched_df = self.isc_df[~self.isc_df['isc_ind'].
                                              isin(self.matched_df['isc_ind'])]
        self.oth_not_matched_df = self.oth_df[~self.oth_df['oth_ind'].
                                              isin(self.matched_df['oth_ind'])]
Code Example #16
# prepare the log file;
# its location is set according to the configuration
# (assumes logging, os, Client, cfg, starttime and endtime are defined earlier in the file)
logging.basicConfig(filename=cfg.logfile, level=logging.INFO, format='%(asctime)s %(message)s\n')

try:
    logging.info("Connecting to FDSN Server:  " + cfg.fdsnws_url+"...")
    fdsn = Client(cfg.fdsnws_url)

    # get the first event
    logging.info('Checking for timespan: ' + str(starttime) + ' - ' + str(endtime))
    catalog=fdsn.get_events(starttime=starttime, 
                          endtime=endtime, 
                          minmagnitude=cfg.mag_thres,
                          magnitudetype="MLh",
                          includeallorigins=False,
                          includeallmagnitudes=False,
                          includearrivals=True,
                          orderby="time-asc",
                          limit=cfg.event_limit)

    # for every event that is found, call the process
    for event in catalog:
        logging.info(str(event))

        # call the python code
        os.system("python " + cfg.extract + " " + str(event.origins[0].time))
        # get event datetime
        # cut off microseconds
        # date=UTCDateTime(str(event.origins[0].time).partition(".")[0])
        # create working dir folder respective to the event datetime