def __init__(self, df, station, outdir):

        self.station = station
        self.data = df
        self.out = outdir

        # Make a per-station output directory if it does not already exist
        try:
            os.mkdir('{}/{}'.format(self.out, station))
        except FileExistsError:
            print('It already exists, Hooray! Less work for me!')

        self.attempts = 0 # Counter for how many attempted downloads there were
        self.fdsnx = 0 # Counter for how many attempts hit a FDSNNoDataException
        self.dwn = 0 # Counter for how many events were downloaded
        self.ex = 0 # Counter for how many events already exist in the filesystem and therefore were not downloaded
        self.ts = 0 # Counter for events whose traces are too short
        self.fdsnclient_evt = Client('IRIS') # Separate client for events (hopefully to get around the "no event available" bug)
        self.fdsnclient = Client('IRIS')
    def queryByBBoxInterval(self, outputFileName, bbox, timeinterval, chan='*Z', bbpadding=2,
                            event_id=None, verbose=False):
        """ Time interval is a tuple (starttime,endtime)
        """
        assert len(timeinterval) == 2, "timeinterval must be a tuple of ascending timestamps. len=" + str(
            len(timeinterval)) + " " + str(timeinterval)

        query_ds = pyasdf.ASDFDataSet(outputFileName)

        client = Client(self._client)
        ref_inv = client.get_stations(network=self._network,
                                      starttime=UTCDateTime(timeinterval[0]),
                                      endtime=UTCDateTime(timeinterval[1]),
                                      minlongitude=bbox[0] - bbpadding,
                                      maxlongitude=bbox[1] + bbpadding,
                                      minlatitude=bbox[2] - bbpadding,
                                      maxlatitude=bbox[3] + bbpadding,
                                      level='channel')

        if verbose:
            print(ref_inv)

        ref_st = Stream()

        # go through inventory and request timeseries data
        for net in ref_inv:
            for stn in net:
                stime = UTCDateTime(timeinterval[0])
                etime = UTCDateTime(timeinterval[1])
                step = 3600*24*10  # request waveforms in 10-day windows
                while stime + step < etime:
                    try:
                        ref_st = client.get_waveforms(network=net.code, station=stn.code,
                                                      channel=chan, location='*',
                                                      starttime=stime,
                                                      endtime=stime+step)
                        print(ref_st)
                        self.ref_stations.append(net.code + '.' + stn.code)
                        st_inv = ref_inv.select(station=stn.code, channel=chan)
                        
                        query_ds.add_stationxml(st_inv)
                        for tr in ref_st:
                            query_ds.add_waveforms(tr, "reference_station")
                    except FDSNException:
                        print('Data not available from Reference Station: ' + stn.code)
                    # end try
                    stime += step
                #wend
        # end for

        #tr.write(os.path.join(os.path.dirname(outputFileName), tr.id + ".MSEED"),
        #         format="MSEED") # Don't write miniseed
        if verbose:
            print("Wrote Reference Waveforms to ASDF file: " + outputFileName)
            print('\nWaveform data query completed.')

        metaOutputFileName = os.path.join(os.path.dirname(outputFileName),
                                          'meta.%s.xml'%(os.path.basename(outputFileName)))
        ref_inv.write(metaOutputFileName, format="STATIONXML")
        del query_ds
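# Usage sketch (not from the source): the host object and its _client/_network
# attributes, plus all values below, are assumptions. Note the bbox ordering
# implied by the get_stations call above: (min_lon, max_lon, min_lat, max_lat).
bbox = (130.0, 140.0, -20.0, -10.0)
interval = ('2010-01-01T00:00:00', '2010-03-01T00:00:00')
harvester.queryByBBoxInterval('ref_data.h5', bbox, interval, chan='BHZ', verbose=True)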
def generateStationTestData(sta):

    time_range = (UTCDateTime(TIME_RANGE[0]), UTCDateTime(TIME_RANGE[1]))

    client = Client("IRIS")
    inv = client.get_stations(network=NETWORK,
                              station=sta,
                              channel=CHANNEL,
                              starttime=time_range[0],
                              endtime=time_range[1],
                              level='channel')
    print(inv)

    traces = client.get_waveforms(network=NETWORK,
                                  station=sta,
                                  channel=CHANNEL,
                                  location='*',
                                  starttime=time_range[0],
                                  endtime=time_range[1])
    print(traces)

    outfile = 'test_data_' + sta + '.h5'
    asdf_out = pyasdf.ASDFDataSet(outfile, mode='w')
    asdf_out.add_stationxml(inv)
    asdf_out.add_waveforms(traces, TAG)

    print("Saved data to " + outfile)
Example #4
        def _get_client(client_name):
            # It might already be an initialized client - in that case just
            # use it.
            if isinstance(client_name, Client):
                name, client = client_name.base_url, client_name
            else:
                try:
                    this_client = Client(client_name, debug=self.debug)
                    name, client = client_name, this_client
                except utils.ERRORS as e:
                    if "timeout" in str(e).lower():
                        extra = " (timeout)"
                    else:
                        extra = ""
                    logger.warn("Failed to initialize client '%s'.%s" %
                                (client_name, extra))
                    return client_name, None

            services = sorted([
                _i for _i in client.services.keys()
                if not _i.startswith("available")
            ])
            if "dataselect" not in services or "station" not in services:
                logger.info("Cannot use client '%s' as it does not have "
                            "'dataselect' and/or 'station' services." % name)
                return name, None
            return name, client
def getXmlFromIRIS(network=None):
    if network is None:
        return
    client = Client("IRIS")
    # Use level=response to get channel information...
    # ...which holds the location code necessary for station referencing
    inv = client.get_stations(network=network, level='response')
    inv.write('./' + network + '.xml', format='stationxml', validate=True)
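# Example call (the network code is illustrative); note that level='response'
# inventories can be large for big networks:
getXmlFromIRIS('IU')  # writes ./IU.xml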
Example #6
def get_arclink_event_data(bulk, fname, dataless, event):
    from obspy.core.utcdatetime import UTCDateTime
    try:
        from obspy.arclink.client import Client
    except:
        from obspy.clients.arclink.client import Client
        #from obspy.clients.fdsn.client import Client
    from os import path
    from numpy import nan, isnan
    from mapping_tools import distance
    '''
    Code to extract IRIS data, one station at a time.  Exports mseed file to 
    working directory
    
    datetime tuple fmt = (Y,m,d,H,M)
    sta = station
    '''
    try:
        # first, check if the site is within distance and azimuthal range
        for channel in ['SHZ', 'EHZ', 'BHZ', 'HHZ', 'BNZ', 'HNZ']:
            seedid = '.'.join((bulk[0], bulk[1], '00', channel))  # e.g. 'AU.DPH.00.BNZ'

            try:
                staloc = dataless.get_coordinates(seedid, bulk[4])
            except:
                pass
            # try another seed id fmt
            seedid = '.'.join((bulk[0], bulk[1], '', channel))  # e.g. 'AU.DPH..BNZ'
            try:
                staloc = dataless.get_coordinates(seedid, bulk[4])
            except:
                pass

        # now get distance and azimuth
        rngkm, az, baz = distance(event['lat'], event['lon'],
                                  staloc['latitude'], staloc['longitude'])
        #print(rngkm, az, baz)
        print('arclink', seedid)
        getRecord = False
        if rngkm <= 2000. and az > 110. and az < 250.:
            getRecord = True
        elif rngkm <= 50.:
            getRecord = True

        # check if file already exists
        st = None
        if not path.isfile(fname) and getRecord:
            print('Getting:', fname)
            client = Client(user='******')
            st = client.get_waveforms(bulk[0], bulk[1], bulk[2], bulk[3],
                                      bulk[4], bulk[5])
            st = st.merge(method=0, fill_value='interpolate')

            print('Writing file:', fname)
            st.write(fname, format="MSEED")
    except:
        print('No data for:', fname)

    return st
Example #7
 def _get_client(client_name):
     try:
         this_client = Client(client_name, debug=self.debug)
     except utils.ERRORS as e:
         if "timeout" in str(e).lower():
             extra = " (timeout)"
         else:
             extra = ""
         logger.warn("Failed to initialize client '%s'.%s"
                     % (client_name, extra))
         return client_name, None
     services = sorted([_i for _i in this_client.services.keys()
                        if not _i.startswith("available")])
     if "dataselect" not in services or "station" not in services:
         logger.info("Cannot use client '%s' as it does not have "
                     "'dataselect' and/or 'station' services."
                     % client_name)
         return client_name, None
     return client_name, this_client
Example #8
 def get_stations(self, startdate=None, enddate=None,  network=None, station=None, location=None, channel=None,
         minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, latitude=None, longitude=None, minradius=None, maxradius=None):
     """Get station inventory from IRIS server
     =======================================================================================================
     Input Parameters:
     startdate, enddate  - start/end date for searching
     network             - Select one or more network codes.
                             Can be SEED network codes or data center defined codes.
                                 Multiple codes are comma-separated (e.g. "IU,TA").
     station             - Select one or more SEED station codes.
                             Multiple codes are comma-separated (e.g. "ANMO,PFO").
     location            - Select one or more SEED location identifiers.
                             Multiple identifiers are comma-separated (e.g. "00,01").
                             As a special case '--' (two dashes) will be translated to a string of two space
                             characters to match blank location IDs.
     channel             - Select one or more SEED channel codes.
                             Multiple codes are comma-separated (e.g. "BHZ,HHZ").
     minlatitude         - Limit to stations with a latitude larger than the specified minimum.
     maxlatitude         - Limit to stations with a latitude smaller than the specified maximum.
     minlongitude        - Limit to stations with a longitude larger than the specified minimum.
     maxlongitude        - Limit to stations with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to be used for a radius search.
     minradius           - Limit to stations within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to stations within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     =======================================================================================================
     """
     try:
         starttime = obspy.core.utcdatetime.UTCDateTime(startdate)
     except:
         starttime = None
     try:
         endtime = obspy.core.utcdatetime.UTCDateTime(enddate)
     except:
         endtime = None
     client  = Client('IRIS')
     inv     = client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, channel=channel, 
                 minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                     latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, level='channel')
     self.add_stationxml(inv)
     try:
         self.inv += inv
     except:
         self.inv = inv
     return
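# Usage sketch (assumptions: the method lives on a pyasdf.ASDFDataSet subclass,
# here called MyASDFDataSet, since it calls self.add_stationxml):
dset = MyASDFDataSet('survey.h5')
dset.get_stations(startdate='2011-01-01', enddate='2012-01-01',
                  network='TA', channel='BHZ',
                  minlatitude=30, maxlatitude=40,
                  minlongitude=-110, maxlongitude=-100)
print(dset.inv)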
    def get_catalogues(self):

        isc_catalogue = Client(self.sel_dlg_ret[0])
        oth_catalogue = Client(self.sel_dlg_ret[1])

        t_start = self.sel_dlg_ret[2]
        t_end = self.sel_dlg_ret[3]

        print('\nRequesting Earthquake Catalogues from Remote Servers.....')

        self.isc_cat = isc_catalogue.get_events(starttime=t_start,
                                                endtime=t_end)
        self.oth_cat = oth_catalogue.get_events(starttime=t_start,
                                                endtime=t_end)

        # create empty data frame
        self.isc_df = pd.DataFrame(data=None,
                                   columns=[
                                       'isc_ind', 'event_id', 'qtime', 'lat',
                                       'lon', 'depth', 'mag'
                                   ])
        self.oth_df = pd.DataFrame(data=None,
                                   columns=[
                                       'oth_ind', 'event_id', 'qtime', 'lat',
                                       'lon', 'depth', 'mag'
                                   ])

        # iterate through the events in oth cat
        for _i, event in enumerate(self.oth_cat):
            print("\r     Parsing event from Local Cat {} of {} ....".format(
                _i + 1, len(self.oth_cat)), end='')
            sys.stdout.flush()

            # Get quake origin info
            origin_info = event.preferred_origin() or event.origins[0]

            try:
                mag_info = event.preferred_magnitude() or event.magnitudes[0]
                magnitude = mag_info.mag
            except IndexError:
                # No magnitude for event
                magnitude = None

            self.oth_df.loc[_i] = [
                int(_i),
                str(event.resource_id.id),
                int(origin_info.time.timestamp), origin_info.latitude,
                origin_info.longitude, origin_info.depth, magnitude
            ]

        # Convert the depth field to Km from m
        self.oth_df.loc[:, 'depth'] *= (1.0 / 1000)

        print('\n')
        # iterate through the events in isc cat
        for _i, event in enumerate(self.isc_cat):
            print("\r     Parsing event from ISC Cat {} of {} ....".format(
                _i + 1, len(self.isc_cat)), end='')
            sys.stdout.flush()
            # Get quake origin info
            origin_info = event.preferred_origin() or event.origins[0]
            try:
                mag_info = event.preferred_magnitude() or event.magnitudes[0]
                magnitude = mag_info.mag
            except IndexError:
                # No magnitude for event
                magnitude = None
            self.isc_df.loc[_i] = [
                int(_i),
                str(event.resource_id.id),
                int(origin_info.time.timestamp), origin_info.latitude,
                origin_info.longitude, origin_info.depth, magnitude
            ]

        # Convert the depth field to Km from m
        self.isc_df.loc[:, 'depth'] *= (1.0 / 1000)

        # =====================Finding matching events =======================

        print('\nFinding Matching Events.....')
        progressDialog = QtGui.QProgressDialog("Finding Matching Events",
                                               "Cancel", 0, len(self.oth_df))

        global match_index
        global length_oth_df
        match_index = 0
        length_oth_df = len(self.oth_df)

        def get_isc_match(row):
            global match_index
            global length_oth_df
            progressDialog.setValue(match_index)
            print("\r     Matching event from Local Cat {} of {} ....".format(
                match_index, length_oth_df), end='')
            sys.stdout.flush()
            temp = self.isc_df_drop.apply(lambda x: abs(x - row),
                                          axis=1)  # Pandas DF
            # NaNs are treated as small
            smallest_temp = temp.nsmallest(2,
                                           columns=['lat', 'lon', 'qtime'
                                                    ]).iloc[0]  # Pandas Series

            distance_diff = degrees2kilometers(
                math.sqrt(
                    abs(smallest_temp['lat'])**2 +
                    abs(smallest_temp['lon'])**2))

            isc_index = smallest_temp.name

            if smallest_temp['qtime'] <= 15 and \
                    (abs(smallest_temp['lon']) <= 1 or np.isnan(smallest_temp['lon'])) and \
                    (abs(smallest_temp['lat']) <= 1 or np.isnan(smallest_temp['lat'])):
                ret_s = pd.Series([
                    isc_index, self.isc_df.loc[isc_index, 'event_id'],
                    self.isc_df.loc[isc_index, 'qtime'],
                    self.isc_df.loc[isc_index,
                                    'lat'], self.isc_df.loc[isc_index, 'lon'],
                    self.isc_df.loc[isc_index, 'depth'],
                    self.isc_df.loc[isc_index, 'mag'], smallest_temp['qtime'],
                    distance_diff, smallest_temp['depth'], smallest_temp['mag']
                ],
                                  index=[
                                      'isc_ind', 'event_id_match',
                                      'qtime_match', 'lat_match', 'lon_match',
                                      'depth_match', 'mag_match', 'qtime_diff',
                                      'dist_diff', 'depth_diff', 'mag_diff'
                                  ])
            else:
                ret_s = pd.Series([None, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                                  index=[
                                      'isc_ind', 'event_id_match',
                                      'qtime_match', 'lat_match', 'lon_match',
                                      'depth_match', 'mag_match', 'qtime_diff',
                                      'dist_diff', 'depth_diff', 'mag_diff'
                                  ])

            match_index += 1
            return ret_s

        # Drop the event_id column (strings) from the data frame to apply vectorised function
        self.oth_df_drop = self.oth_df.drop('event_id', axis=1)
        self.isc_df_drop = self.isc_df.drop('event_id', axis=1)
        self.matched_df = pd.concat(
            (self.oth_df, self.oth_df_drop.apply(get_isc_match, axis=1)),
            axis=1)

        # drop a row from the matched df if isc_ind in matched df is NaN
        # (I.e. there was no matching earthquake in isc cat)
        self.matched_df.dropna(subset=['isc_ind'], inplace=True)
        self.matched_df.reset_index(drop=True, inplace=True)

        # find isc events not matched and oth events not matched
        self.isc_not_matched_df = self.isc_df[
            ~self.isc_df['isc_ind'].isin(self.matched_df['isc_ind'])]
        self.oth_not_matched_df = self.oth_df[
            ~self.oth_df['oth_ind'].isin(self.matched_df['oth_ind'])]
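# The matching rule above accepts a pair when origin times differ by <= 15 s and
# both |dlat| and |dlon| are <= 1 degree; dist_diff converts the residual to km.
# A quick illustration of that conversion (values are made up):
import math
from obspy.geodetics import degrees2kilometers

dlat, dlon = 0.3, 0.4
dist_diff = degrees2kilometers(math.sqrt(dlat**2 + dlon**2))
print(round(dist_diff, 1))  # ~55.6 km, since 1 degree is ~111.19 km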
Example #10
import matplotlib as mpl
import numpy as np
from scipy.signal import hilbert
import matplotlib.animation as animation
from obspy import Stream, UTCDateTime
from obspy.clients.fdsn import Client
Writer = animation.writers['pillow']
writer = Writer(fps=20, metadata=dict(artist='Me'), bitrate=1800)

sta = 'R32B'
loc = '00'
net = 'N4'
chan = 'HHZ'

stime = UTCDateTime('2019-08-16 12:59:10')
etime = stime + 120

client = Client()
inv = client.get_stations(network=net,
                          station=sta,
                          starttime=stime,
                          endtime=etime,
                          channel=chan,
                          level="response")
st = Stream()
st += client.get_waveforms(net, sta, loc, chan, stime, etime)

st.detrend('constant')
st.merge(fill_value=0)
st.attach_response(inv)
st.remove_response(output="DISP")
#st.rotate(method="->ZNE",inventory=inv)
st.filter("bandpass", freqmin=.5, freqmax=5)
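# The pillow writer configured above is never used in the snippet; a minimal
# sketch of one way it might animate the processed trace (the frame step and
# filename are assumptions):
import matplotlib.pyplot as plt

tr = st[0]
times = tr.times()
step = 50  # samples revealed per frame

fig, ax = plt.subplots()
line, = ax.plot([], [])
ax.set_xlim(0, times[-1])
ax.set_ylim(tr.data.min(), tr.data.max())

def update(i):
    n = (i + 1) * step
    line.set_data(times[:n], tr.data[:n])
    return line,

ani = animation.FuncAnimation(fig, update, frames=tr.stats.npts // step, blit=True)
ani.save('R32B_disp.gif', writer=writer)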
Example #11
# Parallelised autopick harvester. We have on the order of a million picks,
# so parallel processing is the only practical way to go about it.
import multiprocessing as mp
import sys

import numpy as np
from obspy.clients.fdsn.client import Client

from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet
from seismic.ml_classifier.data_harvester.autopicks import pickLoaderRand

ic = Client("IRIS")
fds = FederatedASDFDataSet(
    '/g/data/ha3/Passive/SHARED_DATA/Index/asdf_files.txt',
    variant='db',
    use_json_db=True,
    logger=None)

pl = pickLoaderRand(fds, ic)

nproc = mp.cpu_count()
print(nproc)


def lockInit(l):
    # make the shared lock visible as a global in each worker process
    global lock
    lock = l


l = mp.Lock()
pool = mp.Pool(processes=nproc, initializer=lockInit, initargs=(l, ))
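# The snippet ends after creating the pool; presumably the picks are then farmed
# out roughly like this (the worker body is hypothetical, since pickLoaderRand's
# API is not shown):
def harvest(index):
    with lock:  # lock installed in each worker by lockInit
        print('worker handling pick', index)

pool.map(harvest, range(1000))
pool.close()
pool.join()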
import pytest
from obspy.clients.fdsn import Client
from obspy.clients.fdsn.header import FDSNException


def iris_client():
    """return the IRIS client"""
    try:
        return Client()
    except FDSNException:
        pytest.skip("could not connect to webservice")
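# A test might consume the helper like this (the event id is borrowed from
# Example #23 below; the test itself is an illustration, not from the source):
def test_get_events():
    client = iris_client()  # skips the test if IRIS is unreachable
    cat = client.get_events(eventid=609301)
    assert len(cat) == 1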
Example #13
def preprocess():
    """
    This script preprocesses the MSEED files in the input directories
    specified in the input file.
    """


    # Create output directory, if necessary

    outdir = os.path.join('data','processed')
     
    if rank == 0 and not os.path.exists(outdir):
        os.mkdir(outdir)
    if rank == 0 and cfg.verbose:
        print(cfg.__dict__)
    
    comm.Barrier()

    event_filter = None

    if cfg.gcmt_exclude:

        if rank == 0:
            c = Client()
            cata = c.get_events(starttime=UTCDateTime(cfg.gcmt_begin),
                endtime=UTCDateTime(cfg.gcmt_end),catalog='GCMT',
                minmagnitude=5.6)
    
            event_filter = get_event_filter(cata,cfg.Fs_new[-1],
                t0=UTCDateTime(cfg.gcmt_begin),
                t1=UTCDateTime(cfg.gcmt_end))

        
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        event_filter = comm.bcast(event_filter,root=0)
    
    if cfg.event_exclude_local_cat:

        local_cat = Catalog()
        
        if rank == 0:
            c = Client()
            local_cat.extend(c.get_events(
                    starttime=UTCDateTime(cfg.event_exclude_local_cat_begin),
                    endtime=UTCDateTime(cfg.event_exclude_local_cat_end),
                    #catalog=catalog,
                    minmagnitude=cfg.event_exclude_local_cat_minmag,
                    latitude=cfg.event_exclude_local_cat_lat,
                    longitude=cfg.event_exclude_local_cat_lon,
                    maxradius=cfg.event_exclude_local_cat_radius))
            print(len(local_cat),"events in local earthquake catalog.")
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        local_cat = comm.bcast(local_cat,root=0)

    # Create own output directory, if necessary
    rankdir = os.path.join(outdir,
        'rank_%g' %rank)
    if not os.path.exists(rankdir):
        os.mkdir(rankdir)

    
    #- Find input files
    
    content = find_files(cfg.input_dirs,
        cfg.input_format)
    if rank==0:
        print(len(content), "files found") 
    #print(content)

    # processing report file
    sys.stdout.flush()
    output_file = os.path.join(rankdir,
        'processing_report_rank%g.txt' %rank)
    
    if os.path.exists(output_file):
        ofid = open(output_file,'a')
        print('UPDATING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)
    else:
        ofid = open(output_file,'w')
        print('PROCESSING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)


    # select input files for this rank    
    content = content[rank::size]
    if cfg.testrun: # Only 3 files randomly selected
        indices = randint(0,len(content),3)
        content = [content[j] for j in indices]

    # Loop over input files
    for filepath in content:
        
        print('-------------------------------------',file=ofid)
        print('Attempting to process:',file=ofid)
        print(os.path.basename(filepath),file=ofid)
        
        try:
            prstr = PrepStream(filepath,ofid)
        except:
            print('** Problem opening file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        if len(prstr.stream) == 0:
            print('** No data in file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue
        
        try:
            prstr.prepare(cfg)
        except:
           print('** Problems preparing stream: ',file=ofid)
           print('** %s' %filepath,file=ofid)
           continue
            
        try:
            prstr.process(cfg,event_filter,local_cat)
        except:
            print('** Problems processing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        try:
            prstr.write(rankdir,cfg)
        except:
            print('** Problems writing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)

        ofid.flush()
        
    ofid.close()

    print("Rank %g has completed processing." % rank)
    
    
    try:
        os.system('mv '+rankdir+'/* '+outdir)
    except:
        pass

    os.system('rmdir '+rankdir)
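# preprocess() relies on module-level MPI globals and a parsed config that the
# snippet does not show; a plausible setup (cfg's loader is an assumption):
from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
# cfg is presumably a configuration object exposing the attributes used above
# (input_dirs, input_format, gcmt_exclude, testrun, verbose, ...).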
Example #14
 def read_TA_lst(self, infname, startdate=None, enddate=None,  startbefore=None, startafter=None, endbefore=None, endafter=None, location=None, channel=None,\
         includerestricted=False, minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, \
         latitude=None, longitude=None, minradius=None, maxradius=None):
     """Get station inventory from IRIS server
     =======================================================================================================
     Input Parameters:
     startdate, enddate  - start/end date for searching
     network             - Select one or more network codes.
                             Can be SEED network codes or data center defined codes.
                                 Multiple codes are comma-separated (e.g. "IU,TA").
     station             - Select one or more SEED station codes.
                             Multiple codes are comma-separated (e.g. "ANMO,PFO").
     location            - Select one or more SEED location identifiers.
                             Multiple identifiers are comma-separated (e.g. "00,01").
                             As a special case '--' (two dashes) will be translated to a string of two space
                             characters to match blank location IDs.
     channel             - Select one or more SEED channel codes.
                             Multiple codes are comma-separated (e.g. "BHZ,HHZ").
     includerestricted   - default is False
     minlatitude         - Limit to stations with a latitude larger than the specified minimum.
     maxlatitude         - Limit to stations with a latitude smaller than the specified maximum.
     minlongitude        - Limit to stations with a longitude larger than the specified minimum.
     maxlongitude        - Limit to stations with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to be used for a radius search.
     minradius           - Limit to stations within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to stations within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     =======================================================================================================
     """
     try:
         starttime = obspy.core.utcdatetime.UTCDateTime(startdate)
     except:
         starttime = None
     try:
         endtime = obspy.core.utcdatetime.UTCDateTime(enddate)
     except:
         endtime = None
     try:
         startbefore = obspy.core.utcdatetime.UTCDateTime(startbefore)
     except:
         startbefore = None
     try:
         startafter = obspy.core.utcdatetime.UTCDateTime(startafter)
     except:
         startafter = None
     try:
         endbefore = obspy.core.utcdatetime.UTCDateTime(endbefore)
     except:
         endbefore = None
     try:
         endafter = obspy.core.utcdatetime.UTCDateTime(endafter)
     except:
         endafter = None
     client = Client('IRIS')
     init_flag = True
     with open(infname, 'r') as fio:  # text mode, so the parsed codes are str rather than bytes
         for line in fio.readlines():
             network = line.split()[1]
             station = line.split()[2]
             if network == 'NET':
                 continue
             # print network, station
             if init_flag:
                 try:
                     inv     = client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, startbefore=startbefore, startafter=startafter,\
                                 endbefore=endbefore, endafter=endafter, channel=channel, minlatitude=minlatitude, maxlatitude=maxlatitude, \
                                     minlongitude=minlongitude, maxlongitude=maxlongitude, latitude=latitude, longitude=longitude, minradius=minradius, \
                                         maxradius=maxradius, level='channel', includerestricted=includerestricted)
                 except:
                     print('No station inv: ', line)
                     continue
                 init_flag = False
                 continue
             try:
                 inv     += client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, startbefore=startbefore, startafter=startafter,\
                             endbefore=endbefore, endafter=endafter, channel=channel, minlatitude=minlatitude, maxlatitude=maxlatitude, \
                                 minlongitude=minlongitude, maxlongitude=maxlongitude, latitude=latitude, longitude=longitude, minradius=minradius, \
                                     maxradius=maxradius, level='channel', includerestricted=includerestricted)
             except:
                 print('No station inv: ', line)
                 continue
     self.add_stationxml(inv)
     try:
         self.inv += inv
     except:
         self.inv = inv
     return
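# The expected layout of infname can be inferred from the parsing above:
# whitespace-separated columns with the network code in column 2 and the
# station code in column 3, plus a header row whose network field is 'NET':
#
#   0  NET  STA
#   1  TA   A04A
#   2  TA   B05A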
def fdsnws2directory():
    '''
    Much like fdsnws2geomag, but designed purely to fetch data from the FDSN-WS
    and store it according to the structure found on geomagnetic daqs servers.

    These structures vary depending on the source but can be customized by
    input argument.

    Filenames can not be customized, since they follow a strict naming
    convention.

    The convention can be found in the pygeomag/data/formats directory.
    '''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument(
        '--directory',
        default=DEFAULT_DIRECTORY,
        help=
        'Output directory with optional datetime parameter as accept by python datetime (default: %s).'
        % DEFAULT_DIRECTORY)
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station',
                        default='*',
                        help='Station code (default: *)')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s \
            %(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream = stream.merge_by_location().trim(starttime, endtime)

    # Loop through the list of streams and generate the unique list of station
    # codes.  We know the network code is constant and it's a single
    # sampling-rate request.
    stations = set([trace.stats.station for trace in stream])

    # Convert the directory format string to a full path
    directory = starttime.strftime(args.directory)
    logging.info("Creating directory %s if it does not exist", directory)
    pathlib.Path(directory).mkdir(parents=True, exist_ok=True)

    for station in stations:
        # Extract the station I need
        extract = stream.select(station=station)
        # Generate its filename (depends on the format)
        if args.format in ['iaga2002']:
            filename = pygeomag.data.formats.iaga2002.get_filename(
                extract[0].stats)
        elif args.format in ['imfv122']:
            filename = pygeomag.data.formats.imfv122.get_filename(
                extract[0].stats)
        else:
            raise ValueError(
                "Unable to generate filename for unhandled format %s" %
                args.format)
        filename = os.path.join(directory, filename)
        logging.info("Writing magnetic data to %s", filename)
        extract.write(filename, format=args.format, inventory=inventory)
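# Hypothetical invocation (the script name, URL, and network are assumptions):
#   python fdsnws2directory.py --url http://fdsnws.example.org \
#       --date 2020-01-01 --network C2 --station '*' --format iaga2002 --verbose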
Example #16
###########################################################

from obspy import read_inventory, read_events
from obspy.clients.fdsn.client import Client
from obspy.core.event.catalog import Catalog
from obspy.core.inventory.network import Network
from obspy.core.inventory.inventory import Inventory
from obspy.core import UTCDateTime
from datetime import timedelta
from mpl_toolkits.basemap import Basemap
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt

iris = Client("IRIS")
t2 = UTCDateTime.now()
t1 = t2 - timedelta(days=30)

cat = Catalog()
cat2 = Catalog()

try:
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
                           latitude=YOUR_LATITUDE,
                           longitude=YOUR_LONGITUDE,
                           maxradius=15)
except:
    pass  # no matching events in the window; leave the catalog empty
Example #17
def runwphase(output_dir=None,
              server: Optional[Union[Client, str]] = None,
              greens_functions_dir=settings.GREENS_FUNCTIONS,
              n_workers_in_pool=settings.WORKER_COUNT,
              processing_level=3,
              output_dir_can_exist=False,
              user=None,
              password=None,
              **kwargs) -> model.WPhaseResult:
    """
    Run wphase.

    :param greens_functions_dir: The Green's functions directory.
    :param output_dir: Full file path to the output directory. **DO NOT USE
        RELATIVE PATHS**.
    :param n_workers_in_pool: Number of processors to use (default
        :py:data:`wphase.settings.WORKER_COUNT`, which specifies as many as
        is reasonable).
    :param processing_level: Processing level.
    :param output_dir_can_exist: Can the output directory already exist?
    """

    client: Optional[
        Client] = None  # can be None if inv+waveform files are provided
    if isinstance(server, Client):
        client = server
    elif isinstance(server, str):
        if server.lower() == 'antelope':
            raise Exception('Antelope is no longer supported.')
        client = Client(server, user=user, password=password)

    # Make the output directory (fails if it already exists).
    if output_dir:
        logger.debug("Creating output directory %s", output_dir)
        try:
            os.makedirs(output_dir)
        except OSError as e:
            if e.errno != errno.EEXIST or not output_dir_can_exist:
                raise

    wphase_results = wphase_runner(output_dir, client, greens_functions_dir,
                                   n_workers_in_pool, processing_level,
                                   **kwargs)

    wphase_results.HostName = settings.HOST_NAME
    wphase_results.DataSource = client.base_url if client else "local files"

    # save the results if output_dir provided
    if output_dir:
        try:
            # TODO: Should this be done in runwphase?
            with open(os.path.join(output_dir, settings.OUTPUT_FILE_NAME),
                      'w') as of:
                print(wphase_results.json(indent=2), file=of)
        except Exception as e:
            # not sure how we would get here, but we just don't want
            # to stop the rest of processing
            logger.exception("Failed dumping result to JSON.")

    # re-raise any errors from the dark side
    if wphase_results.Error:
        raise Exception(wphase_results.StackTrace)

    return wphase_results
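# Minimal usage sketch (server choice, output path, and extra kwargs are
# assumptions; event selection presumably arrives via **kwargs):
result = runwphase(output_dir='/tmp/wphase_out',
                   server='IRIS',
                   output_dir_can_exist=True)
print(result.DataSource)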
Example #18
 def __init__(self, stream):
     self.stream = stream
     client = Client("IRIS")
     self.inv = client.get_stations(network=stream.traces[0].stats.network,
                                    station=stream.traces[0].stats.station,
                                    level='response')
Example #19
 def download_body_waveforms(self, outdir, fskip=False, client_name='IRIS', minDelta=30, maxDelta=150, channel_rank=['BH', 'HH'],\
         phase='P', startoffset=-30., endoffset=60.0, verbose=False, rotation=True, startdate=None, enddate=None):
     """Download body wave data from IRIS server
     ====================================================================================================================
     ::: input parameters :::
     outdir          - output directory
     fskip           - flag for download/overwrite
                         False   - overwrite
                         True    - skip upon existence
     min/maxDelta    - minimum/maximum epicentral distance, in degree
     channel_rank    - rank of channel types
     phase           - body wave phase to be downloaded, arrival time will be computed using taup
     start/endoffset - start and end offset for downloaded data
     rotation        - rotate the seismogram to RT or not
     =====================================================================================================================
     """
     if not os.path.isdir(outdir):
         os.makedirs(outdir)
     client          = Client(client_name)
     ievent          = 0
     Ntrace          = 0
     try:
         stime4down  = obspy.core.utcdatetime.UTCDateTime(startdate)
     except:
         stime4down  = obspy.UTCDateTime(0)
     try:
         etime4down  = obspy.core.utcdatetime.UTCDateTime(enddate)
     except:
         etime4down  = obspy.UTCDateTime()
     print('[%s] [DOWNLOAD BODY WAVE] Start downloading body wave data' %datetime.now().isoformat().split('.')[0])
     try:
         print (self.cat)
     except AttributeError:
         self.copy_catalog()
     for event in self.cat:
         event_id        = event.resource_id.id.split('=')[-1]
         pmag            = event.preferred_magnitude()
         magnitude       = pmag.mag
         Mtype           = pmag.magnitude_type
         event_descrip   = event.event_descriptions[0].text+', '+event.event_descriptions[0].type
         porigin         = event.preferred_origin()
         otime           = porigin.time
         if otime < stime4down or otime > etime4down:
             continue
         ievent          += 1
         try:
             print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0] + \
                         'Event ' + str(ievent)+': '+ str(otime)+' '+ event_descrip+', '+Mtype+' = '+str(magnitude))
         except:
             print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0] + \
                 'Event ' + str(ievent)+': '+ str(otime)+' '+ event_descrip+', M = '+str(magnitude))
         evlo            = porigin.longitude
         evla            = porigin.latitude
         try:
             evdp        = porigin.depth/1000.
         except:
             continue
         evstr           = '%s' %otime.isoformat()
         outfname        = outdir + '/' + evstr+'.mseed'
         logfname        = outdir + '/' + evstr+'.log'
         # check file existence
         if os.path.isfile(outfname):
             if fskip:
                 if os.path.isfile(logfname):
                     os.remove(logfname)
                     os.remove(outfname)
                 else:
                     continue
             else:
                 os.remove(outfname)
                 if os.path.isfile(logfname):
                     os.remove(logfname)
         elif os.path.isfile(logfname):
             try:
                 with open(logfname, 'r') as fid:
                     logflag     = fid.readline().split()[0][:4]
                 if logflag == 'DONE' and fskip:
                     continue
             except:
                 pass 
         # initialize log file
         with open(logfname, 'w') as fid:
             fid.writelines('DOWNLOADING\n')
         out_stream      = obspy.Stream()
         itrace          = 0
         for staid in self.waveforms.list():
             netcode, stacode    = staid.split('.')
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 tmppos          = self.waveforms[staid].coordinates
             stla                = tmppos['latitude']
             stlo                = tmppos['longitude']
             elev                = tmppos['elevation_in_m']
             elev                = elev/1000.
             az, baz, dist       = geodist.inv(evlo, evla, stlo, stla)
             dist                = dist/1000.
             if baz<0.:
                 baz             += 360.
             Delta               = obspy.geodetics.kilometer2degrees(dist)
             if Delta<minDelta:
                 continue
             if Delta>maxDelta:
                 continue
             arrivals            = taupmodel.get_travel_times(source_depth_in_km=evdp, distance_in_degree=Delta, phase_list=[phase])#, receiver_depth_in_km=0)
             try:
                 arr             = arrivals[0]
                 arrival_time    = arr.time
                 rayparam        = arr.ray_param_sec_degree
             except IndexError:
                 continue
             starttime           = otime + arrival_time + startoffset
             endtime             = otime + arrival_time + endoffset
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 location        = self.waveforms[staid].StationXML[0].stations[0].channels[0].location_code
             # determine type of channel
             channel_type        = None
             for tmpch_type in channel_rank:
                 channel         = '%sE,%sN,%sZ' %(tmpch_type, tmpch_type, tmpch_type)
                 try:
                     st          = client.get_waveforms(network=netcode, station=stacode, location=location, channel=channel,
                                         starttime=starttime, endtime=endtime, attach_response=True)
                     if len(st) >= 3:
                         channel_type= tmpch_type
                         break
                 except:
                     pass
             if channel_type is None:
                 if verbose:
                     print ('--- No data for:', staid)
                 continue
             pre_filt            = (0.04, 0.05, 20., 25.)
             st.detrend()
             try:
                 st.remove_response(pre_filt=pre_filt, taper_fraction=0.1)
             except ValueError:
                 print ('!!! ERROR with response removal for:', staid)
                 continue 
             if rotation:
                 try:
                     st.rotate('NE->RT', back_azimuth=baz)
                 except:
                     continue
             if verbose:
                 print ('--- Getting data for:', staid)
             # append stream
             out_stream  += st
             itrace      += 1
             Ntrace      += 1
         # save data to miniseed
         if itrace != 0:
             out_stream.write(outfname, format = 'mseed', encoding = 'FLOAT64')
             os.remove(logfname) # delete log file
         else:
             with open(logfname, 'w') as fid:
                 fid.writelines('DONE\n')
         print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0]+\
               'Event ' + str(ievent)+': downloaded %d traces' %itrace)
     print('[%s] [DOWNLOAD BODY WAVE] All done' %datetime.now().isoformat().split('.')[0] + ' %d events, %d traces' %(ievent, Ntrace))
     return
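# Usage sketch (assumptions: `dset` is an instance of the ASDF-based dataset
# class holding self.cat and self.waveforms; paths and dates are illustrative):
dset.download_body_waveforms('/data/body_waves', phase='P',
                             minDelta=30, maxDelta=90,
                             startdate='2010-01-01', enddate='2011-01-01')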
Example #20
 def get_events(self, startdate, enddate, add2dbase=True, gcmt=False, Mmin=5.5, Mmax=None,
         minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, latitude=None, longitude=None,\
         minradius=None, maxradius=None, mindepth=None, maxdepth=None, magnitudetype=None, outquakeml=None):
     """Get earthquake catalog from IRIS server
     =======================================================================================================
     ::: input parameters :::
     startdate, enddate  - start/end date for searching
     Mmin, Mmax          - minimum/maximum magnitude for searching                
     minlatitude         - Limit to events with a latitude larger than the specified minimum.
     maxlatitude         - Limit to events with a latitude smaller than the specified maximum.
     minlongitude        - Limit to events with a longitude larger than the specified minimum.
     maxlongitude        - Limit to events with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to be used for a radius search.
     minradius           - Limit to events within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to events within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     mindepth            - Limit to events with depth, in kilometers, larger than the specified minimum.
     maxdepth            - Limit to events with depth, in kilometers, smaller than the specified maximum.
     magnitudetype       - Specify a magnitude type to use for testing the minimum and maximum limits.
     =======================================================================================================
     """
     starttime   = obspy.core.utcdatetime.UTCDateTime(startdate)
     endtime     = obspy.core.utcdatetime.UTCDateTime(enddate)
     if not gcmt:
         client  = Client('IRIS')
         try:
             catISC      = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=Mmin, maxmagnitude=Mmax, catalog='ISC',
                             minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                             latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, mindepth=mindepth,
                             maxdepth=maxdepth, magnitudetype=magnitudetype)
             endtimeISC  = catISC[0].origins[0].time
         except:
             catISC      = obspy.core.event.Catalog()
             endtimeISC  = starttime
         if endtime.julday-endtimeISC.julday >1:
             try:
                 catPDE  = client.get_events(starttime=endtimeISC, endtime=endtime, minmagnitude=Mmin, maxmagnitude=Mmax, catalog='NEIC PDE',
                             minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                             latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, mindepth=mindepth,
                             maxdepth=maxdepth, magnitudetype=magnitudetype)
                 catalog = catISC+catPDE
             except:
                 catalog = catISC
         else:
             catalog     = catISC
         outcatalog      = obspy.core.event.Catalog()
         # check magnitude
         for event in catalog:
             if event.magnitudes[0].mag < Mmin:
                 continue
             outcatalog.append(event)
     else:
         # Updated the URL on Jul 25th, 2020
         gcmt_url_old    = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/jan76_dec17.ndk'
         gcmt_new        = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/NEW_MONTHLY'
         if starttime.year < 2005:
             print('--- Loading catalog: '+gcmt_url_old)
             cat_old     = obspy.read_events(gcmt_url_old)
             if Mmax != None:
                 cat_old = cat_old.filter("magnitude <= %g" %Mmax)
             if maxlongitude != None:
                 cat_old = cat_old.filter("longitude <= %g" %maxlongitude)
             if minlongitude != None:
                 cat_old = cat_old.filter("longitude >= %g" %minlongitude)
             if maxlatitude != None:
                 cat_old = cat_old.filter("latitude <= %g" %maxlatitude)
             if minlatitude != None:
                 cat_old = cat_old.filter("latitude >= %g" %minlatitude)
             if maxdepth != None:
                 cat_old = cat_old.filter("depth <= %g" %(maxdepth*1000.))
             if mindepth != None:
                 cat_old = cat_old.filter("depth >= %g" %(mindepth*1000.))
             temp_stime  = obspy.core.utcdatetime.UTCDateTime('2018-01-01')
             outcatalog  = cat_old.filter("magnitude >= %g" %Mmin, "time >= %s" %str(starttime), "time <= %s" %str(endtime) )
         else:
             outcatalog      = obspy.core.event.Catalog()
             temp_stime      = copy.deepcopy(starttime)
             temp_stime.day  = 1
         while (temp_stime < endtime):
             year            = temp_stime.year
             month           = temp_stime.month
             yearstr         = str(int(year))[2:]
             monstr          = monthdict[month]
             monstr          = monstr.lower()
             if year==2005 and month==6:
                 monstr      = 'june'
             if year==2005 and month==7:
                 monstr      = 'july'
             if year==2005 and month==9:
                 monstr      = 'sept'
             gcmt_url_new    = gcmt_new+'/'+str(int(year))+'/'+monstr+yearstr+'.ndk'
             try:
                 cat_new     = obspy.read_events(gcmt_url_new, format='ndk')
                 print('--- Loading catalog: '+gcmt_url_new)
             except:
                 print('--- Link not found: '+gcmt_url_new)
                 break
             cat_new         = cat_new.filter("magnitude >= %g" %Mmin, "time >= %s" %str(starttime), "time <= %s" %str(endtime) )
             if Mmax != None:
                 cat_new     = cat_new.filter("magnitude <= %g" %Mmax)
             if maxlongitude != None:
                 cat_new     = cat_new.filter("longitude <= %g" %maxlongitude)
             if minlongitude!=None:
                 cat_new     = cat_new.filter("longitude >= %g" %minlongitude)
             if maxlatitude!=None:
                 cat_new     = cat_new.filter("latitude <= %g" %maxlatitude)
             if minlatitude!=None:
                 cat_new     = cat_new.filter("latitude >= %g" %minlatitude)
             if maxdepth != None:
                 cat_new     = cat_new.filter("depth <= %g" %(maxdepth*1000.))
             if mindepth != None:
                 cat_new     = cat_new.filter("depth >= %g" %(mindepth*1000.))
             outcatalog      += cat_new
             try:
                 temp_stime.month    +=1
             except:
                 temp_stime.year     +=1
                 temp_stime.month    = 1
     try:
         self.cat    += outcatalog
     except:
         self.cat    = outcatalog
     if add2dbase:
         self.add_quakeml(outcatalog)
     if outquakeml is not None:
         self.cat.write(outquakeml, format='quakeml')
     return
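# Usage sketch on the same dataset class (names and values are illustrative):
dset.get_events(startdate='2011-01-01', enddate='2012-01-01',
                Mmin=5.5, gcmt=True, outquakeml='events.xml')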
def fdsnws2geomag():
    '''Convert fdsnws query to geomagnetic data file'''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['internet', 'iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument('--output',
                        default=sys.stdout,
                        help='Output file (default: stdout).')
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station', required=True, help='Station code')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s '
               '%(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time. We also merge by
    # location.
    logging.info("Writing information to %s", str(args.output))
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream.merge_by_location().trim(starttime,
                                    endtime).write(args.output,
                                                   format=args.format,
                                                   inventory=inventory)
Example #22
from obspy.clients.fdsn.client import Client
import obspy

client = Client('IRIS')
starttime = obspy.core.utcdatetime.UTCDateTime('2011-12-01')
endtime = obspy.core.utcdatetime.UTCDateTime('2011-12-31')
cat = client.get_events(starttime=starttime,
                        endtime=endtime,
                        minmagnitude=5.5,
                        catalog='ISC',
                        magnitudetype='mb')
print(cat)
Example #23
 def test_to_pyrocko_events(self):
     from obspy.clients.fdsn.client import Client
     client = Client('IRIS')
     cat = client.get_events(eventid=609301)
     events = cat.to_pyrocko_events()
     self.assertEqual(len(events), len(cat))
Example #24
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
#from sklearn.metrics import roc_curve, auc
from obspy.clients.fdsn.client import Client
from obspy.core import UTCDateTime
#
working_dir = '/Users/ljyi/Desktop/SYS6018/final_project'
os.chdir(working_dir)

# =========================== Data Exploration 1. =============================
# prepare variables for data downloading
client = Client('USGS')
start_time = UTCDateTime("1990-01-01T00:00:00")
end_time = UTCDateTime("1991-01-01T00:00:00")  # "2018-11-01T00:00:00"

# set start time and end time to collect data
t1 = start_time
t2 = end_time

# download earthquake data
#cat = client.get_events(starttime=t1, endtime=t2, minmagnitude=5.5)
cat = client.get_events(starttime=t1, endtime=t2, minmagnitude=4.5)

# save data into an easier-to-use format
grid_size = 5
lat_list = []
lon_list = []
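
The snippet is cut off before the gridding step. A hypothetical continuation (not the original code) that collects epicentres and bins them into grid_size-degree cells could look like this:

# Hypothetical continuation, not the original code: bin epicentres into
# grid_size-degree cells and count events per cell.
for ev in cat:
    origin = ev.preferred_origin() or ev.origins[0]
    lat_list.append(origin.latitude)
    lon_list.append(origin.longitude)

lat_bins = np.arange(-90, 90 + grid_size, grid_size)
lon_bins = np.arange(-180, 180 + grid_size, grid_size)
counts, _, _ = np.histogram2d(lat_list, lon_list, bins=[lat_bins, lon_bins])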
Example #25
def get_iris_event_data(bulk, folder, timestr, dataless, event):
    from obspy import UTCDateTime
    from obspy.clients.fdsn.client import Client
    #from obspy.fdsn import Client
    from os import path
    from numpy import nan, isnan
    from mapping_tools import distance
    '''
    Code to extract IRIS data, one station at a time. Exports MiniSEED files
    to the working directory.

    datetime tuple fmt = (Y, m, d, H, M)
    sta = station
    '''

    fdsn_client = Client("IRIS")
    #client = Client("IRIS")
    sta = []
    #st = client.get_waveforms_bulk(bulk)
    for b in bulk:
        try:
            fname = '.'.join((timestr, b[0], b[1], 'mseed'))
            fpath = path.join(folder, fname.replace(':', '.'))

            staloc = nan
            # first, check if site is in distance and azimuthal range
            for channel in ['SHZ', 'EHZ', 'BHZ', 'HHZ', 'BNZ', 'HNZ']:
                if b[0] == 'WRAB':
                    locCode = '10'
                else:
                    locCode = '00'
                seedid = '.'.join(
                    (b[0], b[1], locCode, channel))  # e.g., 'AU.DPH.00.BNZ'
                try:
                    staloc = dataless.get_coordinates(seedid, b[4])
                except Exception:
                    pass  # fall through and try the blank location code
                seedid = '.'.join(
                    (b[0], b[1], '', channel))  # e.g., 'AU.DPH..BNZ'
                try:
                    staloc = dataless.get_coordinates(seedid, b[4])
                except Exception:
                    pass  # staloc stays nan if both lookups fail

            # now get distance and azimuth
            rngkm, az, baz = distance(event['lat'], event['lon'],
                                      staloc['latitude'], staloc['longitude'])
            print(rngkm, az, baz)

            getRecord = False
            if rngkm <= 2000. and az > 130. and az < 230.:
                getRecord = True
            elif rngkm <= 2000. and az > 120. and az < 240. and b[1] == 'RABL':
                getRecord = True
            elif rngkm <= 2000. and az > 120. and az < 240. and b[1] == 'PMG':
                getRecord = True

            # second, check if file exists
            #print(path.isfile(fpath), getRecord)
            if not path.isfile(fpath) and getRecord:
                bulk2 = [(b[0], b[1], b[2], "*", b[4], b[5])]  #,
                print('B2', bulk2)
                #                         ("AU", "AFI", "1?", "BHE",  b[4], b[5])]
                client = Client("IRIS")
                #st = client.get_waveforms_bulk(bulk2)
                st = client.get_waveforms(b[0], b[1], b[2], "*", b[4], b[5])
                '''
                 st = fdsn_client.get_waveforms(network=b[0], station=b[1], location=b[2],
                                                channel=b[3], starttime=b[4], endtime=b[5],
                                                attach_response=True)
                 '''
                #print(st[0].stats.location)
                st = st.merge(method=0, fill_value='interpolate')
                sta += st

                print('Writing file: ' + fpath)
                st.write(fpath, format="MSEED")
            else:
                print('File exists:', fpath)
            #return st
        except Exception:
            print('No data for', b[0], b[1])

    return sta
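
A hedged usage sketch for get_iris_event_data: the station, times and epicentre below are illustrative, and dataless is assumed to be an object exposing get_coordinates(seedid, time), e.g. an obspy.io.xseed Parser:

# Illustrative bulk entry matching the (network, station, location, channel,
# starttime, endtime) tuples consumed above; all values are hypothetical.
from obspy import UTCDateTime

event = {'lat': -6.1, 'lon': 147.8}  # hypothetical epicentre
bulk = [('AU', 'DPH', '00', 'BHZ',
         UTCDateTime('2015-03-29T23:48:31'),
         UTCDateTime('2015-03-30T00:48:31'))]
# sta = get_iris_event_data(bulk, 'mseed_dump', '2015-03-29T2348', dataless, event)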
Example #26
def makeStationList(client_list,
                    min_lat,
                    max_lat,
                    min_lon,
                    max_lon,
                    start_time,
                    end_time,
                    channel_list=[],
                    filter_network=[],
                    filter_station=[]):
    """
        Uses fdsn to find availave stations in a specific geographical location and time period. 

        Parameters
        ----------
           client_list : str
               List of client names e.g. ["IRIS", "SCEDC", "USGGS"].
           minlat : float
               Min latitude of the region.
           maxlat : float
               Max latitude of the region.
           minlon : float
               Min longitude of the region.
           maxlon : float
               Max longitude of the region.           
           start_time : str
               Start dateitme for the beginging of period in "YYYY-MM-DDThh:mm:ss.f" format.
           end_time : str
               End dateitme in "YYYY-MM-DDThh:mm:ss.f" format.          
           channel_list : str, default [] --> all channels
               A list containting the desired channel codes. Downloads will be limited to these channels based on priority. 
           filter_network : str, default []
               A list containting the network codes that needs to be avoided.               
           filter_station : str, default []
               A list containting the station names that needs to be avoided.        
        Generates
        -------
        ./stations_list.json
                    
     """
    station_list = {}
    for cl in client_list:
        inventory = Client(cl).get_stations(minlatitude=min_lat,
                                            maxlatitude=max_lat,
                                            minlongitude=min_lon,
                                            maxlongitude=max_lon,
                                            starttime=UTCDateTime(start_time),
                                            endtime=UTCDateTime(end_time),
                                            level='channel')

        for network in inventory:
            net = network.code
            if net not in filter_network:
                for st in network:
                    station = st.code
                    print(str(net) + "--" + str(station))

                    if station not in filter_station:
                        elv = st.elevation
                        lat = st.latitude
                        lon = st.longitude
                        new_chan = [ch.code for ch in st.channels]
                        if len(channel_list) > 0:
                            chan_priority = [ch[:2] for ch in channel_list]

                            for chnn in chan_priority:
                                if chnn in [ch[:2] for ch in new_chan]:
                                    new_chan = [
                                        ch for ch in new_chan if ch[:2] == chnn
                                    ]

# =============================================================================
#                      if ("BHZ" in new_chan) and ("HHZ" in new_chan):
#                          new_chan = [ch for ch in new_chan if ch[:2] != "BH"]
#                      if ("HHZ" in new_chan) and ("HNZ" in new_chan):
#                          new_chan = [ch for ch in new_chan if ch[:2] != "HH"]
#
#                          if len(new_chan)>3 and len(new_chan)%3 != 0:
#                              chan_type = [ch for ch in new_chan if ch[2] == 'Z']
#                              chan_groups = []
#                              for i, cht in enumerate(chan_type):
#                                  chan_groups.append([ch for ch in new_chan if ch[:2] == cht[:2]])
#                              new_chan2 = []
#                              for chg in chan_groups:
#                                  if len(chg) == 3:
#                                      new_chan2.append(chg)
#                              new_chan = new_chan2
# =============================================================================
                        if len(new_chan) > 0 and (station not in station_list):
                            station_list[str(station)] = {
                                "network": net,
                                "channels": list(set(new_chan)),
                                "coords": [lat, lon, elv]
                            }

    with open('station_list.json', 'w') as fp:
        json.dump(station_list, fp)
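
A hedged usage sketch for makeStationList; the region, time window and channel priorities are illustrative values, not taken from the original source:

# Example call; writes ./station_list.json in the current working directory.
makeStationList(client_list=["IRIS"],
                min_lat=-39.0, max_lat=-10.0,
                min_lon=110.0, max_lon=155.0,
                start_time="2010-01-01T00:00:00.00",
                end_time="2011-01-01T00:00:00.00",
                channel_list=["HHZ", "BHZ"])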
Example #27
import numpy as np

from obspy.core import UTCDateTime
from obspy.core.stream import Stream


from getwave import getWave

import csv

#initialise IRIS client to query if the desired channel is not in our database.
#this client needs to be initialised here because the initialiser spawns multiple threads.
#this is forbidden on import in Python 2 so it cannot be initialised in the getwave module
from obspy.clients.fdsn.client import Client
from obspy.geodetics.base import gps2dist_azimuth as distaz
irisclient = Client("IRIS")
# load ISC pick catalogue CSV

saveDir = "/g/data/ha3/rlt118/neural-datasets/categoriser-teleseismic/smallset/"

Sctr = 0
wfctr = 0
with open('/g/data/ha3/Passive/Events/BabakHejrani/ISC.csv') as ISCpicks:
    pickrdr = csv.reader(ISCpicks, delimiter=',')
    event = ""
    for pick in pickrdr:
        st = pick[0].strip()
        if st == '#ISC':  # store event metadata then don't try to process this as a pick
            event = pick
            evlat = float(event[6].strip())
            evlong = float(event[7].strip())
Example #28
    # cut off microseconds
    date = UTCDateTime(input.partition(".")[0])

    # create working dir folder respective to the event datetime
    working_dir = os.path.join(cfg.output_dir, date.strftime("%Y%m%d%H%M%S"))

    if os.path.exists(working_dir):
        logging.info("Working directory already exists, thus event (" +
                     str(input) + ") is already calculated! Exiting...")

    else:
        logging.info("Creating working directory (" + working_dir + ")...")
        os.makedirs(working_dir)

        logging.info("Connecting to FDSN Server:  " + cfg.fdsnws_url + "...")
        fdsn = Client(cfg.fdsnws_url)

        if cfg.sds_url:
            logging.info("Connecting to SDS archive:  " + cfg.sds_url + "...")
            sds = Client_SDS(cfg.sds_url)

        logging.info("Retrieving event's info...")

        # get events info
        # catalog object is returned and
        # the first element -event object- is retrieved
        # get the event between 1 second from
        # the input event time
        # if two events occur between 1 second
        # it will just get the first one
        event = fdsn.get_events(
Example #29
def makeStationList(json_path,
                    client_list,
                    min_lat,
                    max_lat,
                    min_lon,
                    max_lon,
                    start_time,
                    end_time,
                    channel_list=[],
                    filter_network=[],
                    filter_station=[],
                    **kwargs):
    """
    
    Uses fdsn to find available stations in a specific geographical location and time period.  

    Parameters
    ----------
    json_path: str
        Path of the json file that will be returned

    client_list: list
        List of client names e.g. ["IRIS", "SCEDC", "USGGS"].
                                
    min_lat: float
        Min latitude of the region.
        
    max_lat: float
        Max latitude of the region.
        
    min_lon: float
        Min longitude of the region.
        
    max_lon: float
        Max longitude of the region.
        
    start_time: str
        Start DateTime for the beginning of the period in "YYYY-MM-DDThh:mm:ss.f" format.
        
    end_time: str
        End DateTime for the beginning of the period in "YYYY-MM-DDThh:mm:ss.f" format.
        
    channel_list: str, default=[]
        A list containing the desired channel codes. Downloads will be limited to these channels based on priority. Defaults to [] --> all channels
        
    filter_network: str, default=[]
        A list containing the network codes that need to be avoided. 
        
    filter_station: str, default=[]
        A list containing the station names that need to be avoided.

    kwargs: 
        special symbol for passing Client.get_stations arguments

    Returns
    ----------
    stations_list.json: A dictionary containing information for the available stations.      
        
     """

    station_list = {}
    for cl in client_list:
        inventory = Client(cl).get_stations(minlatitude=min_lat,
                                            maxlatitude=max_lat,
                                            minlongitude=min_lon,
                                            maxlongitude=max_lon,
                                            starttime=UTCDateTime(start_time),
                                            endtime=UTCDateTime(end_time),
                                            level='channel',
                                            **kwargs)

        for network in inventory:
            net = network.code
            if net not in filter_network:
                for st in network:
                    station = st.code
                    print(str(net) + "--" + str(station))

                    if station not in filter_station:

                        elv = st.elevation
                        lat = st.latitude
                        lon = st.longitude
                        new_chan = [ch.code for ch in st.channels]
                        if len(channel_list) > 0:
                            chan_priority = [ch[:2] for ch in channel_list]

                            for chnn in chan_priority:
                                if chnn in [ch[:2] for ch in new_chan]:
                                    new_chan = [
                                        ch for ch in new_chan if ch[:2] == chnn
                                    ]

# =============================================================================
#                      if ("BHZ" in new_chan) and ("HHZ" in new_chan):
#                          new_chan = [ch for ch in new_chan if ch[:2] != "BH"]
#                      if ("HHZ" in new_chan) and ("HNZ" in new_chan):
#                          new_chan = [ch for ch in new_chan if ch[:2] != "HH"]
#
#                          if len(new_chan)>3 and len(new_chan)%3 != 0:
#                              chan_type = [ch for ch in new_chan if ch[2] == 'Z']
#                              chan_groups = []
#                              for i, cht in enumerate(chan_type):
#                                  chan_groups.append([ch for ch in new_chan if ch[:2] == cht[:2]])
#                              new_chan2 = []
#                              for chg in chan_groups:
#                                  if len(chg) == 3:
#                                      new_chan2.append(chg)
#                              new_chan = new_chan2
# =============================================================================

                        if len(new_chan) > 0 and (station not in station_list):
                            station_list[str(station)] = {
                                "network": net,
                                "channels": list(set(new_chan)),
                                "coords": [lat, lon, elv]
                            }
    json_dir = os.path.dirname(json_path)
    if json_dir and not os.path.exists(json_dir):
        os.makedirs(json_dir)
    with open(json_path, 'w') as fp:
        json.dump(station_list, fp)
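
A hedged usage sketch for this json_path variant; extra keyword arguments (here a hypothetical network constraint) are forwarded verbatim to Client.get_stations:

# Example call; writes the station list to json/station_list.json.
makeStationList("json/station_list.json", ["IRIS"],
                min_lat=-39.0, max_lat=-10.0,
                min_lon=110.0, max_lon=155.0,
                start_time="2010-01-01T00:00:00.00",
                end_time="2011-01-01T00:00:00.00",
                network="AU")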
Example #30
    def create_SG2K_initiate(self, event, quake_df):

        # Launch the custom station/component selection dialog
        sel_dlg = selectionDialog(parent=self,
                                  sta_list=self.station_list,
                                  chan_list=self.channel_codes)
        if sel_dlg.exec_():
            select_sta, select_comp = sel_dlg.getSelected()

            # specify output directory for miniSEED files
            temp_seed_out = os.path.join(os.path.dirname(self.cat_filename),
                                         event)

            # create directory
            if os.path.exists(temp_seed_out):
                shutil.rmtree(temp_seed_out)
            os.mkdir(temp_seed_out)

            query_time = UTCDateTime(quake_df['qtime'] - (10 * 60)).timestamp

            trace_starttime = UTCDateTime(quake_df['qtime'] - (5 * 60))
            trace_endtime = UTCDateTime(quake_df['qtime'] + (15 * 60))

            # Create a Stream object to put data into
            # st = Stream()
            # Create a dictionary to put traces into (keys are tr_ids)
            st_dict = defaultdict(list)

            print('---------------------------------------')
            print('Finding Data for Earthquake: ' + event)

            if os.path.splitext(self.db_filename)[1] == ".db":
                # run SQL query
                for matched_entry in self.session.query(Waveforms). \
                        filter(or_(and_(Waveforms.starttime <= query_time, query_time < Waveforms.endtime),
                                   and_(query_time <= Waveforms.starttime, Waveforms.starttime < query_time + 30 * 60)),
                               Waveforms.station.in_(select_sta),
                               Waveforms.component.in_(select_comp)):
                    print(matched_entry.ASDF_tag)

                    # read in the data to obspy
                    temp_st = read(
                        os.path.join(matched_entry.path,
                                     matched_entry.waveform_basename))

                    # modify network header
                    temp_tr = temp_st[0]
                    temp_tr.stats.network = matched_entry.new_network

                    # st.append(temp_tr)
                    st_dict[temp_tr.get_id()].append(temp_tr)

            if os.path.splitext(self.db_filename)[1] == ".json":
                # run python dictionary query
                for key, matched_entry in self.network_dict.items():
                    if ((matched_entry['starttime'] <= query_time < matched_entry['endtime'] or
                         query_time <= matched_entry['starttime'] < query_time + (30 * 60)) and
                            matched_entry['station'] in select_sta and
                            matched_entry['component'] in select_comp):
                        print(matched_entry['ASDF_tag'])  # , os.path.join(matched_entry['path'], key))

                        # read in the data to obspy
                        temp_st = read(os.path.join(matched_entry['path'],
                                                    key))

                        # modify network header
                        temp_tr = temp_st[0]
                        temp_tr.stats.network = matched_entry['new_network']

                        # trim trace to start and endtime
                        temp_tr.trim(starttime=trace_starttime,
                                     endtime=trace_endtime)

                        # st.append(temp_tr)
                        st_dict[temp_tr.get_id()].append(temp_tr)

            # free memory
            temp_st = None
            temp_tr = None

            if st_dict:

                print('')
                print('Merging Traces from %s Stations....' % len(st_dict))
                # Attempt to merge all traces with matching ID'S (same keys in dict) in place
                # st.merge()

                for key in list(st_dict.keys()):  # copy: keys may be deleted below
                    if len(st_dict[key]) > 1:
                        temp_st = Stream(traces=st_dict[key])
                        # merge in place
                        # print('\tMerging %s in Stream:' % temp_st.count())
                        temp_st.merge()
                        # assign trace back to dictionary key if there is data
                        if temp_st:
                            print("Station {0} has {1} Seconds of data".format(
                                key, temp_st[0].stats.endtime -
                                temp_st[0].stats.starttime))
                            st_dict[key] = temp_st[0]
                        else:
                            print("No Data for: %s" % key)
                            # no data for station delete key
                            del st_dict[key]
                            continue
                    elif len(st_dict[key]) == 1:
                        print("Station {0} has {1} Seconds of data".format(
                            key, st_dict[key][0].stats.endtime -
                            st_dict[key][0].stats.starttime))
                        st_dict[key] = st_dict[key][0]
                    elif len(st_dict[key]) == 0:
                        # no data for station delete key
                        print("No Data for: %s" % key)
                        del st_dict[key]

                print(
                    '\nTrimming Traces to 20 mins around earthquake time....')

                # now trim the st object to 5 mins
                # before query time and 15 minutes afterwards

                for key in st_dict.keys():

                    st_dict[key] = st_dict[key].trim(starttime=trace_starttime,
                                                     endtime=trace_endtime,
                                                     pad=True,
                                                     fill_value=0)

                # st.trim(starttime=trace_starttime, endtime=trace_endtime, pad=True, fill_value=0)

                try:
                    # write traces into temporary directory
                    # for tr in st:
                    for key in st_dict.keys():
                        if isinstance(st_dict[key], Stream):
                            # there is a problem with network codes (two stations named the same)
                            # ignore it for now
                            continue
                        st_dict[key].write(os.path.join(
                            temp_seed_out, st_dict[key].get_id() + ".MSEED"),
                                           format="MSEED")
                    print("\nWrote Temporary MiniSEED data to: " +
                          temp_seed_out)
                    print('')
                except Exception:
                    print("Something Went Wrong!")

            else:
                print("No Data for Earthquake!")

            # free memory
            st_dict = None

            # Now requesting reference station data from IRIS if desired
            if self.ref_radioButton.isChecked():
                ref_dir = os.path.join(temp_seed_out, 'ref_data')

                # create ref directory
                if os.path.exists(ref_dir):
                    shutil.rmtree(ref_dir)
                os.mkdir(ref_dir)

                # request stations that are close to the selected stations

                # first use the coords lists to get a bounding box for array
                def calc_bounding_box(x, y):
                    min_x, max_x = (min(x), max(x))
                    min_y, max_y = (min(y), max(y))

                    return (min_x, max_x, min_y, max_y)

                bb = calc_bounding_box(self.station_coords[0],
                                       self.station_coords[1])

                # request data for near earthquake time up to 5 degrees from bounding box of array
                print(
                    '\nRequesting Waveform Data from Nearby Permanent Network Stations....'
                )

                client = Client("IRIS")
                self.ref_inv = client.get_stations(
                    network="AU",
                    starttime=UTCDateTime(quake_df['qtime'] - (5 * 60)),
                    endtime=UTCDateTime(quake_df['qtime'] + (15 * 60)),
                    minlongitude=bb[0] - 2,
                    maxlongitude=bb[1] + 2,
                    minlatitude=bb[2] - 2,
                    maxlatitude=bb[3] + 2,
                    level='channel')

                print(self.ref_inv)

                ref_st = Stream()

                # go through inventory and request timeseries data
                for net in self.ref_inv:
                    for stn in net:
                        try:
                            ref_st += client.get_waveforms(
                                network=net.code,
                                station=stn.code,
                                channel='*',
                                location='*',
                                starttime=UTCDateTime(quake_df['qtime'] -
                                                      (5 * 60)),
                                endtime=UTCDateTime(quake_df['qtime'] +
                                                    (15 * 60)))
                        except FDSNException:
                            print(
                                'No Data for Earthquake from Reference Station: '
                                + stn.code)

                        else:
                            # plot the reference stations
                            js_call = "addRefStation('{station_id}', {latitude}, {longitude});" \
                                .format(station_id=stn.code, latitude=stn.latitude,
                                        longitude=stn.longitude)
                            self.web_view.page().mainFrame(
                            ).evaluateJavaScript(js_call)

                try:
                    # write ref traces into temporary directory
                    for tr in ref_st:
                        tr.write(os.path.join(ref_dir, tr.id + ".MSEED"),
                                 format="MSEED")
                    print("Wrote Reference MiniSEED data to: " + ref_dir)
                    print('\nEarthquake Data Query Done!!!')
                except Exception:
                    print("Something Went Wrong Writing Reference Data!")

                self.ref_inv.write(os.path.join(ref_dir, "ref_metadata.xml"),
                                   format="STATIONXML")
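
The defaultdict bookkeeping in create_SG2K_initiate reduces to a reusable pattern: group traces by id, merge each group, and keep only the non-empty results. A standalone sketch (merge_by_id is a hypothetical helper, not part of the original class):

# Sketch of the merge-by-trace-id pattern used above.
from collections import defaultdict
from obspy import Stream

def merge_by_id(traces):
    groups = defaultdict(list)
    for tr in traces:
        groups[tr.get_id()].append(tr)
    merged = {}
    for key, trs in groups.items():
        st = Stream(traces=trs)
        st.merge()
        if st:  # keep only ids that still have data after merging
            merged[key] = st[0]
    return merged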