def generateStationTestData(sta):

    time_range = (UTCDateTime(TIME_RANGE[0]), UTCDateTime(TIME_RANGE[1]))

    client = Client("IRIS")
    inv = client.get_stations(network=NETWORK,
                              station=sta,
                              channel=CHANNEL,
                              starttime=time_range[0],
                              endtime=time_range[1],
                              level='channel')
    print(inv)

    traces = client.get_waveforms(network=NETWORK,
                                  station=sta,
                                  channel=CHANNEL,
                                  location='*',
                                  starttime=time_range[0],
                                  endtime=time_range[1])
    print(traces)

    outfile = 'test_data_' + sta + '.h5'
    asdf_out = pyasdf.ASDFDataSet(outfile, mode='w')
    asdf_out.add_stationxml(inv)
    asdf_out.add_waveforms(traces, TAG)

    print("Saved data to " + outfile)
    def queryByBBoxInterval(self, outputFileName, bbox, timeinterval, chan='*Z', bbpadding=2,
                            event_id=None, verbose=False):
        """ Time interval is a tuple (starttime,endtime)
        """
        assert len(timeinterval) == 2, "timeinterval must be a tuple of ascending timestamps. len=" + str(
            len(timeinterval)) + " " + str(timeinterval)

        query_ds = pyasdf.ASDFDataSet(outputFileName)

        client = Client(self._client)
        ref_inv = client.get_stations(network=self._network,
                                      starttime=UTCDateTime(timeinterval[0]),
                                      endtime=UTCDateTime(timeinterval[1]),
                                      minlongitude=bbox[0] - bbpadding,
                                      maxlongitude=bbox[1] + bbpadding,
                                      minlatitude=bbox[2] - bbpadding,
                                      maxlatitude=bbox[3] + bbpadding,
                                      level='channel')

        if verbose:
            print(ref_inv)

        ref_st = Stream()

        # go through inventory and request timeseries data
        for net in ref_inv:
            for stn in net:
                stime = UTCDateTime(timeinterval[0])
                etime = UTCDateTime(timeinterval[1])
                step = 3600*24*10
                while stime + step < etime:
                    try:
                        ref_st = client.get_waveforms(network=net.code, station=stn.code,
                                                      channel=chan, location='*',
                                                      starttime=stime,
                                                      endtime=stime+step)
                        print(ref_st)
                        self.ref_stations.append(net.code + '.' + stn.code)
                        st_inv = ref_inv.select(station=stn.code, channel=chan)
                        
                        query_ds.add_stationxml(st_inv)
                        for tr in ref_st:
                            query_ds.add_waveforms(tr, "reference_station")
                    except FDSNException:
                        print('Data not available from Reference Station: ' + stn.code)
                    # end try
                    stime += step
                #wend
        # end for

        #tr.write(os.path.join(os.path.dirname(outputFileName), tr.id + ".MSEED"),
        #         format="MSEED") # Don't write miniseed
        if verbose:
            print("Wrote Reference Waveforms to ASDF file: " + outputFileName)
            print('\nWaveform data query completed.')

        metaOutputFileName = os.path.join(os.path.dirname(outputFileName),
                                          'meta.%s.xml'%(os.path.basename(outputFileName)))
        ref_inv.write(metaOutputFileName, format="STATIONXML")
        del query_ds
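
        # A minimal usage sketch (hypothetical caller). Note the bbox ordering
        # implied by the query above: (minlon, maxlon, minlat, maxlat).
        #
        #   self.queryByBBoxInterval('AU_subset.h5',
        #                            bbox=(130.0, 140.0, -30.0, -20.0),
        #                            timeinterval=('2012-10-01T00:00:00',
        #                                          '2012-10-02T00:00:00'),
        #                            chan='*Z', verbose=True)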
def getXmlFromIRIS(network=None):
    if network is None:
        return
    client = Client("IRIS")
    # Use level=response to get channel information...
    # ...which holds the location code necessary for station referencing
    inv = client.get_stations(network=network, level='response')
    inv.write('./' + network + '.xml', format='stationxml', validate=True)
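
# A minimal usage sketch (hypothetical network code):
#
#   getXmlFromIRIS('IU')   # writes ./IU.xml with response-level metadata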
Example #5
def get_arclink_event_data(bulk, fname, dataless, event):
    from obspy.core.utcdatetime import UTCDateTime
    try:
        from obspy.arclink.client import Client
    except:
        from obspy.clients.arclink.client import Client
        #from obspy.clients.fdsn.client import Client
    from os import path
    from numpy import nan, isnan
    from mapping_tools import distance
    '''
    Code to extract waveform data via ArcLink, one station at a time.
    Exports an mseed file to the working directory.

    datetime tuple fmt = (Y,m,d,H,M)
    sta = station
    '''
    st = None
    try:
        # first, check if the site is in distance and azimuthal range
        for channel in ['SHZ', 'EHZ', 'BHZ', 'HHZ', 'BNZ', 'HNZ']:
            seedid = '.'.join((bulk[0], bulk[1], '00', channel))  # e.g. 'AU.DPH.00.BNZ'
            try:
                staloc = dataless.get_coordinates(seedid, bulk[4])
            except:
                pass  # keep trying other seed id formats
            # try another seed id fmt
            seedid = '.'.join((bulk[0], bulk[1], '', channel))  # e.g. 'AU.DPH..BNZ'
            try:
                staloc = dataless.get_coordinates(seedid, bulk[4])
            except:
                pass  # keep trying other seed id formats

        # now get distance and azimuth
        rngkm, az, baz = distance(event['lat'], event['lon'],
                                  staloc['latitude'], staloc['longitude'])
        #print(rngkm, az, baz)
        print('arclink', seedid)
        getRecord = False
        if rngkm <= 2000. and az > 110. and az < 250.:
            getRecord = True
        elif rngkm <= 50.:
            getRecord = True

        # check if file already exists
        if not path.isfile(fname) and getRecord:
            print('Getting:', fname)
            client = Client(user='******')
            st = client.get_waveforms(bulk[0], bulk[1], bulk[2], bulk[3],
                                      bulk[4], bulk[5])
            st = st.merge(method=0, fill_value='interpolate')
            # only write when a new record was actually fetched
            print('Writing file:', fname)
            st.write(fname, format="MSEED")
    except:
        print('No data for:', fname)

    return st
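
# A minimal usage sketch (hypothetical values). 'bulk' follows the
# (network, station, location, channel, starttime, endtime) convention used
# above; 'dataless' must expose get_coordinates() (e.g. an obspy dataless
# SEED Parser) and 'event' holds the origin coordinates.
#
#   from obspy import UTCDateTime
#   bulk = ('AU', 'DPH', '00', 'BNZ',
#           UTCDateTime(2012, 10, 1, 1, 0), UTCDateTime(2012, 10, 1, 2, 0))
#   event = {'lat': -20.0, 'lon': 135.0}
#   st = get_arclink_event_data(bulk, 'AU.DPH.mseed', dataless, event)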
Example #6
        def _get_client(client_name):
            # It might already be an initialized client - in that case just
            # use it.
            if isinstance(client_name, Client):
                name, client = client_name.base_url, client_name
            else:
                try:
                    this_client = Client(client_name, debug=self.debug)
                    name, client = client_name, this_client
                except utils.ERRORS as e:
                    if "timeout" in str(e).lower():
                        extra = " (timeout)"
                    else:
                        extra = ""
                    logger.warn("Failed to initialize client '%s'.%s" %
                                (client_name, extra))
                    return client_name, None

            services = sorted([
                _i for _i in client.services.keys()
                if not _i.startswith("available")
            ])
            if "dataselect" not in services or "station" not in services:
                logger.info("Cannot use client '%s' as it does not have "
                            "'dataselect' and/or 'station' services." % name)
                return name, None
            return name, client
Example #7
 def get_stations(self, startdate=None, enddate=None,  network=None, station=None, location=None, channel=None,
         minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, latitude=None, longitude=None, minradius=None, maxradius=None):
     """Get station inventory from IRIS server
     =======================================================================================================
     Input Parameters:
     startdate, enddata  - start/end date for searching
     network             - Select one or more network codes.
                             Can be SEED network codes or data center defined codes.
                                 Multiple codes are comma-separated (e.g. "IU,TA").
     station             - Select one or more SEED station codes.
                             Multiple codes are comma-separated (e.g. "ANMO,PFO").
     location            - Select one or more SEED location identifiers.
                             Multiple identifiers are comma-separated (e.g. "00,01").
                             As a special case ?--? (two dashes) will be translated to a string of two space
                             characters to match blank location IDs.
     channel             - Select one or more SEED channel codes.
                             Multiple codes are comma-separated (e.g. "BHZ,HHZ").             
     minlatitude         - Limit to events with a latitude larger than the specified minimum.
     maxlatitude         - Limit to events with a latitude smaller than the specified maximum.
     minlongitude        - Limit to events with a longitude larger than the specified minimum.
     maxlongitude        - Limit to events with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to the used for a radius search.
     minradius           - Limit to events within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to events within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     =======================================================================================================
     """
     try: starttime      = obspy.core.utcdatetime.UTCDateTime(startdate)
     except: starttime   = None
     try: endtime        = obspy.core.utcdatetime.UTCDateTime(enddate)
     except: endtime     = None
     client  = Client('IRIS')
     inv     = client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, channel=channel, 
                 minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                     latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, level='channel')
     self.add_stationxml(inv)
     try: self.inv       += inv
     except: self.inv    = inv
     return 
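
      # A minimal usage sketch (assumes the containing class is a pyasdf-based
      # dataset exposing add_stationxml(), as used above):
      #
      #   ds.get_stations(startdate='2011-01-01', enddate='2011-12-31',
      #                   network='IU', station='ANMO', channel='BHZ')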
Example #8
 def _get_client(client_name):
     try:
         this_client = Client(client_name, debug=self.debug)
     except utils.ERRORS as e:
         if "timeout" in str(e).lower():
             extra = " (timeout)"
         else:
             extra = ""
         logger.warn("Failed to initialize client '%s'.%s"
                     % (client_name, extra))
         return client_name, None
     services = sorted([_i for _i in this_client.services.keys()
                        if not _i.startswith("available")])
     if "dataselect" not in services or "station" not in services:
         logger.info("Cannot use client '%s' as it does not have "
                     "'dataselect' and/or 'station' services."
                     % client_name)
         return client_name, None
     return client_name, this_client
Example #9
import matplotlib as mpl
import numpy as np
from scipy.signal import hilbert
import matplotlib.animation as animation
# imports needed by the snippet below
from obspy import UTCDateTime, Stream
from obspy.clients.fdsn import Client

Writer = animation.writers['pillow']
writer = Writer(fps=20, metadata=dict(artist='Me'), bitrate=1800)

sta = 'R32B'
loc = '00'
net = 'N4'
chan = 'HHZ'

stime = UTCDateTime('2019-08-16 12:59:10')
etime = stime + 120

client = Client()
inv = client.get_stations(network=net,
                          station=sta,
                          starttime=stime,
                          endtime=etime,
                          channel=chan,
                          level="response")
st = Stream()
st += client.get_waveforms(net, sta, loc, chan, stime, etime)

st.detrend('constant')
st.merge(fill_value=0)
st.attach_response(inv)
st.remove_response(output="DISP")
#st.rotate(method="->ZNE",inventory=inv)
st.filter("bandpass", freqmin=.5, freqmax=5)
Example #10
#Parallelised autopick harvester. We have like a million picks so this is the only way to go about it
import sys

from seismic.ASDFdatabase.FederatedASDFDataSet import FederatedASDFDataSet
from seismic.ml_classifier.data_harvester.autopicks import pickLoaderRand

from obspy.clients.fdsn.client import Client
ic = Client("IRIS")
fds = FederatedASDFDataSet(
    '/g/data/ha3/Passive/SHARED_DATA/Index/asdf_files.txt',
    variant='db',
    use_json_db=True,
    logger=None)

import numpy as np

pl = pickLoaderRand(fds, ic)

import multiprocessing as mp

nproc = mp.cpu_count()
print(nproc)


def lockInit(l):
    global lock
    lock = l


l = mp.Lock()
pool = mp.Pool(processes=nproc, initializer=lockInit, initargs=(l, ))
import pytest
from obspy.clients.fdsn import Client
from obspy.clients.fdsn.header import FDSNException


def iris_client():
    """Return the IRIS client, skipping the test if the service is unreachable."""
    try:
        return Client()
    except FDSNException:
        pytest.skip("could not connect to webservice")
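
# A minimal sketch of using iris_client() in a test (hypothetical test body):
#
#   def test_get_events_smoke():
#       client = iris_client()   # skips the test if IRIS is unreachable
#       cat = client.get_events(eventid=609301)
#       assert len(cat) == 1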
Example #12
#this one does teleseismic (>10 degrees) S-wave picks from the ISC catalogue

from mat4py import *
import numpy as np

from obspy.core import UTCDateTime

from getwave import getWave

import csv

#initialise IRIS client to query if the desired channel is not in our database.
#this client needs to be initialised here because the initialiser spawns multiple threads.
#this is forbidden on import in Python 2 so it cannot be initialised in the getwave module
from obspy.clients.fdsn.client import Client
irisclient=Client("IRIS")

#load ISC pick catalogue, stored in 4 separate .mat files

Sctr=0
wfctr=0
with open('/g/data/ha3/Passive/Events/BabakHejrani/ISC.csv') as ISCpicks:
    pickrdr=csv.reader(ISCpicks,delimiter=',')
    for pick in pickrdr:
        st=pick[0].strip()
        if st=='#ISC': #ignore event metadata
            continue
        ph=pick[7].strip()
        dist=float(pick[-1].strip())
        if ph=='S' and dist > 10:  # S-wave picks, per the header comment (the original tested 'P')
            Sctr+=1
Example #13
    st.merge(fill_value=0)

    ## Read in metadata ##
    sp = Parser()
    if net == 'XX':
        print(st[0].id)
        for tr in st:
            if tr.stats.channel == 'LDO':
                continue
            else:
                stri = tr.id
                inv = read_inventory('/home/aalejandro/Pressure/RESP/RESP.' +
                                     stri)
                st.attach_response(inv)
    else:
        client = Client('IRIS')
        inv = client.get_stations(network=net,
                                  station=sta,
                                  starttime=stime,
                                  endtime=etime,
                                  channel="LH*",
                                  level='response')
    if debug:
        print(inv)

    # Convert to velocity
    st.attach_response(inv)

    # We now have all the data that is either LH or LDO
    if debug:
        print(inv)
Example #14
###########################################################

from obspy import read_inventory, read_events
from obspy.clients.fdsn.client import Client
from obspy.core.event.catalog import Catalog
from obspy.core.inventory.network import Network
from obspy.core.inventory.inventory import Inventory
from obspy.core import UTCDateTime
from datetime import timedelta
from mpl_toolkits.basemap import Basemap
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt

iris = Client("IRIS")
t2 = UTCDateTime.now()
t1 = t2 - timedelta(days=30)

cat = Catalog()
cat2 = Catalog()

try:
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
                           latitude=YOUR_LATITUDE,
                           longitude=YOUR_LONGITUDE,
                           maxradius=15)
except:
    pass
Example #15
import pyasdf
from obspy.clients.fdsn.client import Client
from obspy.clients.fdsn.header import FDSNException
from obspy import UTCDateTime
import os

starttime = UTCDateTime("2012-10-01T01:00:00")
endtime = UTCDateTime("2012-10-02T01:00:00")

output_file = "/g/data/ha3/US_test.h5"

temp_sta = "249A"
perm_sta = "255A"

client = Client("IRIS")

ref_inv = client.get_stations(network="TA",
                              station=perm_sta,
                              channel="BHZ",
                              starttime=starttime,
                              endtime=endtime,
                              level='channel')

print(ref_inv)

temp_inv = client.get_stations(network="TA",
                               station=temp_sta,
                               channel="BHZ",
                               starttime=starttime,
                               endtime=endtime,
                               level='channel')
def fdsnws2geomag():
    '''Convert fdsnws query to geomagnetic data file'''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['internet', 'iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument('--output',
                        default=sys.stdout,
                        help='Output file (default: stdout).')
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station', required=True, help='Station code')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s \
            %(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time. We also merge by
    # location.
    logging.info("Writing information to %s", str(args.output))
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream.merge_by_location().trim(starttime,
                                    endtime).write(args.output,
                                                   format=args.format,
                                                   inventory=inventory)
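
# A minimal sketch of a command-line invocation (hypothetical entry point and
# values; the DEFAULT_* constants come from the surrounding module):
#
#   python fdsnws2geomag.py --station OTT --date 2019-01-01 \
#       --format iaga2002 --output OTT.iaga2002 --verbose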
class Downloader:

    def __init__(self,df,station,outdir):

        self.station = station
        self.data = df
        self.out = outdir
        # self.summary = [] # list to hold all tr_ids
        # print(self.data)
#           Resets indexing of DataFrame

        # print('{}/{}_downloaded_streams.txt'.format(outdir,outdir.split('/')[-1]))
        try:
            #print('Make /Users/ja17375/Shear_Wave_Splitting/Data/SAC_files/{}'.format(station))
            os.mkdir('{}/{}'.format(self.out,station))
        except FileExistsError:
            print('It already exists, Hooray! Less work for me!')

            # pass
    #   Made

        #self.outfile = open('/Users/ja17375/Shear_Wave_Splitting/Data/SAC_files/{}/{}_downloaded_streams_Jacks_Split.txt'.format(station,station),'w+')

        self.attempts = 0 # counter for how many downloads were attempted
        self.fdsnx = 0 # counter for how many attempts hit a FDSNNoDataException
        self.dwn = 0 # counter for how many events were downloaded
        self.ex = 0 # counter for how many events already exist in the filesystem and therefore weren't downloaded
        self.ts = 0 # counter for events whose traces are too short
        self.fdsnclient_evt = Client('IRIS') # separate client for events (hopefully to get around the "no event available" bug)
        self.fdsnclient = Client('IRIS')
#       Download Station Data

    def download_station_data(self):
        """
        Download or read important station data and make sure it is right
        """
        try:
            stat =  self.fdsnclient.get_stations(channel='BH?',station='{}'.format(self.station))
            self.network = stat.networks[0].code
            self.stla = stat.networks[0].stations[0].latitude
            self.stlo = stat.networks[0].stations[0].longitude
            # print(self.network)
            return True
        except FDSNNoDataException:
            return False

    def set_event_data(self,i,sep):
        """
        Function to download event information so we can get mroe accurate start times
        """
        self.evla = self.data.EVLA[i]
        self.evlo = self.data.EVLO[i]
        if sep is False:

            self.date = self.data.DATE[i]
            if 'TIME' in self.data.columns:
                self.time = self.data.TIME[i]
            else:
                self.time = '0000'

            datetime = str(self.date) + "T" + self.time # combined date and time inputs for conversion to a UTCDateTime object
            self.start = obspy.core.UTCDateTime(datetime)

            try:
                if 'TIME' in self.data.columns:
                    end = self.start + 60
                    print('Search starts {} , ends at {}'.format(self.start,end))
                    cat = self.fdsnclient_evt.get_events(starttime=self.start,endtime=self.start+86400 ,latitude=self.evla,longitude=self.evlo,maxradius=0.25,minmag=5.5) #Get event in order to get more accurate event times.
                    # self.time = '{:02d}{:02d}{:02d}'.format(cat[0].origins[0].time.hour,cat[0].origins[0].time.minute,cat[0].origins[0].time.second)
                else:
                    # No Time so we need to search over the whole day
                    end = self.start + 86400

                    print('Search starts {} , ends at {}'.format(self.start,end))
                    cat = self.fdsnclient_evt.get_events(starttime=self.start,endtime=self.start+86400 ,latitude=self.evla,longitude=self.evlo,maxradius=0.25,minmag=5.5) #Get event in order to get more accurate event times.
                if len(cat) > 1:
                    print("WARNING: MORE THAN ONE EVENT OCCURS WITHIN 5km Search!!")
                    print('Selecting Event with the largest magnitude')
                    # Select biggest magnitude
                    max_mag = max(event.magnitudes[0].mag for event in cat)
                    cat = cat.filter('magnitude >= {}'.format(max_mag))
                    print(cat)

                self.time = '{:02d}{:02d}{:02d}'.format(cat[0].origins[0].time.hour,cat[0].origins[0].time.minute,cat[0].origins[0].time.second)
                self.start.minute = cat[0].origins[0].time.minute
                self.start.hour = cat[0].origins[0].time.hour
                print(self.time)

                self.start.second = cat[0].origins[0].time.second

                # Lines commented out as they are only needed if TIME is provided as hhmm (for Deng's events there is
                # no TIME provided so we just have to use the event time downloaded)
                # if self.start.minute != cat[0].origins[0].time.minute:
                #     self.time = self.time[:2] + str(cat[0].origins[0].time.minute) # Time is hhmm so we subtract the old minute value and add the new one

                dep = cat[0].origins[0].depth
                if dep is not None:
                    self.evdp = dep/1000.0 # divide by 1000 to convert depth to km
                else:
                    self.evdp = 10.0 #Hard code depth to 10.0 km if evdp cannot be found
            except FDSNNoDataException:
                print("No Event Data Available")
                self.evdp = 0
            except FDSNException:
                print("FDSNException for get_events")
                # pass
        elif sep is True:
            self.start = obspy.core.UTCDateTime('{}'.format(self.data.DATE[i])) #iso8601=True
            self.date = '{:04d}{:03d}'.format(self.start.year,self.start.julday)
            self.time = '{:02d}{:02d}{:02d}'.format(self.start.hour,self.start.minute,self.start.second)
            self.evdp = self.data.EVDP[i]

    def download_traces(self,ch):
        """
        Function that downloads the traces for a given event and station
        """
        # if len(self.time) is 6:
        print('Start: {}. self.time: {}'.format(self.start,self.time))
        tr_id = "{}/{}/{}_{}_{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,ch)
        # elif len(self.time) is 4:
            # tr_id = "{}/{}/{}_{}_{}{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,self.start.second,ch)
        # print("Looking for :", tr_id)


        if ch == 'BHE':
            self.attempts += 1 # Counts the number of traces that downloads are attempted for

        if os.path.isfile(tr_id):
            print("{} exists. It was not downloaded".format(tr_id)) # file already exists, so skip the download

            if ch == 'BHE':
                out_id = '_'.join(tr_id.split('_')[0:-1])
                self.outfile.write('{}_\n'.format(out_id))
                # self.summary.append(out_id)
                self.ex += 1
        else:
            # print("It doesnt exists. Download attempted")
            st = obspy.core.stream.Stream() # Initialises our stream variable

            if self.network == 'BK':
                download_client = obspy.clients.fdsn.Client('NCEDC')
            else:
                download_client = obspy.clients.fdsn.Client('IRIS')
            try:
                st = download_client.get_waveforms(self.network,self.station,'??',ch,self.start,self.start + 3000,attach_response=True)
                # print(st)
                if len(st) > 3:
                    print("WARNING: More than three traces downloaded for event ", tr_id)
                elif len(st) < 3:
                    self.ts += 1

                dist_client = iris.Client() # Creates client to calculate event - station distance
                print('STLA {} STLO {} EVLA {} EVLO {}'.format(self.stla,self.stlo,self.evla,self.evlo))
                self.d = dist_client.distaz(stalat=self.stla,stalon=self.stlo,evtlat=self.evla,evtlon=self.evlo)
                print('Source-Receiver distance is {}'.format(self.d['distance']))
                # assumed intent: keep the 85-145 degree window (the original
                # test `>= 85.0 or >= 145.0` was always true for distances >= 85)
                if 85.0 <= self.d['distance'] <= 145.0:
                    if st[0].stats.endtime - st[0].stats.starttime >= 2000:
                        # print('Record length is {}, which is ok'.format(st[0].stats.endtime - st[0].stats.starttime))
                        self.write_st(st,tr_id)

                        if ch == 'BHE':
                            self.dwn += 1
                            out_id = '_'.join(tr_id.split('_')[0:-1])
                            self.outfile.write('{}_\n'.format(out_id))
                            # self.summary.append(out_id)
                    else:
                        print('Record length is {}, which is too short'.format(st[0].stats.endtime - st[0].stats.starttime))
                        if ch == 'BHE':
                            self.ts += 1
                else:
                    print("Source-Receiver distance is outside the accepted window")
                    if ch == 'BHE':
                        self.ts += 1
            except FDSNException:
                print('No data available (FDSNException)')
                if ch == 'BHE':
                    self.fdsnx += 1

    def write_st(self,st,tr_id):
        """

        """
        # print('Writing {}'.format(tr_id))
        st[0].write('holder.sac', format='SAC',) # Writes traces as SAC files
        #st.plot()
        st_2 = obspy.core.read('holder.sac')
        #sac = AttribDict() # Creates a dictionary sacd to contain all the header information I want.
        ## Set origin times
        st_2[0].stats.sac.nzyear = self.start.year
        st_2[0].stats.sac.nzjday = self.start.julday
        st_2[0].stats.sac.nzhour = self.start.hour
        st_2[0].stats.sac.nzmin = self.start.minute
        st_2[0].stats.sac.nzsec = self.start.second
        st_2[0].stats.sac.nzmsec = self.start.microsecond
        ## Station Paramters
        st_2[0].stats.sac.stla = self.stla
        st_2[0].stats.sac.stlo = self.stlo
        ## Event Paramters
        st_2[0].stats.sac.evla = self.evla#cat[0].origins[0].latitude # Event latitude
        st_2[0].stats.sac.evlo = self.evlo#cat[0].origins[0].longitude # Event longitude
        st_2[0].stats.sac.evdp = self.evdp#cat[0].origins[0].depth/1000 # Event depth
        st_2[0].stats.sac.kstnm = '{:>8}'.format(self.station)
        # print('stla = {}, stlo = {}, evla = {}, evlo = {}'.format(stla,stlo,evla,evlo))


        st_2[0].stats.sac.gcarc = self.d['distance'] # great circle distance in degrees, from the distaz result
        st_2[0].stats.sac.dist = self.d['distancemeters']/1000 # distance in kilometers
        st_2[0].stats.sac.baz = self.d['backazimuth'] # backazimuth (receiver - source)
        st_2[0].stats.sac.az = self.d['azimuth'] # azimuth (source - receiver)
        st_2[0].write(tr_id, format='SAC',byteorder=1)
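
# A minimal usage sketch (hypothetical paths; the DataFrame is assumed to
# carry the columns referenced above: DATE, TIME, EVLA, EVLO, EVDP):
#
#   import pandas as pd
#   df = pd.read_csv('events.csv')
#   dl = Downloader(df, 'NEW', '/path/to/SAC_files')
#   if dl.download_station_data():
#       for i in range(len(df)):
#           dl.set_event_data(i, sep=False)
#           for ch in ['BHN', 'BHE', 'BHZ']:
#               dl.download_traces(ch)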
Example #18
 def get_events(self, startdate, enddate, add2dbase=True, gcmt=False, Mmin=5.5, Mmax=None,
         minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, latitude=None, longitude=None,\
         minradius=None, maxradius=None, mindepth=None, maxdepth=None, magnitudetype=None, outquakeml=None):
     """Get earthquake catalog from IRIS server
     =======================================================================================================
     ::: input parameters :::
     startdate, enddate  - start/end date for searching
     Mmin, Mmax          - minimum/maximum magnitude for searching                
     minlatitude         - Limit to events with a latitude larger than the specified minimum.
     maxlatitude         - Limit to events with a latitude smaller than the specified maximum.
     minlongitude        - Limit to events with a longitude larger than the specified minimum.
     maxlongitude        - Limit to events with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
      longitude           - Specify the longitude to be used for a radius search.
     minradius           - Limit to events within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to events within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     mindepth            - Limit to events with depth, in kilometers, larger than the specified minimum.
     maxdepth            - Limit to events with depth, in kilometers, smaller than the specified maximum.
     magnitudetype       - Specify a magnitude type to use for testing the minimum and maximum limits.
     =======================================================================================================
     """
     starttime   = obspy.core.utcdatetime.UTCDateTime(startdate)
     endtime     = obspy.core.utcdatetime.UTCDateTime(enddate)
     if not gcmt:
         client  = Client('IRIS')
         try:
             catISC      = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=Mmin, maxmagnitude=Mmax, catalog='ISC',
                             minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                             latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, mindepth=mindepth,
                             maxdepth=maxdepth, magnitudetype=magnitudetype)
             endtimeISC  = catISC[0].origins[0].time
         except:
             catISC      = obspy.core.event.Catalog()
             endtimeISC  = starttime
         if endtime.julday-endtimeISC.julday >1:
             try:
                 catPDE  = client.get_events(starttime=endtimeISC, endtime=endtime, minmagnitude=Mmin, maxmagnitude=Mmax, catalog='NEIC PDE',
                             minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                             latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, mindepth=mindepth,
                             maxdepth=maxdepth, magnitudetype=magnitudetype)
                 catalog = catISC+catPDE
             except:
                 catalog = catISC
         else:
             catalog     = catISC
         outcatalog      = obspy.core.event.Catalog()
         # check magnitude
         for event in catalog:
             if event.magnitudes[0].mag < Mmin:
                 continue
             outcatalog.append(event)
     else:
         # Updated the URL on Jul 25th, 2020
         gcmt_url_old    = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/jan76_dec17.ndk'
         gcmt_new        = 'http://www.ldeo.columbia.edu/~gcmt/projects/CMT/catalog/NEW_MONTHLY'
         if starttime.year < 2005:
             print('--- Loading catalog: '+gcmt_url_old)
             cat_old     = obspy.read_events(gcmt_url_old)
             if Mmax != None:
                 cat_old = cat_old.filter("magnitude <= %g" %Mmax)
             if maxlongitude != None:
                 cat_old = cat_old.filter("longitude <= %g" %maxlongitude)
             if minlongitude != None:
                 cat_old = cat_old.filter("longitude >= %g" %minlongitude)
             if maxlatitude != None:
                 cat_old = cat_old.filter("latitude <= %g" %maxlatitude)
             if minlatitude != None:
                 cat_old = cat_old.filter("latitude >= %g" %minlatitude)
             if maxdepth != None:
                 cat_old = cat_old.filter("depth <= %g" %(maxdepth*1000.))
             if mindepth != None:
                 cat_old = cat_old.filter("depth >= %g" %(mindepth*1000.))
             temp_stime  = obspy.core.utcdatetime.UTCDateTime('2018-01-01')
             outcatalog  = cat_old.filter("magnitude >= %g" %Mmin, "time >= %s" %str(starttime), "time <= %s" %str(endtime) )
         else:
             outcatalog      = obspy.core.event.Catalog()
             temp_stime      = copy.deepcopy(starttime)
             temp_stime.day  = 1
         while (temp_stime < endtime):
             year            = temp_stime.year
             month           = temp_stime.month
             yearstr         = str(int(year))[2:]
             monstr          = monthdict[month]
             monstr          = monstr.lower()
             if year==2005 and month==6:
                 monstr      = 'june'
             if year==2005 and month==7:
                 monstr      = 'july'
             if year==2005 and month==9:
                 monstr      = 'sept'
             gcmt_url_new    = gcmt_new+'/'+str(int(year))+'/'+monstr+yearstr+'.ndk'
             try:
                 cat_new     = obspy.read_events(gcmt_url_new, format='ndk')
                 print('--- Loading catalog: '+gcmt_url_new)
             except:
                 print('--- Link not found: '+gcmt_url_new)
                 break
             cat_new         = cat_new.filter("magnitude >= %g" %Mmin, "time >= %s" %str(starttime), "time <= %s" %str(endtime) )
             if Mmax != None:
                 cat_new     = cat_new.filter("magnitude <= %g" %Mmax)
             if maxlongitude != None:
                 cat_new     = cat_new.filter("longitude <= %g" %maxlongitude)
             if minlongitude!=None:
                 cat_new     = cat_new.filter("longitude >= %g" %minlongitude)
             if maxlatitude!=None:
                 cat_new     = cat_new.filter("latitude <= %g" %maxlatitude)
             if minlatitude!=None:
                 cat_new     = cat_new.filter("latitude >= %g" %minlatitude)
             if maxdepth != None:
                 cat_new     = cat_new.filter("depth <= %g" %(maxdepth*1000.))
             if mindepth != None:
                 cat_new     = cat_new.filter("depth >= %g" %(mindepth*1000.))
             outcatalog      += cat_new
             try:
                 temp_stime.month    +=1
             except:
                 temp_stime.year     +=1
                 temp_stime.month    = 1
     try:
         self.cat    += outcatalog
     except:
         self.cat    = outcatalog
     if add2dbase:
         self.add_quakeml(outcatalog)
     if outquakeml is not None:
         self.cat.write(outquakeml, format='quakeml')
     return
Example #19
def runwphase(output_dir=None,
              server: Union[Client, str] = None,
              greens_functions_dir=settings.GREENS_FUNCTIONS,
              n_workers_in_pool=settings.WORKER_COUNT,
              processing_level=3,
              output_dir_can_exist=False,
              user=None,
              password=None,
              **kwargs) -> model.WPhaseResult:
    """
    Run wphase.

    :param greens_functions_dir: The Green's functions directory.
    :param output_dir: Full file path to the output directory. **DO NOT USE
        RELATIVE PATHS**.
    :param n_workers_in_pool: Number of processors to use (default
        :py:data:`wphase.settings.WORKER_COUNT`, i.e. as many as is
        reasonable).
    :param processing_level: Processing level.
    :param output_dir_can_exist: Can the output directory already exist?
    """

    client: Optional[
        Client] = None  # can be None if inv+waveform files are provided
    if isinstance(server, Client):
        client = server
    elif isinstance(server, str):
        if server.lower() == 'antelope':
            raise Exception('Antelope is no longer supported.')
        client = Client(server, user=user, password=password)

    # Make the output directory (fails if it already exists).
    if output_dir:
        logger.debug("Creating output directory %s", output_dir)
        try:
            os.makedirs(output_dir)
        except OSError as e:
            if e.errno != errno.EEXIST or not output_dir_can_exist:
                raise

    wphase_results = wphase_runner(output_dir, client, greens_functions_dir,
                                   n_workers_in_pool, processing_level,
                                   **kwargs)

    wphase_results.HostName = settings.HOST_NAME
    wphase_results.DataSource = client.base_url if client else "local files"

    # save the results if output_dir provided
    if output_dir:
        try:
            # TODO: Should this be done in runwphase?
            with open(os.path.join(output_dir, settings.OUTPUT_FILE_NAME),
                      'w') as of:
                print(wphase_results.json(indent=2), file=of)
        except Exception as e:
            # not sure how we would get here, but we just don't want
            # to stop the rest of processing
            logger.exception("Failed dumping result to JSON.")

    # re-raise any errors from the dark side
    if wphase_results.Error:
        raise Exception(wphase_results.StackTrace)

    return wphase_results
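
# A minimal usage sketch (hypothetical paths; extra keyword arguments are
# passed through to wphase_runner):
#
#   result = runwphase(output_dir='/tmp/wphase_out', server='IRIS',
#                      output_dir_can_exist=True)
#   print(result.DataSource)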
Example #20
                        fragString += word.capitalize(
                        ) + punct + " "  # capitalize it

                cap = 1
                if not fragString[0].isalpha():
                    cap = 2

                outString += (fragString[:cap].upper() +
                              fragString[cap:]).strip() + " "

    return (outString[:1].upper() + outString[1:]
            ).strip()  # Capitalize first letter and strip trailing space


print('%s%% - Importing IRIS FDSN client...' % (p1))
iris = Client("IRIS")
t2 = UTCDateTime.now()
t2str = t2.strftime('%Y-%m-%d %H:%M UTC')
t1 = t2 - timedelta(days=DURATION)

cat = Catalog()
nrcat = Catalog()
cat2 = Catalog()

####### LOCAL ########

try:
    print('%s%% - Getting local earthquakes within %s degrees from IRIS...' %
          (p2, LOCAL_RADIUS))
    cat += iris.get_events(starttime=t1,
                           endtime=t2,
Example #21
 def __init__(self, stream):
     self.stream = stream
     client = Client("IRIS")
     self.inv = client.get_stations(network=stream.traces[0].stats.network,
                                    station=stream.traces[0].stats.station,
                                    level='response')
Example #22
from obspy.clients.fdsn.client import Client
import obspy

client=Client('IRIS')
starttime=obspy.core.utcdatetime.UTCDateTime('2011-12-01')
endtime=obspy.core.utcdatetime.UTCDateTime('2011-12-31')
cat = client.get_events(starttime=starttime, endtime=endtime, minmagnitude=5.5, catalog='ISC', magnitudetype='mb')
cat
Example #23
from obspy.clients.fdsn.client import Client
import obspy

client = Client('IRIS')
starttime = obspy.core.utcdatetime.UTCDateTime('2011-12-01')
endtime = obspy.core.utcdatetime.UTCDateTime('2011-12-31')
cat = client.get_events(starttime=starttime,
                        endtime=endtime,
                        minmagnitude=5.5,
                        catalog='ISC',
                        magnitudetype='mb')
cat
Example #24
 def test_to_pyrocko_events(self):
     from obspy.clients.fdsn.client import Client
     client = Client('IRIS')
     cat = client.get_events(eventid=609301)
     events = cat.to_pyrocko_events()
     self.assertEqual(len(events), len(cat))
Example #26
def preprocess():
    """
    
    This script preprocesses the MSEED files in the input directories 
    specified in the input file.
 
    
    """


    # Create output directory, if necessary

    outdir = os.path.join('data','processed')
     
    if rank == 0 and not os.path.exists(outdir):
        os.mkdir(outdir)
    if rank == 0 and cfg.verbose:
        print(cfg.__dict__)
    
    comm.Barrier()

    event_filter = None

    if cfg.gcmt_exclude:

        if rank == 0:
            c = Client()
            cata = c.get_events(starttime=UTCDateTime(cfg.gcmt_begin),
                endtime=UTCDateTime(cfg.gcmt_end),catalog='GCMT',
                minmagnitude=5.6)
    
            event_filter = get_event_filter(cata,cfg.Fs_new[-1],
                t0=UTCDateTime(cfg.gcmt_begin),
                t1=UTCDateTime(cfg.gcmt_end))

        
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        event_filter = comm.bcast(event_filter,root=0)
    
    if cfg.event_exclude_local_cat:

        local_cat = Catalog()
        
        if rank == 0:
            c = Client()
            local_cat.extend(c.get_events(
                    starttime=UTCDateTime(cfg.event_exclude_local_cat_begin),
                    endtime=UTCDateTime(cfg.event_exclude_local_cat_end),
                    #catalog=catalog,
                    minmagnitude=cfg.event_exclude_local_cat_minmag,
                    latitude=cfg.event_exclude_local_cat_lat,
                    longitude=cfg.event_exclude_local_cat_lon,
                    maxradius=cfg.event_exclude_local_cat_radius))
            print(len(local_cat),"events in local earthquake catalog.")
        # communicate event_filter (would it be better 
        # if every rank sets it up individually?)
        local_cat = comm.bcast(local_cat,root=0)

    # Create own output directory, if necessary
    rankdir = os.path.join(outdir,
        'rank_%g' %rank)
    if not os.path.exists(rankdir):
        os.mkdir(rankdir)

    
    #- Find input files
    
    content = find_files(cfg.input_dirs,
        cfg.input_format)
    if rank==0:
        print(len(content), "files found") 
    #print(content)

    # processing report file
    sys.stdout.flush()
    output_file = os.path.join(rankdir,
        'processing_report_rank%g.txt' %rank)
    
    if os.path.exists(output_file):
        ofid = open(output_file,'a')
        print('UPDATING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)
    else:
        ofid = open(output_file,'w')
        print('PROCESSING, Date:',file=ofid)
        print(time.strftime('%Y.%m.%dT%H:%M'),file=ofid)


    # select input files for this rank    
    content = content[rank::size]
    if cfg.testrun: # Only 3 files randomly selected
        indices = randint(0,len(content),3)
        content = [content[j] for j in indices]

    # Loop over input files
    for filepath in content:
        
        print('-------------------------------------',file=ofid)
        print('Attempting to process:',file=ofid)
        print(os.path.basename(filepath),file=ofid)
        
        try:
            prstr = PrepStream(filepath,ofid)
        except:
            print('** Problem opening file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        if len(prstr.stream) == 0:
            print('** No data in file, skipping: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue
        
        try:
            prstr.prepare(cfg)
        except:
            print('** Problems preparing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue
            
        try:
            prstr.process(cfg,event_filter,local_cat)
        except:
            print('** Problems processing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)
            continue

        try:
            prstr.write(rankdir,cfg)
        except:
            print('** Problems writing stream: ',file=ofid)
            print('** %s' %filepath,file=ofid)

        ofid.flush()
        
    ofid.close()

    print("Rank %g has completed processing." 
        %rank,file=None)
    
    
    try:
        os.system('mv '+rankdir+'/* '+outdir)
    except:
        pass

    os.system('rmdir '+rankdir)
Example #27
def get_iris_event_data(bulk, folder, timestr, dataless, event):
    from obspy import UTCDateTime
    from obspy.clients.fdsn.client import Client
    #from obspy.fdsn import Client
    from os import path
    from numpy import nan, isnan
    from mapping_tools import distance
    '''
    Code to extract IRIS data, one station at a time.  Exports mseed file to 
    working directory
    
    datetime tuple fmt = (Y,m,d,H,M)
    sta = station
    '''

    fdsn_client = Client("IRIS")
    #client = Client("IRIS")
    sta = []
    #st = client.get_waveforms_bulk(bulk)
    for b in bulk:
        try:
            fname = '.'.join((timestr, b[0], b[1], 'mseed'))
            fpath = path.join(folder, fname.replace(':', '.'))

            staloc = nan
            # first, check if the site is in distance and azimuthal range
            for channel in ['SHZ', 'EHZ', 'BHZ', 'HHZ', 'BNZ', 'HNZ']:
                if b[0] == 'WRAB':
                    locCode = '10'
                else:
                    locCode = '00'
                seedid = '.'.join(
                    (b[0], b[1], locCode, channel))  # e.g., 'AU.DPH.00.BNZ'
                try:
                    staloc = dataless.get_coordinates(seedid, b[4])
                except:
                    pass  # keep trying other seed id formats
                seedid = '.'.join(
                    (b[0], b[1], '', channel))  # e.g., 'AU.DPH..BNZ'
                try:
                    staloc = dataless.get_coordinates(seedid, b[4])
                except:
                    pass  # keep trying other seed id formats

            # now get distance and azimuth
            rngkm, az, baz = distance(event['lat'], event['lon'],
                                      staloc['latitude'], staloc['longitude'])
            print(rngkm, az, baz)

            getRecord = False
            if rngkm <= 2000. and az > 130. and az < 230.:
                getRecord = True
            elif rngkm <= 2000. and az > 120. and az < 240. and b[1] == 'RABL':
                getRecord = True
            elif rngkm <= 2000. and az > 120. and az < 240. and b[1] == 'PMG':
                getRecord = True

            # second, check if file exists
            #print(path.isfile(fpath), getRecord)
            if not path.isfile(fpath) and getRecord:
                bulk2 = [(b[0], b[1], b[2], "*", b[4], b[5])]  #,
                print('B2', bulk2)
                #                         ("AU", "AFI", "1?", "BHE",  b[4], b[5])]
                client = Client("IRIS")
                #st = client.get_waveforms_bulk(bulk2)
                st = client.get_waveforms(b[0], b[1], b[2], "*", b[4], b[5])
                '''
                 st = fdsn_client.get_waveforms(network=b[0], station=b[1], location=b[2],
                                                channel=b[3], starttime=b[4], endtime=b[5],
                                                attach_response=True)
                 '''
                #print(st[0].stats.location)
                st = st.merge(method=0, fill_value='interpolate')
                sta += st

                print('Writing file: ' + fpath)
                st.write(fpath, format="MSEED")
            else:
                print('File exists:', fpath)
            #return st
        except:
            print('No data for', b[0], b[1])

    return sta
Example #28
 def download_body_waveforms(self, outdir, fskip=False, client_name='IRIS', minDelta=30, maxDelta=150, channel_rank=['BH', 'HH'],\
         phase='P', startoffset=-30., endoffset=60.0, verbose=False, rotation=True, startdate=None, enddate=None):
     """Download body wave data from IRIS server
     ====================================================================================================================
     ::: input parameters :::
     outdir          - output directory
      fskip           - flag for download/overwrite
                         False   - overwrite
                         True    - skip upon existence
     min/maxDelta    - minimum/maximum epicentral distance, in degree
     channel_rank    - rank of channel types
     phase           - body wave phase to be downloaded, arrival time will be computed using taup
     start/endoffset - start and end offset for downloaded data
     rotation        - rotate the seismogram to RT or not
     =====================================================================================================================
     """
     if not os.path.isdir(outdir):
         os.makedirs(outdir)
     client          = Client(client_name)
     ievent          = 0
     Ntrace          = 0
     try:
         stime4down  = obspy.core.utcdatetime.UTCDateTime(startdate)
     except:
         stime4down  = obspy.UTCDateTime(0)
     try:
         etime4down  = obspy.core.utcdatetime.UTCDateTime(enddate)
     except:
         etime4down  = obspy.UTCDateTime()
     print('[%s] [DOWNLOAD BODY WAVE] Start downloading body wave data' %datetime.now().isoformat().split('.')[0])
     try:
         print (self.cat)
     except AttributeError:
         self.copy_catalog()
     for event in self.cat:
         event_id        = event.resource_id.id.split('=')[-1]
         pmag            = event.preferred_magnitude()
         magnitude       = pmag.mag
         Mtype           = pmag.magnitude_type
         event_descrip   = event.event_descriptions[0].text+', '+event.event_descriptions[0].type
         porigin         = event.preferred_origin()
         otime           = porigin.time
         if otime < stime4down or otime > etime4down:
             continue
         ievent          += 1
         try:
             print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0] + \
                         'Event ' + str(ievent)+': '+ str(otime)+' '+ event_descrip+', '+Mtype+' = '+str(magnitude))
         except:
             print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0] + \
                 'Event ' + str(ievent)+': '+ str(otime)+' '+ event_descrip+', M = '+str(magnitude))
         evlo            = porigin.longitude
         evla            = porigin.latitude
         try:
             evdp        = porigin.depth/1000.
         except:
             continue
         evstr           = '%s' %otime.isoformat()
         outfname        = outdir + '/' + evstr+'.mseed'
         logfname        = outdir + '/' + evstr+'.log'
         # check file existence
         if os.path.isfile(outfname):
             if fskip:
                 if os.path.isfile(logfname):
                     os.remove(logfname)
                     os.remove(outfname)
                 else:
                     continue
             else:
                 os.remove(outfname)
                 if os.path.isfile(logfname):
                     os.remove(logfname)
         elif os.path.isfile(logfname):
             try:
                 with open(logfname, 'r') as fid:
                     logflag     = fid.readline().split()[0][:4]
                 if logflag == 'DONE' and fskip:
                     continue
             except Exception:
                 pass 
         # initialize log file
         with open(logfname, 'w') as fid:
             fid.writelines('DOWNLOADING\n')
         out_stream      = obspy.Stream()
         itrace          = 0
         for staid in self.waveforms.list():
             netcode, stacode    = staid.split('.')
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 tmppos          = self.waveforms[staid].coordinates
             stla                = tmppos['latitude']
             stlo                = tmppos['longitude']
             elev                = tmppos['elevation_in_m']
             elev                = elev/1000.
             az, baz, dist       = geodist.inv(evlo, evla, stlo, stla)
             dist                = dist/1000.
             if baz<0.:
                 baz             += 360.
             Delta               = obspy.geodetics.kilometer2degrees(dist)
             if Delta<minDelta:
                 continue
             if Delta>maxDelta:
                 continue
             arrivals            = taupmodel.get_travel_times(source_depth_in_km=evdp, distance_in_degree=Delta, phase_list=[phase])#, receiver_depth_in_km=0)
             try:
                 arr             = arrivals[0]
                 arrival_time    = arr.time
                 rayparam        = arr.ray_param_sec_degree
             except IndexError:
                 continue
             starttime           = otime + arrival_time + startoffset
             endtime             = otime + arrival_time + endoffset
             with warnings.catch_warnings():
                 warnings.simplefilter("ignore")
                 location        = self.waveforms[staid].StationXML[0].stations[0].channels[0].location_code
             # determine type of channel
             channel_type        = None
             for tmpch_type in channel_rank:
                 channel         = '%sE,%sN,%sZ' %(tmpch_type, tmpch_type, tmpch_type)
                 try:
                     st          = client.get_waveforms(network=netcode, station=stacode, location=location, channel=channel,
                                         starttime=starttime, endtime=endtime, attach_response=True)
                     if len(st) >= 3:
                         channel_type= tmpch_type
                         break
                 except Exception:  # no data for this channel type; try the next
                     pass
             if channel_type is None:
                 if verbose:
                     print ('--- No data for:', staid)
                 continue
             pre_filt            = (0.04, 0.05, 20., 25.)
             st.detrend()
             try:
                 st.remove_response(pre_filt=pre_filt, taper_fraction=0.1)
             except ValueError:
                 print ('!!! ERROR with response removal for:', staid)
                 continue 
             if rotation:
                 try:
                     st.rotate('NE->RT', back_azimuth=baz)
                 except Exception:  # components missing; skip this station
                     continue
             if verbose:
                 print ('--- Getting data for:', staid)
             # append stream
             out_stream  += st
             itrace      += 1
             Ntrace      += 1
         # save data to miniseed
         if itrace != 0:
             out_stream.write(outfname, format = 'mseed', encoding = 'FLOAT64')
             os.remove(logfname) # delete log file
         else:
             with open(logfname, 'w') as fid:
                 fid.writelines('DONE\n')
         print('[%s] [DOWNLOAD BODY WAVE] ' %datetime.now().isoformat().split('.')[0]+\
               'Event ' + str(ievent)+': downloaded %d traces' %itrace)
     print('[%s] [DOWNLOAD BODY WAVE] All done' %datetime.now().isoformat().split('.')[0] + ' %d events, %d traces' %(ievent, Ntrace))
     return
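
A minimal usage sketch (the class name BodyWaveDataSet and the setup are illustrative assumptions, not from the source; the method only requires that self.cat holds an obspy Catalog and self.waveforms a pyasdf-style station container):

# Hypothetical usage -- BodyWaveDataSet is an assumed wrapper class.
dset = BodyWaveDataSet('stations.h5')
dset.download_body_waveforms(outdir='body_waves', fskip=True,
                             phase='P', startoffset=-30., endoffset=60.,
                             startdate='2010-01-01', enddate='2011-01-01')
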
Example #29
import numpy as np

from obspy.core import UTCDateTime
from obspy.core.stream import Stream


from getwave import getWave

import csv

# initialise an IRIS client to query for channels that are not in our database.
# This client needs to be initialised here because its initialiser spawns
# multiple threads, which is forbidden at import time in Python 2, so it
# cannot be initialised in the getwave module.
from obspy.clients.fdsn.client import Client
from obspy.geodetics.base import gps2dist_azimuth as distaz
irisclient = Client("IRIS")
#load ISC pick catalogue CSV

saveDir="/g/data/ha3/rlt118/neural-datasets/categoriser-teleseismic/smallset/"

Sctr = 0
wfctr = 0
with open('/g/data/ha3/Passive/Events/BabakHejrani/ISC.csv') as ISCpicks:
    pickrdr = csv.reader(ISCpicks, delimiter=',')
    event = ""
    for pick in pickrdr:
        st = pick[0].strip()
        if st == '#ISC':  # store event metadata, then skip processing this row as a pick
            event = pick
            evlat = float(event[6].strip())
            evlong = float(event[7].strip())
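
The snippet is truncated here. The imported distaz helper (gps2dist_azimuth) would plausibly be used further down to compute event-station geometry for each pick; a sketch, where stlat and stlong are hypothetical station coordinates:

# Illustrative continuation -- stlat/stlong are assumed station coordinates.
dist_m, az, baz = distaz(evlat, evlong, stlat, stlong)  # distance in metres, plus both azimuths
dist_km = dist_m / 1000.0
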
Example #30
    # cut off microseconds from the input event time
    # (this snippet is part of a larger function: input, cfg, Client,
    # and Client_SDS are defined elsewhere)
    date = UTCDateTime(input.partition(".")[0])

    # create working dir folder respective to the event datetime
    working_dir = os.path.join(cfg.output_dir, date.strftime("%Y%m%d%H%M%S"))

    if os.path.exists(working_dir):
        logging.info("Working directory already exists, so event (" +
                     str(input) + ") has already been processed! Exiting...")

    else:
        logging.info("Creating working directory (" + working_dir + ")...")
        os.makedirs(working_dir)

        logging.info("Connecting to FDSN Server:  " + cfg.fdsnws_url + "...")
        fdsn = Client(cfg.fdsnws_url)

        if cfg.sds_url:
            logging.info("Connecting to SDS archive:  " + cfg.sds_url + "...")
            sds = Client_SDS(cfg.sds_url)

        logging.info("Retrieving event's info...")

        # get the event info: a Catalog object is returned and its first
        # element (an Event object) is retrieved. Search within 1 second of
        # the input event time; if two events fall within that second,
        # just take the first one.
        event = fdsn.get_events(
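
The call is cut off at this point in the source. Based on the comment above, it plausibly takes a one-second window around the input time, along these lines (the exact arguments are an assumption, not the original code):

# Illustrative sketch of the truncated call -- arguments are assumed.
event = fdsn.get_events(starttime=date - 1, endtime=date + 1)[0]
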
Example #31
# imports required by this snippet
import os
import json

from obspy import UTCDateTime
from obspy.clients.fdsn import Client


def makeStationList(json_path,
                    client_list,
                    min_lat,
                    max_lat,
                    min_lon,
                    max_lon,
                    start_time,
                    end_time,
                    channel_list=[],
                    filter_network=[],
                    filter_station=[],
                    **kwargs):
    """
    
    Uses FDSN web services to find available stations in a specific geographical region and time period.

    Parameters
    ----------
    json_path: str
        Path of the json file that will be returned

    client_list: list
        List of client names e.g. ["IRIS", "SCEDC", "USGS"].
                                
    min_lat: float
        Min latitude of the region.
        
    max_lat: float
        Max latitude of the region.
        
    min_lon: float
        Min longitude of the region.
        
    max_lon: float
        Max longitude of the region.
        
    start_time: str
        Start DateTime for the beginning of the period in "YYYY-MM-DDThh:mm:ss.f" format.
        
    end_time: str
        End DateTime for the end of the period in "YYYY-MM-DDThh:mm:ss.f" format.
        
    channel_list: list, default=[]
        A list containing the desired channel codes. Downloads will be limited to these channels based on priority. Defaults to [] --> all channels
        
    filter_network: list, default=[]
        A list containing the network codes that need to be avoided. 
        
    filter_station: list, default=[]
        A list containing the station names that need to be avoided.

    kwargs:
        Extra keyword arguments passed through to Client.get_stations.

    Returns
    ----------
    stations_list.json: A JSON file, written to json_path, containing information for the available stations.

    """

    station_list = {}
    for cl in client_list:
        inventory = Client(cl).get_stations(minlatitude=min_lat,
                                            maxlatitude=max_lat,
                                            minlongitude=min_lon,
                                            maxlongitude=max_lon,
                                            starttime=UTCDateTime(start_time),
                                            endtime=UTCDateTime(end_time),
                                            level='channel',
                                            **kwargs)

        for network in inventory:
            net = network.code
            if net not in filter_network:
                for st in network:
                    station = st.code
                    print(str(net) + "--" + str(station))

                    if station not in filter_station:

                        elv = st.elevation
                        lat = st.latitude
                        lon = st.longitude
                        new_chan = [ch.code for ch in st.channels]
                        if len(channel_list) > 0:
                            chan_priority = [ch[:2] for ch in channel_list]

                            for chnn in chan_priority:
                                if chnn in [ch[:2] for ch in new_chan]:
                                    new_chan = [
                                        ch for ch in new_chan if ch[:2] == chnn
                                    ]

# =============================================================================
#                      if ("BHZ" in new_chan) and ("HHZ" in new_chan):
#                          new_chan = [ch for ch in new_chan if ch[:2] != "BH"]
#                      if ("HHZ" in new_chan) and ("HNZ" in new_chan):
#                          new_chan = [ch for ch in new_chan if ch[:2] != "HH"]
#
#                          if len(new_chan)>3 and len(new_chan)%3 != 0:
#                              chan_type = [ch for ch in new_chan if ch[2] == 'Z']
#                              chan_groups = []
#                              for i, cht in enumerate(chan_type):
#                                  chan_groups.append([ch for ch in new_chan if ch[:2] == cht[:2]])
#                              new_chan2 = []
#                              for chg in chan_groups:
#                                  if len(chg) == 3:
#                                      new_chan2.append(chg)
#                              new_chan = new_chan2
# =============================================================================

                        if len(new_chan) > 0 and (station not in station_list):
                            station_list[str(station)] = {
                                "network": net,
                                "channels": list(set(new_chan)),
                                "coords": [lat, lon, elv]
                            }
    json_dir = os.path.dirname(json_path)
    if json_dir and not os.path.exists(json_dir):
        os.makedirs(json_dir)
    with open(json_path, 'w') as fp:
        json.dump(station_list, fp)
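
A minimal usage sketch (the region, dates, channel priorities, and paths below are illustrative assumptions):

# Hypothetical invocation -- all values are example placeholders.
makeStationList(json_path='json/station_list.json',
                client_list=['IRIS'],
                min_lat=34.0, max_lat=37.0,
                min_lon=-120.0, max_lon=-117.0,
                start_time='2019-09-01T00:00:00.0',
                end_time='2019-09-03T00:00:00.0',
                channel_list=['HH[ZNE]', 'BH[ZNE]'],
                filter_network=['SY'],
                filter_station=[])
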
Example #32
 # Note: this is a method on a pyasdf.ASDFDataSet-style class (it calls
 # self.add_stationxml) and assumes obspy is imported at module level.
 def read_TA_lst(self, infname, startdate=None, enddate=None, startbefore=None, startafter=None, endbefore=None, endafter=None, location=None, channel=None,\
         includerestricted=False, minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, \
         latitude=None, longitude=None, minradius=None, maxradius=None):
     """Get station inventory from IRIS server
     =======================================================================================================
     Input Parameters:
     startdate, enddate  - start/end date for searching
     network             - Select one or more network codes.
                             Can be SEED network codes or data center defined codes.
                                 Multiple codes are comma-separated (e.g. "IU,TA").
     station             - Select one or more SEED station codes.
                             Multiple codes are comma-separated (e.g. "ANMO,PFO").
     location            - Select one or more SEED location identifiers.
                             Multiple identifiers are comma-separated (e.g. "00,01").
                             As a special case "--" (two dashes) will be translated to a string of two space
                             characters to match blank location IDs.
     channel             - Select one or more SEED channel codes.
                             Multiple codes are comma-separated (e.g. "BHZ,HHZ").
     includerestricted   - default is False
     minlatitude         - Limit to stations with a latitude larger than the specified minimum.
     maxlatitude         - Limit to stations with a latitude smaller than the specified maximum.
     minlongitude        - Limit to stations with a longitude larger than the specified minimum.
     maxlongitude        - Limit to stations with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to be used for a radius search.
     minradius           - Limit to stations within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to stations within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     =======================================================================================================
     """
     def _to_utc(value):
         # convert to UTCDateTime; fall back to None for missing/invalid input
         try:
             return obspy.core.utcdatetime.UTCDateTime(value)
         except Exception:
             return None
     starttime   = _to_utc(startdate)
     endtime     = _to_utc(enddate)
     startbefore = _to_utc(startbefore)
     startafter  = _to_utc(startafter)
     endbefore   = _to_utc(endbefore)
     endafter    = _to_utc(endafter)
     client = Client('IRIS')
     init_flag = True
     with open(infname, 'r') as fio:  # text mode, so fields compare to str below
         for line in fio.readlines():
             network = line.split()[1]
             station = line.split()[2]
             if network == 'NET':
                 continue
             # print network, station
             if init_flag:
                 try:
                     inv     = client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, startbefore=startbefore, startafter=startafter,\
                                 endbefore=endbefore, endafter=endafter, channel=channel, minlatitude=minlatitude, maxlatitude=maxlatitude, \
                                     minlongitude=minlongitude, maxlongitude=maxlongitude, latitude=latitude, longitude=longitude, minradius=minradius, \
                                         maxradius=maxradius, level='channel', includerestricted=includerestricted)
                 except Exception:
                     print('No station inv: ', line)
                     continue
                 init_flag = False
                 continue
             try:
                 inv     += client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, startbefore=startbefore, startafter=startafter,\
                             endbefore=endbefore, endafter=endafter, channel=channel, minlatitude=minlatitude, maxlatitude=maxlatitude, \
                                 minlongitude=minlongitude, maxlongitude=maxlongitude, latitude=latitude, longitude=longitude, minradius=minradius, \
                                     maxradius=maxradius, level='channel', includerestricted=includerestricted)
             except Exception:
                 print('No station inv: ', line)
                 continue
     self.add_stationxml(inv)
     try:
         self.inv += inv
     except AttributeError:  # no inventory attached to this dataset yet
         self.inv = inv
     return
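
The expected input file is a whitespace-delimited station list with a header row; a sketch of its layout, inferred from the parsing above (the rows themselves are hypothetical):

# Hypothetical contents of infname. The parser reads fields [1] (network)
# and [2] (station) and skips the header row whose second field is 'NET'.
#
#   0  NET  STA
#   1  TA   A04A
#   2  TA   B05A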