Ejemplo n.º 1
0
def irisws_pick(dbsession=None,
                picker=None,
                inventory=None,
                starttime=None,
                endtime=None,
                t_chunk=1200,
                channel_codes=('EH', 'BH', 'HH'),
                overlap=30.):
    """Fetch waveforms from the IRIS web service in time chunks, run the
    picker on every trace, and commit the resulting picks to the database.

    Each chunk is requested with ``overlap`` seconds of lead-in, so each
    request is technically longer than ``t_chunk`` by that amount.

    Parameters
    ----------
    dbsession : SQLAlchemy session used to persist ``tables1D.Pick`` rows.
    picker : object whose ``picks(trace)`` method returns
        ``(scnl, picks, polarity, snr, uncert)``.
    inventory : obspy Inventory; filtered to ``starttime``/``endtime``.
    starttime, endtime : UTCDateTime bounds of the interval to pick.
    t_chunk : float, length of each request window in seconds.
    channel_codes : channel-code prefixes matched (``re.match``) against
        each channel's code.  Default is now an immutable tuple (was a
        mutable-default list).
    overlap : float, seconds of overlap prepended to each chunk.
    """
    irisclient = iris.Client()
    inv = inventory.select(starttime=starttime, endtime=endtime)
    for net in inv:
        network = net.code
        for sta in net:
            for ch in sta:
                for cc in channel_codes:
                    if re.match(cc, ch.code):
                        tlast = starttime
                        while tlast < endtime:  # Loop through all the time we want to pick
                            # BUGFIX: the fetch used to live inside the
                            # `else` branch, so the final (short) chunk was
                            # never downloaded.
                            end = min(tlast + t_chunk, endtime)
                            st = None
                            try:
                                # BUGFIX: use the `overlap` parameter
                                # instead of a hard-coded 30 seconds.
                                st = irisclient.timeseries(
                                    network, sta.code, ch.location_code,
                                    ch.code, tlast - overlap, end)
                                logging.info(str(st))
                            except Exception:
                                # Data gaps are routine: log and move on.
                                # (Was a bare `except:`, which also hides
                                # KeyboardInterrupt/SystemExit.)
                                logging.info("%s.%s.%s.%s not available" %
                                             (network, sta.code,
                                              ch.location_code, ch.code))
                            # BUGFIX: `st` was referenced even when the
                            # request raised, causing a NameError on the
                            # first failed chunk.
                            if st is not None and len(st) > 0:  # Make sure we got data
                                for tr in st:  # For each data segment run the picker
                                    tr.detrend('linear')
                                    scnl, picks, polarity, snr, uncert = picker.picks(tr)
                                    t_create = datetime.utcnow()  # Record the time we made the picks
                                    # Add each pick to the database
                                    for i in range(len(picks)):
                                        new_pick = tables1D.Pick(
                                            scnl, picks[i].datetime,
                                            polarity[i], snr[i], uncert[i],
                                            t_create)
                                        dbsession.add(new_pick)  # Add pick i to the database
                                    dbsession.commit()  # Commit the picks to the database

                            tlast = end
def get_waveforms(sta_list, OT, OLon, OLat, ODep, OMag):
    """Bulk-download 300 s of BH? waveforms starting at origin time *OT*
    for every station in *sta_list*, remove the instrument response
    (acceleration output) and resample to 100 Hz.

    Parameters
    ----------
    sta_list : iterable of dicts with 'network', 'station', 'latitude'
        and 'longitude' keys.
    OT : UTCDateTime origin time of the event.
    OLon, OLat, ODep : event longitude, latitude and depth.
    OMag : event magnitude (currently unused; kept for interface
        compatibility with callers).
    """
    client = Client("IRIS")
    # PERF: create the traveltime client once, not once per station as
    # the original did inside the loop.
    tt_client = iris.Client()
    bulk = []

    for sta in sta_list:
        # P-wave arrival time via the IRIS traveltime web service.
        result = tt_client.traveltime(phases=['p'], evloc=(OLat, OLon),
            staloc=[(sta['latitude'], sta['longitude'])], evdepth=ODep)
        r = result.decode().split()
        # NOTE(review): token 27 of the whitespace-split reply is assumed
        # to be the P travel time in seconds — fragile parsing; confirm
        # against the service's plain-text output format.
        p_arrival = OT + float(r[27])
        print(p_arrival)

        bulk.append((sta['network'], sta['station'], "*", "BH?", OT, OT + 300))

    if bulk:
        data = client.get_waveforms_bulk(bulk, attach_response=True)
        data.remove_response(output="ACC")  # deconvolve to acceleration
        data.resample(100.0)
        print(data)
Ejemplo n.º 3
0
Usage: python example_ktpicker.py
"""
import sys

sys.path.append("../")
from phasepapy.phasepicker import ktpicker
from obspy.core import *
import obspy.clients.iris as iris

# =======================================================================
# KTpicker example
# Load data into an Obspy Stream
# Obspy version dependent check the documentation for your version
wfstart = UTCDateTime(2016, 6, 9, 11, 11, 28)  # start of the requested waveform window
iris_client = iris.Client()  # IRIS web-service client used for the timeseries request

st = iris_client.timeseries("OK", "CROK", "--", "HHZ", wfstart,
                            wfstart + 10 * 60)  # Get ten minutes of data
st.merge()  # Ensure that traces aren't split
tr = st[0]  # first (after merge, the only) trace in the stream
tr.detrend('linear')  # Perform a linear detrend on the data

chenPicker = ktpicker.KTPicker(t_win=1,
                               t_ma=10,
                               nsigma=6,
                               t_up=0.78,
                               nr_len=2,
                               nr_coeff=2,
                               pol_len=10,
                               pol_coeff=10,
Ejemplo n.º 4
0
 # Collect the unique trace ids (net.sta.loc.chan) present in the stream.
 ids = []
 for tr in st:
     ids.append(tr.id)
 ids = set(ids)  # de-duplicate: one entry per distinct channel
 #Write each channel to its own file
 if not files_exist:
     for ch in ids:
         #print(st)
         stch = st.select(id=ch)  # sub-stream holding only this channel's traces
         #print(stch)
         # Path layout: <datadir><year>/<julian day>/<trace id>.seed
         filename = "%s%d/%03d/%s.seed" % (datadir, day.year, day.julday,
                                           ch)
         path_verify(filename)  # presumably creates any missing parent directories — TODO confirm
         stch.write(filename, format='MSEED', reclen=512)
 # Make sure our resp files are up to date
 irisclient = iris.Client()
 for ch in ids:
     n, s, loc, chan = ch.split('.')
     try:
         resp = irisclient.resp(
             network,
             station,
             location=loc,
             channel=chan,
             starttime=UTCDateTime('2004-001T00:00:00.0'),
             endtime=day + secperday,
             filename=respfilename(ch))
         resp = irisclient.evalresp(network,
                                    station,
                                    loc,
                                    chan,
Ejemplo n.º 5
0
    def download_traces(self,ch):
        """
        Download the trace for this event/station and channel *ch*,
        write it to disk via ``self.write_st`` and update the bookkeeping
        counters (attempts, ex, dwn, ts, fdsnx).

        Only channel 'BHE' updates the counters/outfile so each event is
        counted once across the three channels.
        """
        print('Start: {}. self.time: {}'.format(self.start,self.time))
        tr_id = "{}/{}/{}_{}_{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,ch)

        if ch == 'BHE':
            self.attempts += 1 # Counts the number of traces that downloads are attempted for

        # BUGFIX: dropped the redundant `== True`; isfile already returns bool.
        if os.path.isfile(tr_id):
            print("{} exists. It was not downloaded".format(tr_id))  # File already on disk

            if ch == 'BHE':
                out_id = '_'.join(tr_id.split('_')[0:-1])
                self.outfile.write('{}_\n'.format(out_id))
                self.ex += 1
        else:
            st = obspy.core.stream.Stream() # Initialises our stream variable

            # BUGFIX: was `self.network is 'BK'` — identity comparison with
            # a string literal is unreliable; use equality.
            if self.network == 'BK':
                download_client = obspy.clients.fdsn.Client('NCEDC')
            else:
                download_client = obspy.clients.fdsn.Client('IRIS')
            try:
                st = download_client.get_waveforms(self.network,self.station,'??',ch,self.start,self.start + 3000,attach_response=True)
                if len(st) > 3:
                    print("WARNING: More than three traces downloaded for event ", tr_id)
                elif len(st) < 3:
                    self.ts += 1

                dist_client = iris.Client() # Creates client to calculate event - station distance
                print('STLA {} STLO {} EVLA {} EVLO {}'.format(self.stla,self.stlo,self.evla,self.evlo))
                self.d = dist_client.distaz(stalat=self.stla,stalon=self.stlo,evtlat=self.evla,evtlon=self.evlo)
                print('Source-Reciever distance is {}'.format(self.d['distance']))
                # NOTE(review): with `or`, the second test is redundant (any
                # distance >= 85.0 passes) and the else-branch message
                # ("too small") suggests an 85-145 degree window
                # (`and ... <= 145.0`) was intended — confirm before changing.
                if (self.d['distance'] >= 85.0) or (self.d['distance'] >=145.0):

                        if st[0].stats.endtime - st[0].stats.starttime >= 2000:
                            self.write_st(st,tr_id)

                            if ch == 'BHE':
                                self.dwn += 1
                                out_id = '_'.join(tr_id.split('_')[0:-1])
                                self.outfile.write('{}_\n'.format(out_id))

                        else:
                            print('Record length is {}, which is too short'.format(st[0].stats.endtime - st[0].stats.starttime))
                            if ch == 'BHE':
                                self.ts += 1
                else:
                    print("Source Reciever Distance is too small")
                    if ch == 'BHE':
                        self.ts += 1
            except FDSNException:
                print('No Data Exception??')
                if ch == 'BHE':
                    self.fdsnx += 1
Ejemplo n.º 6
0
    # Gather (net code, sta code, lat, lon, elevation, azimuth, loc code)
    # tuples for every LH1 channel in the inventory.
    station_coordinates = []
    for network in inventory:
        for station in network:
            for channel in station:
                if channel.code=='LH1':
                    station_coordinates.append((network.code, station.code, 
                                            station.latitude, station.longitude, 
                                            station.elevation,channel.azimuth,channel.location_code))

# then for each station in the list get the distance and azimuth
# need to think about what source-receiver distances we want to use
# for p-waves.
# pick a model for estimating arrival times
    print("calculating travel times and requesting data")
    model = TauPyModel(model="iasp91")  # iasp91 velocity model for travel-time estimates
    irisClient=iris.Client()  # IRIS web-service client for subsequent requests
    for station in station_coordinates:
# first calculate the source-receiver distance
        DegDist = locations2degrees(eventLat, eventLon,
                                    station[2], station[3])
# need to add tolerance for distance so that we are only using P-arrivals
# need to talk to tyler about which P should be used?  P? Pdiff? pP? PP?
# tyler also feels we really want a direct P at teleseismic distances, so 
# let us start with 25-90
        if DegDist > 25 and DegDist < 90:
            print("Station "+station[1]+" will have a P-wave arrival")
            StationAziExpec = gps2dist_azimuth(eventLat, eventLon,
                                               station[2], station[3]) 
            print("station lat, lon:"+str(station[2])+","+str(station[3]))
            statBaz = StationAziExpec[2]
            print("The expected back azimuth for "+station[1]+" is "+str(statBaz))
Ejemplo n.º 7
0
def trace_download(date,time,evla,evlo,evdp,stla,stlo,station,network,outfile,fdsnx,ex,dwn,ts):
    """Download and save BHN/BHZ/BHE traces for a pre-determined event.

    The origin time is refined against the IRIS event catalogue, each
    channel is downloaded, SAC headers (station/event coordinates,
    distance, azimuth, backazimuth) are populated, and the trace is saved.

    Parameters mirror the caller's catalogue columns; the trailing
    counters are accumulated and handed back.

    Returns
    -------
    (dwn, fdsnx, ex, ts) : updated counters for downloaded, FDSN-failed,
        already-existing and too-short traces.
    """
    datetime = str(date) + "T" + str(time).zfill(4) #Combined date and time inputs for converstion t UTCDateTime object
    start = obspy.core.UTCDateTime(datetime) #iso8601=True

    client = obspy.clients.fdsn.Client('IRIS') #
    try:
        cat = client.get_events(starttime=start-60,endtime=start+60 ,latitude=evla,longitude=evlo,maxradius=0.5) #Get event in order to get more accurate event times.
        if len(cat) > 1:
            print("WARNING: MORE THAN ONE EVENT OCCURS WITHIN 5km Search!!")

        # Refine the origin time with the catalogue's seconds/minutes.
        start.second = cat[0].origins[0].time.second

        if start.minute != cat[0].origins[0].time.minute:
            time = (time - start.minute) + cat[0].origins[0].time.minute # Time is hhmm so we subtract the old minute value and add the new one

    except FDSNNoDataException:
        print("No Event Data Available")
    except FDSNException:
        print("FDSNException for get_events")


    channel = ["BHN","BHZ","BHE"]
    for ch in channel:

        tr_id = "/Users/ja17375/Shear_Wave_Splitting/Data/SAC_files/{}/{}_{:07d}_{:04d}{:02d}_{}.sac".format(station,station,date,time,start.second,ch)
        # BUGFIX: dropped the redundant `== True` comparison.
        if os.path.isfile(tr_id):
            print("It exists. It was not downloaded") # File already on disk
            if ch == 'BHE':
                outfile.write('{}\n'.format(tr_id[0:-7]))
                ex += 1
        else:
            st = obspy.core.stream.Stream() # Initialises our stream variable
            try:
                # BUGFIX: was `network is 'BK'` — identity comparison with a
                # string literal is unreliable; use equality.
                if network == 'BK':
                    download_client = obspy.clients.fdsn.Client('NCEDC')
                else:
                    download_client = obspy.clients.fdsn.Client('IRIS')

                st = download_client.get_waveforms(network,station,'??',ch,start,start + 3000,attach_response=True)

            except FDSNNoDataException:
                print("No Event Data Available")

            except FDSNException:
                print("FDSNException for get_events")

            if len(st) > 3:
                print("WARNING: More than three traces downloaded for event ", tr_id)
            # BUGFIX: guard against an empty stream (failed download above)
            # before indexing st[0], which previously raised an uncaught
            # IndexError.
            if len(st) > 0 and ((st[0].stats.endtime - st[0].stats.starttime) >= 2999.0):

                st[0].write('holder.sac', format='SAC',) # Writes traces as SAC files
                st_2 = obspy.core.read('holder.sac')
                ## Station Paramters
                st_2[0].stats.sac.stla = stla
                st_2[0].stats.sac.stlo = stlo
                ## Event Paramters
                st_2[0].stats.sac.evla = evla # Event latitude
                st_2[0].stats.sac.evlo = evlo # Event longitude
                st_2[0].stats.sac.evdp = evdp # Event depth
                st_2[0].stats.sac.kstnm = '{:>8}'.format(station)
                dist_client = iris.Client() # Creates client to calculate event - station distance

                d = dist_client.distaz(stalat=stla,stalon=stlo,evtlat=evla,evtlon=evlo)

                st_2[0].stats.sac.gcarc = d['distance'] # Great-circle distance in degrees
                st_2[0].stats.sac.dist = d['distancemeters']/1000 # Distance in kilometers
                st_2[0].stats.sac.baz = d['backazimuth'] # Backazimuth (receiver - source)
                st_2[0].stats.sac.az = d['azimuth'] # Azimuth (source - receiver)
                st_2[0].write(tr_id, format='SAC',byteorder=1)
                dwn += 1
                # BUGFIX: was `ch is 'BHE'` — identity comparison with a
                # string literal; use equality.
                if ch == 'BHE':
                    outfile.write('{}\n'.format(tr_id[0:-7]))

            else:
                print('Trace is too short')
                ts +=1

    return dwn,fdsnx, ex, ts