Code Example #1
def generateStationTestData(sta):

    time_range = (UTCDateTime(TIME_RANGE[0]), UTCDateTime(TIME_RANGE[1]))

    client = Client("IRIS")
    inv = client.get_stations(network=NETWORK,
                              station=sta,
                              channel=CHANNEL,
                              starttime=time_range[0],
                              endtime=time_range[1],
                              level='channel')
    print(inv)

    traces = client.get_waveforms(network=NETWORK,
                                  station=sta,
                                  channel=CHANNEL,
                                  location='*',
                                  starttime=time_range[0],
                                  endtime=time_range[1])
    print(traces)

    outfile = 'test_data_' + sta + '.h5'
    asdf_out = pyasdf.ASDFDataSet(outfile, mode='w')
    asdf_out.add_stationxml(inv)
    asdf_out.add_waveforms(traces, TAG)

    print("Saved data to " + outfile)
Code Example #2
File: ClientUtils.py  Project: MarcusHaynes/hiperseis
    def queryByBBoxInterval(self, outputFileName, bbox, timeinterval, chan='*Z', bbpadding=2,
                            event_id=None, verbose=False):
        """ Time interval is a tuple (starttime,endtime)
        """
        assert len(timeinterval) == 2, "timeinterval must be a tuple of ascending timestamps. len=" + str(
            len(timeinterval)) + " " + str(timeinterval)

        query_ds = pyasdf.ASDFDataSet(outputFileName)

        client = Client(self._client)
        ref_inv = client.get_stations(network=self._network,
                                      starttime=UTCDateTime(timeinterval[0]),
                                      endtime=UTCDateTime(timeinterval[1]),
                                      minlongitude=bbox[0] - bbpadding,
                                      maxlongitude=bbox[1] + bbpadding,
                                      minlatitude=bbox[2] - bbpadding,
                                      maxlatitude=bbox[3] + bbpadding,
                                      level='channel')

        if verbose:
            print(ref_inv)

        ref_st = Stream()

        # go through inventory and request timeseries data
        for net in ref_inv:
            for stn in net:
                stime = UTCDateTime(timeinterval[0])
                etime = UTCDateTime(timeinterval[1])
                step = 3600*24*10
                while stime < etime:  # loop also covers the final partial window
                    try:
                        ref_st = client.get_waveforms(network=net.code, station=stn.code,
                                                      channel=chan, location='*',
                                                      starttime=stime,
                                                      endtime=min(stime + step, etime))
                        print(ref_st)
                        self.ref_stations.append(net.code + '.' + stn.code)
                        st_inv = ref_inv.select(station=stn.code, channel=chan)
                        
                        query_ds.add_stationxml(st_inv)
                        for tr in ref_st:
                            query_ds.add_waveforms(tr, "reference_station")
                    except FDSNException:
                        print('Data not available from Reference Station: ' + stn.code)
                    # end try
                    stime += step
                #wend
        # end for

        #tr.write(os.path.join(os.path.dirname(outputFileName), tr.id + ".MSEED"),
        #         format="MSEED") # Don't write miniseed
        if verbose:
            print("Wrote Reference Waveforms to ASDF file: " + outputFileName)
            print('\nWaveform data query completed.')

        metaOutputFileName = os.path.join(os.path.dirname(outputFileName),
                                          'meta.%s.xml'%(os.path.basename(outputFileName)))
        ref_inv.write(metaOutputFileName, format="STATIONXML")
        del query_ds
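A hedged sketch of how this method might be invoked. The constructor signature is an assumption inferred from the self._client and self._network attributes used above; the bounding box follows the (minlon, maxlon, minlat, maxlat) ordering implied by the get_stations call:

# Hypothetical usage; the constructor arguments are assumed, not documented here.
utils = ClientUtils(client='IRIS', network='AU')
utils.queryByBBoxInterval('query_out.h5',
                          bbox=(130.0, 140.0, -30.0, -20.0),  # (minlon, maxlon, minlat, maxlat)
                          timeinterval=('2016-01-01T00:00:00', '2016-02-01T00:00:00'),
                          chan='*Z', verbose=True)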
Code Example #3
def getXmlFromIRIS(network=None):
    if network is None:
        return
    client = Client("IRIS")
    # Use level=response to get channel information...
    # ...which holds the location code necessary for station referencing
    inv = client.get_stations(network=network, level='response')
    inv.write('./' + network + '.xml', format='STATIONXML', validate=True)
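Usage is a single call; the network code here is illustrative:

# Fetches response-level metadata for network IU and writes it to ./IU.xml
# as validated StationXML.
getXmlFromIRIS(network="IU")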
Code Example #4
 def get_stations(self, startdate=None, enddate=None,  network=None, station=None, location=None, channel=None,
         minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, latitude=None, longitude=None, minradius=None, maxradius=None):
     """Get station inventory from IRIS server
     =======================================================================================================
     Input Parameters:
     startdate, enddate  - start/end date for searching
     network             - Select one or more network codes.
                             Can be SEED network codes or data center defined codes.
                                 Multiple codes are comma-separated (e.g. "IU,TA").
     station             - Select one or more SEED station codes.
                             Multiple codes are comma-separated (e.g. "ANMO,PFO").
     location            - Select one or more SEED location identifiers.
                             Multiple identifiers are comma-separated (e.g. "00,01").
                             As a special case "--" (two dashes) will be translated to a string of two space
                             characters to match blank location IDs.
     channel             - Select one or more SEED channel codes.
                             Multiple codes are comma-separated (e.g. "BHZ,HHZ").             
     minlatitude         - Limit to stations with a latitude larger than the specified minimum.
     maxlatitude         - Limit to stations with a latitude smaller than the specified maximum.
     minlongitude        - Limit to stations with a longitude larger than the specified minimum.
     maxlongitude        - Limit to stations with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to the used for a radius search.
     minradius           - Limit to stations within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to stations within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     =======================================================================================================
     """
      try:
          starttime = obspy.core.utcdatetime.UTCDateTime(startdate)
      except:
          starttime = None
      try:
          endtime = obspy.core.utcdatetime.UTCDateTime(enddate)
      except:
          endtime = None
     client  = Client('IRIS')
     inv     = client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, channel=channel, 
                 minlatitude=minlatitude, maxlatitude=maxlatitude, minlongitude=minlongitude, maxlongitude=maxlongitude,
                     latitude=latitude, longitude=longitude, minradius=minradius, maxradius=maxradius, level='channel')
     self.add_stationxml(inv)
      try:
          self.inv += inv
      except:
          self.inv = inv
      return
Code Example #5
                #Many (perhaps most) of the picks have no assigned channel.
                #use the iris client's ability to parse lists of channels to get
                #an appropriate waveform.

                #compute back-azimuth
                stalong=float(pick[4].strip())
                stalat=float(pick[5].strip())

                baz=distaz(evlat,evlong,stalat,stalong)[2]

                #get appropriate N/E waveforms to rotate

                #TODO convert this try/except block to use the IRIS client
                try:
                    #search for N/1 channel with high-frequency response
                    inv=irisclient.get_stations(starttime=starttime,endtime=endtime,station=st,level="channel",channel="BHN,BH1,HHN,HH1,SHN,SH1")
                    #find appropriate channels in the inventory (ASDF returns list, not inventory type)
                    chan=inv.get_contents()['channels'][0].split('.')

                    stream=irisclient.get_waveforms(chan[0],chan[1],chan[2],chan[3],starttime,endtime)
                    wf1=stream[0]
                    if wf1.stats['channel'][-1] == '1':
                        wf2=irisclient.get_waveforms(chan[0],chan[1],chan[2],chan[3][0:-1]+'2',starttime,endtime)[0]
                        wfz=irisclient.get_waveforms(chan[0],chan[1],chan[2],chan[3][0:-1]+'Z',starttime,endtime)[0]

                        stream=Stream(traces=[wfz,wf1,wf2])
                        #get an inventory including the Z and E/2 channels
                        IRISinv=irisclient.get_stations(starttime=starttime,endtime=endtime,station=st,level="channel")
                        stream=stream.rotate(method="->ZNE",inventory=IRISinv)
                        
                    else:
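The excerpt above is cut off mid-branch. Below is a self-contained sketch of the same N/1-to-ZNE rotation pattern it implements; the station, time window, and channel list are hypothetical:

# Minimal sketch of the rotation flow above; station and times are placeholders.
from obspy import Stream, UTCDateTime
from obspy.clients.fdsn import Client

irisclient = Client("IRIS")
starttime = UTCDateTime("2014-01-01T00:00:00")  # hypothetical window
endtime = starttime + 3600
st = "ANMO"  # hypothetical station code

inv = irisclient.get_stations(starttime=starttime, endtime=endtime, station=st,
                              level="channel", channel="BHN,BH1,HHN,HH1")
net, sta, loc, cha = inv.get_contents()['channels'][0].split('.')
stream = irisclient.get_waveforms(net, sta, loc, cha, starttime, endtime)
wf1 = stream[0]
if wf1.stats['channel'][-1] == '1':
    # a '1' component needs its '2' and 'Z' partners plus metadata to rotate
    wf2 = irisclient.get_waveforms(net, sta, loc, cha[:-1] + '2', starttime, endtime)[0]
    wfz = irisclient.get_waveforms(net, sta, loc, cha[:-1] + 'Z', starttime, endtime)[0]
    stream = Stream(traces=[wfz, wf1, wf2])
    full_inv = irisclient.get_stations(starttime=starttime, endtime=endtime,
                                       station=st, level="channel")
    stream = stream.rotate(method="->ZNE", inventory=full_inv)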
Code Example #6
from obspy import UTCDateTime
import os

starttime = UTCDateTime("2012-10-01T01:00:00")
endtime = UTCDateTime("2012-10-02T01:00:00")

output_file = "/g/data/ha3/US_test.h5"

temp_sta = "249A"
perm_sta = "255A"

client = Client("IRIS")

ref_inv = client.get_stations(network="TA",
                              station=perm_sta,
                              channel="BHZ",
                              starttime=starttime,
                              endtime=endtime,
                              level='channel')

print(ref_inv)

temp_inv = client.get_stations(network="TA",
                               station=temp_sta,
                               channel="BHZ",
                               starttime=starttime,
                               endtime=endtime,
                               level='channel')

print(temp_inv)

# get waveforms
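The excerpt ends at the "# get waveforms" comment. A hedged guess at the continuation, mirroring the two inventory requests above; the pyasdf output step and the "raw_recording" tag are assumptions, not part of the original script:

# Assumed continuation: fetch both stations' waveforms and store them
# together with the metadata in the ASDF output file.
import pyasdf

ref_st = client.get_waveforms(network="TA", station=perm_sta, channel="BHZ",
                              location="*", starttime=starttime, endtime=endtime)
temp_st = client.get_waveforms(network="TA", station=temp_sta, channel="BHZ",
                               location="*", starttime=starttime, endtime=endtime)

ds = pyasdf.ASDFDataSet(output_file, mode='w')
ds.add_stationxml(ref_inv)
ds.add_stationxml(temp_inv)
ds.add_waveforms(ref_st, tag="raw_recording")   # tag name is an assumption
ds.add_waveforms(temp_st, tag="raw_recording")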
Code Example #7
import matplotlib.animation as animation
Writer = animation.writers['pillow']
writer = Writer(fps=20, metadata=dict(artist='Me'), bitrate=1800)

sta = 'R32B'
loc = '00'
net = 'N4'
chan = 'HHZ'

stime = UTCDateTime('2019-08-16 12:59:10')
etime = stime + 120

client = Client()
inv = client.get_stations(network=net,
                          station=sta,
                          starttime=stime,
                          endtime=etime,
                          channel=chan,
                          level="response")
st = Stream()
st += client.get_waveforms(net, sta, loc, chan, stime, etime)

st.detrend('constant')
st.merge(fill_value=0)
st.attach_response(inv)
st.remove_response(output="DISP")
#st.rotate(method="->ZNE",inventory=inv)
st.filter("bandpass", freqmin=.5, freqmax=5)
tr = st[0]
t = np.linspace(0, (tr.stats.npts - 1) / tr.stats.sampling_rate,
                num=tr.stats.npts)
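The pillow writer configured at the top is never used in this excerpt. A minimal sketch of how it could drive an animation of the processed trace; the figure setup and the 50-sample frame step are arbitrary choices, not from the original code:

import matplotlib.pyplot as plt

# Sketch: progressively reveal the trace and save the animation as a GIF.
fig, ax = plt.subplots()
line, = ax.plot([], [], 'k-')
ax.set_xlim(t[0], t[-1])
ax.set_ylim(tr.data.min(), tr.data.max())

def update(frame):
    # show 50 more samples per frame (arbitrary step)
    line.set_data(t[:frame * 50], tr.data[:frame * 50])
    return line,

anim = animation.FuncAnimation(fig, update, frames=tr.stats.npts // 50, blit=True)
anim.save('trace.gif', writer=writer)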
Code Example #8
 def read_TA_lst(self, infname, startdate=None, enddate=None,  startbefore=None, startafter=None, endbefore=None, endafter=None, location=None, channel=None,\
         includerestricted=False, minlatitude=None, maxlatitude=None, minlongitude=None, maxlongitude=None, \
         latitude=None, longitude=None, minradius=None, maxradius=None):
     """Get station inventory from IRIS server
     =======================================================================================================
     Input Parameters:
     infname             - input station list file (network and station codes in columns 2 and 3)
     startdate, enddate  - start/end date for searching
     network             - Select one or more network codes.
                             Can be SEED network codes or data center defined codes.
                                 Multiple codes are comma-separated (e.g. "IU,TA").
     station             - Select one or more SEED station codes.
                             Multiple codes are comma-separated (e.g. "ANMO,PFO").
     location            - Select one or more SEED location identifiers.
                             Multiple identifiers are comma-separated (e.g. "00,01").
                             As a special case "--" (two dashes) will be translated to a string of two space
                             characters to match blank location IDs.
     channel             - Select one or more SEED channel codes.
                             Multiple codes are comma-separated (e.g. "BHZ,HHZ").
     includerestricted   - default is False
     minlatitude         - Limit to stations with a latitude larger than the specified minimum.
     maxlatitude         - Limit to stations with a latitude smaller than the specified maximum.
     minlongitude        - Limit to stations with a longitude larger than the specified minimum.
     maxlongitude        - Limit to stations with a longitude smaller than the specified maximum.
     latitude            - Specify the latitude to be used for a radius search.
     longitude           - Specify the longitude to the used for a radius search.
     minradius           - Limit to stations within the specified minimum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     maxradius           - Limit to stations within the specified maximum number of degrees from the
                             geographic point defined by the latitude and longitude parameters.
     =======================================================================================================
     """
     try:
         starttime = obspy.core.utcdatetime.UTCDateTime(startdate)
     except:
         starttime = None
     try:
         endtime = obspy.core.utcdatetime.UTCDateTime(enddate)
     except:
         endtime = None
     try:
         startbefore = obspy.core.utcdatetime.UTCDateTime(startbefore)
     except:
         startbefore = None
     try:
         startafter = obspy.core.utcdatetime.UTCDateTime(startafter)
     except:
         startafter = None
     try:
         endbefore = obspy.core.utcdatetime.UTCDateTime(endbefore)
     except:
         endbefore = None
     try:
         endafter = obspy.core.utcdatetime.UTCDateTime(endafter)
     except:
         endafter = None
     client = Client('IRIS')
     init_flag = True
      with open(infname, 'r') as fio:  # text mode so codes compare as str
         for line in fio.readlines():
             network = line.split()[1]
             station = line.split()[2]
             if network == 'NET':
                 continue
             # print network, station
             if init_flag:
                 try:
                     inv     = client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, startbefore=startbefore, startafter=startafter,\
                                 endbefore=endbefore, endafter=endafter, channel=channel, minlatitude=minlatitude, maxlatitude=maxlatitude, \
                                     minlongitude=minlongitude, maxlongitude=maxlongitude, latitude=latitude, longitude=longitude, minradius=minradius, \
                                         maxradius=maxradius, level='channel', includerestricted=includerestricted)
                 except:
                     print('No station inv: ', line)
                     continue
                 init_flag = False
                 continue
             try:
                 inv     += client.get_stations(network=network, station=station, starttime=starttime, endtime=endtime, startbefore=startbefore, startafter=startafter,\
                             endbefore=endbefore, endafter=endafter, channel=channel, minlatitude=minlatitude, maxlatitude=maxlatitude, \
                                 minlongitude=minlongitude, maxlongitude=maxlongitude, latitude=latitude, longitude=longitude, minradius=minradius, \
                                     maxradius=maxradius, level='channel', includerestricted=includerestricted)
             except:
                 print('No station inv: ', line)
                 continue
     self.add_stationxml(inv)
     try:
         self.inv += inv
     except:
         self.inv = inv
     return
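For reference, a hypothetical layout of the station list file this method parses, inferred from the line.split() indexing and the 'NET' header check above:

# Whitespace-separated columns; column 2 holds the network code and
# column 3 the station code.  A header row with 'NET' in column 2 is skipped.
#
#   id  NET  STA
#   1   TA   121A
#   2   TA   121B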
Code Example #9
File: extract.py  Project: nikosT/FDSNWS_2_FMNEAR
            )
            service = fdsn

        for i, pick in enumerate(picks):
            # get waveform data
            _st = service.get_waveforms(
                pick.waveform_id['network_code'],
                pick.waveform_id['station_code'],
                pick.waveform_id['location_code'] or '',
                pick.waveform_id['channel_code'][:2] + '?', starttime, endtime)

            # get station metadata
            _inv = fdsn.get_stations(
                starttime=starttime,
                endtime=endtime,
                network=pick.waveform_id['network_code'],
                station=pick.waveform_id['station_code'],
                location=pick.waveform_id['location_code'] or '',
                channel=pick.waveform_id['channel_code'][:2] + '?',
                level="channel")

            # Rotate streams when necessary, trimming them all to the same length
            _st = _st.rotate(method="->ZNE",
                             inventory=_inv,
                             components=('ZNE'))

            # append stream to streams and inventory to inventories
            if i == 0:
                st = _st
                inv = _inv
            else:
                st += _st
Code Example #10
class Downloader:

    def __init__(self,df,station,outdir):

        self.station = station
        self.data = df
        self.out = outdir
        # self.summary = [] # list to hold all tr_ids
        # print(self.data)
#           Resets indexing of DataFrame

        # print('{}/{}_downloaded_streams.txt'.format(outdir,outdir.split('/')[-1]))
        try:
            #print('Make /Users/ja17375/Shear_Wave_Splitting/Data/SAC_files/{}'.format(station))
            os.mkdir('{}/{}'.format(self.out,station))
        except FileExistsError:
            print('It already exists, Hooray! Less work for me!')

            # pass
    #   Made

        #self.outfile = open('/Users/ja17375/Shear_Wave_Splitting/Data/SAC_files/{}/{}_downloaded_streams_Jacks_Split.txt'.format(station,station),'w+')

        self.attempts = 0 #Counter for how many attempted downloads there were
        self.fdsnx = 0 #Counter for how many attempts hit a FDSNNoDataException
        self.dwn = 0 #Counter for how many events were downloaded
        self.ex = 0 #Counter for how many events already exist in the filesystem and therefore weren't downloaded
        self.ts = 0 #Counter for events whose traces are too short.
        self.fdsnclient_evt = Client('IRIS') # Separate client for events (hopefully to get around the "no event available" bug)
        self.fdsnclient = Client('IRIS')
#       Download Station Data

    def download_station_data(self):
        """
        Download or read important station data and make sure it is right
        """
        try:
            stat =  self.fdsnclient.get_stations(channel='BH?',station='{}'.format(self.station))
            self.network = stat.networks[0].code
            self.stla = stat.networks[0].stations[0].latitude
            self.stlo = stat.networks[0].stations[0].longitude
            # print(self.network)
            return True
        except FDSNNoDataException:
            return False

    def set_event_data(self,i,sep):
        """
        Function to download event information so we can get more accurate start times
        """
        self.evla = self.data.EVLA[i]
        self.evlo = self.data.EVLO[i]
        if sep is False:

            self.date = self.data.DATE[i]
            if 'TIME' in self.data.columns:
                self.time = self.data.TIME[i]
            else:
                self.time = '0000'

            datetime = str(self.date) + "T" + self.time # Combined date and time inputs for conversion to a UTCDateTime object
            self.start = obspy.core.UTCDateTime(datetime)

            try:
                if 'TIME' in self.data.columns:
                    end = self.start + 60
                    print('Search starts {} , ends at {}'.format(self.start,end))
                    cat = self.fdsnclient_evt.get_events(starttime=self.start,endtime=self.start+86400 ,latitude=self.evla,longitude=self.evlo,maxradius=0.25,minmag=5.5) #Get event in order to get more accurate event times.
                    # self.time = '{:02d}{:02d}{:02d}'.format(cat[0].origins[0].time.hour,cat[0].origins[0].time.minute,cat[0].origins[0].time.second)
                else:
                    # No Time so we need to search over the whole day
                    end = self.start + 86400

                    print('Search starts {} , ends at {}'.format(self.start,end))
                    cat = self.fdsnclient_evt.get_events(starttime=self.start,endtime=self.start+86400 ,latitude=self.evla,longitude=self.evlo,maxradius=0.25,minmag=5.5) #Get event in order to get more accurate event times.
                if len(cat) > 1:
                    print("WARNING: MORE THAN ONE EVENT OCCURS WITHIN 5km Search!!")
                    print('Selecting Event with the largest magnitude')
                    # Select biggest magnitude
                    max_mag = max(c.magnitudes[0].mag for c in cat)
                    cat = cat.filter('magnitude >= {}'.format(max_mag))
                    print(cat)

                self.time = '{:02d}{:02d}{:02d}'.format(cat[0].origins[0].time.hour,cat[0].origins[0].time.minute,cat[0].origins[0].time.second)
                self.start.minute = cat[0].origins[0].time.minute
                self.start.hour = cat[0].origins[0].time.hour
                print(self.time)

                self.start.second = cat[0].origins[0].time.second

                # Lines commented out as they are only needed if TIME is provided as hhmm (for Deng's events there is
                # no TIME provided so we just have to use the event time downloaded)
                # if self.start.minute != cat[0].origins[0].time.minute:
                #     self.time = self.time[:2] + str(cat[0].origins[0].time.minute) # Time is hhmm so we subtract the old minute value and add the new one

                dep = cat[0].origins[0].depth
                if dep is not None:
                    self.evdp = dep/1000.0 # divide by 1000 to convert depth to [km]
                else:
                    self.evdp = 10.0 #Hard code depth to 10.0 km if evdp cannot be found
            except FDSNNoDataException:
                print("No Event Data Available")
                self.evdp = 0
            except FDSNException:
                print("FDSNException for get_events")
                # pass
        elif sep is True:
            self.start = obspy.core.UTCDateTime('{}'.format(self.data.DATE[i])) #iso8601=True
            self.date = '{:04d}{:03d}'.format(self.start.year,self.start.julday)
            self.time = '{:02d}{:02d}{:02d}'.format(self.start.hour,self.start.minute,self.start.second)
            self.evdp = self.data.EVDP[i]

    def download_traces(self,ch):
        """
        Function that downloads the traces for a given event and station
        """
        # if len(self.time) is 6:
        print('Start: {}. self.time: {}'.format(self.start,self.time))
        tr_id = "{}/{}/{}_{}_{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,ch)
        # elif len(self.time) is 4:
            # tr_id = "{}/{}/{}_{}_{}{}_{}.sac".format(self.out,self.station,self.station,self.date,self.time,self.start.second,ch)
        # print("Looking for :", tr_id)


        if ch == 'BHE':
            self.attempts += 1 # Counts the number of traces that downloads are attempted for

        if os.path.isfile(tr_id) == True:
            print("{} exists. It was not downloaded".format(tr_id)) # File does not exist

            if ch == 'BHE':
                out_id = '_'.join(tr_id.split('_')[0:-1])
                self.outfile.write('{}_\n'.format(out_id))
                # self.summary.append(out_id)
                self.ex += 1
        else:
            # print("It doesnt exists. Download attempted")
            st = obspy.core.stream.Stream() # Initialises our stream variable

            if self.network == 'BK':
                download_client = obspy.clients.fdsn.Client('NCEDC')
            else:
                download_client = obspy.clients.fdsn.Client('IRIS')
            try:
                st = download_client.get_waveforms(self.network,self.station,'??',ch,self.start,self.start + 3000,attach_response=True)
                # print(st)
                if len(st) > 3:
                    print("WARNING: More than three traces downloaded for event ", tr_id)
                elif len(st) < 3:
                    self.ts += 1

                dist_client = iris.Client() # Creates client to calculate event - station distance
                print('STLA {} STLO {} EVLA {} EVLO {}'.format(self.stla,self.stlo,self.evla,self.evlo))
                self.d = dist_client.distaz(stalat=self.stla,stalon=self.stlo,evtlat=self.evla,evtlon=self.evlo)
                print('Source-Receiver distance is {}'.format(self.d['distance']))
                if 85.0 <= self.d['distance'] <= 145.0: # keep events in the 85-145 degree distance window
                
                        if st[0].stats.endtime - st[0].stats.starttime >= 2000:
                            # print('Record length is {}, which is ok'.format(st[0].stats.endtime - st[0].stats.starttime))
                            self.write_st(st,tr_id)

                            if ch == 'BHE':
                                self.dwn += 1
                                out_id = '_'.join(tr_id.split('_')[0:-1])
                                self.outfile.write('{}_\n'.format(out_id))
                                # self.summary.append(out_id)

                        else:
                            print('Record length is {}, which is too short'.format(st[0].stats.endtime - st[0].stats.starttime))
                            if ch == 'BHE':
                                self.ts += 1
                else:
                    print("Source Reciever Distance is too small")
                    if ch == 'BHE':
                        self.ts += 1
            except FDSNException:
                print('No Data Exception??')
                if ch == 'BHE':
                    self.fdsnx += 1

    def write_st(self,st,tr_id):
        """

        """
        # print('Writing {}'.format(tr_id))
        st[0].write('holder.sac', format='SAC') # Write the first trace as a temporary SAC file
        #st.plot()
        st_2 = obspy.core.read('holder.sac')
        #sac = AttribDict() # Creates a dictionary sacd to contain all the header information I want.
        ## Set origin times
        st_2[0].stats.sac.nzyear = self.start.year
        st_2[0].stats.sac.nzjday = self.start.julday
        st_2[0].stats.sac.nzhour = self.start.hour
        st_2[0].stats.sac.nzmin = self.start.minute
        st_2[0].stats.sac.nzsec = self.start.second
        st_2[0].stats.sac.nzmsec = self.start.microsecond
        ## Station Paramters
        st_2[0].stats.sac.stla = self.stla
        st_2[0].stats.sac.stlo = self.stlo
        ## Event Paramters
        st_2[0].stats.sac.evla = self.evla#cat[0].origins[0].latitude # Event latitude
        st_2[0].stats.sac.evlo = self.evlo#cat[0].origins[0].longitude # Event longitude
        st_2[0].stats.sac.evdp = self.evdp#cat[0].origins[0].depth/1000 # Event depth
        st_2[0].stats.sac.kstnm = '{:>8}'.format(self.station)
        # print('stla = {}, stlo = {}, evla = {}, evlo = {}'.format(stla,stlo,evla,evlo))


        st_2[0].stats.sac.gcarc = self.d['distance'] # great circle distance in degrees, from the distaz dictionary
        st_2[0].stats.sac.dist = self.d['distancemeters']/1000 # Distance in kilometers
        st_2[0].stats.sac.baz = self.d['backazimuth'] # Backazimuth (receiver - source)
        st_2[0].stats.sac.az = self.d['azimuth'] # Azimuth (source - receiver)
        st_2[0].write(tr_id, format='SAC',byteorder=1)
Code Example #11
 def __init__(self, stream):
     self.stream = stream
     client = Client("IRIS")
     self.inv = client.get_stations(network=stream.traces[0].stats.network,
                                    station=stream.traces[0].stats.station,
                                    level='response')
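A hedged usage sketch. The enclosing class name is not shown in the excerpt, so ResponseHelper below is a placeholder for it; the station and time window are arbitrary:

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

t0 = UTCDateTime("2014-01-01T00:00:00")  # arbitrary example window
stream = Client("IRIS").get_waveforms("IU", "ANMO", "00", "BHZ", t0, t0 + 600)
helper = ResponseHelper(stream)  # placeholder name for the class above
stream.remove_response(inventory=helper.inv, output="VEL")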
Code Example #12
def fdsnws2geomag():
    '''Convert fdsnws query to geomagnetic data file'''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['internet', 'iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument('--output',
                        default=sys.stdout,
                        help='Output file (default: stdout).')
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station', required=True, help='Station code')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        ",".join(DEFAULT_CHANNELS))
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s \
            %(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    logging.info("Writing informtion to %s", str(args.output))
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream.merge_by_location().trim(starttime,
                                    endtime).write(args.output,
                                                   format=args.format,
                                                   inventory=inventory)
Code Example #13
def fdsnws2directory():
    '''
    Much like fdsnws2geomag, but purely designed to get the data from the FDSN-WS
    and add it according to the structure found on geomagnetic daqs servers.

    These structures vary depending on the source but can be customized by input argument.

    Filenames cannot be customized since they follow a strict naming convention.

    The convention can be found in the pygeomag/data/formats directory.
    '''
    parser = argparse.ArgumentParser(
        description=
        'Query the FDSN webservice and convert the geomagnetic data standards')
    parser.add_argument('--url',
                        default=DEFAULT_FDNWS,
                        help='FDSN-WS URL (default: %s)' % DEFAULT_FDNWS)
    parser.add_argument('--format',
                        choices=['iaga2002', 'imfv122'],
                        default='iaga2002',
                        help="Output format (default: iaga2002)")
    parser.add_argument(
        '--directory',
        default=DEFAULT_DIRECTORY,
        help=
        'Output directory with optional datetime parameter as accepted by Python datetime (default: %s).'
        % DEFAULT_DIRECTORY)
    # query specific parameters
    parser.add_argument('--date',
                        default=DEFAULT_DATE,
                        help='Date of the request (default: %s)' %
                        DEFAULT_DATE)
    parser.add_argument('--network',
                        default=DEFAULT_NETWORK,
                        help='Network code (default: %s)' % DEFAULT_NETWORK)
    parser.add_argument('--station',
                        default='*',
                        help='Station code (default: *)')
    parser.add_argument(
        '--location',
        nargs='+',
        default=DEFAULT_LOCATIONS,
        help=
        'Data type + source (data type = R - raw, D - definitive, source = 0,1,2,3..., default: %s)'
        % DEFAULT_LOCATIONS)
    parser.add_argument('--channel',
                        nargs='+',
                        default=DEFAULT_CHANNELS,
                        help='FDSN compliant channel query (default: %s)' %
                        "," % DEFAULT_CHANNELS)
    parser.add_argument('-v',
                        '--verbose',
                        action='store_true',
                        help='Verbosity')
    args = parser.parse_args()

    # Set the logging level
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d %(levelname)s \
            %(module)s %(funcName)s: %(message)s',
        datefmt="%Y-%m-%d %H:%M:%S",
        level=logging.INFO if args.verbose else logging.WARNING)

    # Convert date to starttime and endtime
    reftime = UTCDateTime(args.date)
    starttime = UTCDateTime(
        reftime.datetime.replace(hour=0, minute=0, second=0, microsecond=0))
    endtime = UTCDateTime(
        reftime.datetime.replace(hour=23,
                                 minute=59,
                                 second=59,
                                 microsecond=999999))

    # Create a handler client
    logging.info("Connecting to %s", args.url)
    client = Client(args.url)
    logging.info("Requesting data for %s.%s.%s.%s from %s to %s", args.network,
                 args.station, ",".join(args.location), ",".join(args.channel),
                 starttime.isoformat(), endtime.isoformat())
    stream = Stream(
        client.get_waveforms(args.network, args.station,
                             ",".join(args.location), ",".join(args.channel),
                             starttime, endtime))
    logging.info("Found stream: %s", str(stream.__str__(extended=True)))
    # Load optional inventory information
    inventory = client.get_stations(network=args.network, station=args.station)

    # Handle if no data was found
    if not stream:
        logging.warning("No data found")
        return 1

    # Before sending the raw data for writing, we need to trim the response
    # from the FDSNWS query to our actual request time.  We also merge by
    # location.
    # Correct the endtime with delta of the first trace
    endtime = UTCDateTime(reftime.datetime) + 86400 - stream[0].stats.delta
    stream = stream.merge_by_location().trim(starttime, endtime)

    # Loop through the list of stream and generate the unique list of station
    # codes.  We know the network code is constant and it's a single sampling rate
    # request.
    stations = set([trace.stats.station for trace in stream])

    # Convert the directory format string to a full path
    directory = starttime.strftime(args.directory)
    logging.info("Creating directory %s if does not exist", directory)
    pathlib.Path(directory).mkdir(parents=True, exist_ok=True)

    for station in stations:
        # Extract the station I need
        extract = stream.select(station=station)
        # Generate its filename (depends on the format)
        if args.format in ['iaga2002']:
            filename = pygeomag.data.formats.iaga2002.get_filename(
                extract[0].stats)
        elif args.format in ['imfv122']:
            filename = pygeomag.data.formats.imfv122.get_filename(
                extract[0].stats)
        else:
            raise ValueError(
                "Unable to generate filename for unhandled format %s" %
                args.format)
        filename = os.path.join(directory, filename)
        logging.info("Writing magnetic data to %s", filename)
        extract.write(filename, format=args.format, inventory=inventory)
Code Example #14
File: unite_conversion.py  Project: zlinahot/STEAD
ax = fig.add_subplot(1, 1, 1)
ax.plot(tr_Z.times("matplotlib"), tr_Z.data, "k-")
ax.xaxis_date()
fig.autofmt_xdate()
plt.ylabel('counts')
plt.title('Raw Data')
fig.tight_layout()
plt.show()
fig.savefig('1_raw.png')   

# downloading the instrument response of the station from IRIS
client = Client("IRIS")
inventory = client.get_stations(network=dataset.attrs['network_code'],
                                station=dataset.attrs['receiver_code'],
                                starttime=UTCDateTime(dataset.attrs['trace_start_time']),
                                endtime=UTCDateTime(dataset.attrs['trace_start_time']) + 60,
                                loc="*", 
                                channel="*",
                                level="response")  
# exploring the downloaded response file   
print(inventory)
inventory[0].plot_response(min_freq=1E-4) 


# converting into displacement
st = make_stream(dataset)
st = st.remove_response(inventory=inventory, output="DISP", plot=False)
tr_Z = st[2]
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.plot(tr_Z.times("matplotlib"), tr_Z.data, "k-")
Code Example #15
mpl.rc('font', serif='Times')
mpl.rc('text', usetex=True)
mpl.rc('font', size=18)

debug = True
sta = 'COR'
chans = '00'
presloc = '30'
net = "IU"
stime = UTCDateTime('2019-121T00:00:00.0')
etime = stime + 3. * 24. * 60. * 60.

client = Client()
inv = client.get_stations(network="IU",
                          station=sta,
                          starttime=stime,
                          endtime=etime,
                          channel="LH*",
                          level='response')

if debug:
    print(inv)

ctime = stime
st = Stream()

#while ctime <= etime:
#    string = '/tr1/telemetry_days/IU_' + sta + '/' + str(ctime.year) + '/' + \
#                str(ctime.year) + '_' + str(ctime.julday).zfill(3) + '/*'
#    st += read(string + chans + '_LH*')
#    st += read(string + presloc + '_LDO*')
#    ctime += 24.*60.*60.
Code Example #16
File: main.py  Project: outdoorpet/QC_events
    def create_SG2K_initiate(self, event, quake_df):

        # Launch the custom station/component selection dialog
        sel_dlg = selectionDialog(parent=self,
                                  sta_list=self.station_list,
                                  chan_list=self.channel_codes)
        if sel_dlg.exec_():
            select_sta, select_comp = sel_dlg.getSelected()

            # specify output directory for miniSEED files
            temp_seed_out = os.path.join(os.path.dirname(self.cat_filename),
                                         event)

            # create directory
            if os.path.exists(temp_seed_out):
                shutil.rmtree(temp_seed_out)
            os.mkdir(temp_seed_out)

            query_time = UTCDateTime(quake_df['qtime'] - (10 * 60)).timestamp

            trace_starttime = UTCDateTime(quake_df['qtime'] - (5 * 60))
            trace_endtime = UTCDateTime(quake_df['qtime'] + (15 * 60))

            # Create a Stream object to put data into
            # st = Stream()
            # Create a dictionary to put traces into (keys are tr_ids)
            st_dict = defaultdict(list)

            print('---------------------------------------')
            print('Finding Data for Earthquake: ' + event)

            if os.path.splitext(self.db_filename)[1] == ".db":
                # run SQL query
                for matched_entry in self.session.query(Waveforms). \
                        filter(or_(and_(Waveforms.starttime <= query_time, query_time < Waveforms.endtime),
                                   and_(query_time <= Waveforms.starttime, Waveforms.starttime < query_time + 30 * 60)),
                               Waveforms.station.in_(select_sta),
                               Waveforms.component.in_(select_comp)):
                    print(matched_entry.ASDF_tag)

                    # read in the data to obspy
                    temp_st = read(
                        os.path.join(matched_entry.path,
                                     matched_entry.waveform_basename))

                    # modify network header
                    temp_tr = temp_st[0]
                    temp_tr.stats.network = matched_entry.new_network

                    # st.append(temp_tr)
                    st_dict[temp_tr.get_id()].append(temp_tr)

            if os.path.splitext(self.db_filename)[1] == ".json":
                # run python dictionary query
                for key, matched_entry in self.network_dict.items():
                    if ((matched_entry['starttime'] <= query_time < matched_entry['endtime']) \
                                or (
                                query_time <= matched_entry['starttime'] and matched_entry['starttime'] < query_time + (
                            30 * 60))) \
                            and ((matched_entry['station'] in select_sta) and (
                                matched_entry['component'] in select_comp)):
                        print(matched_entry['ASDF_tag']
                              )  #, os.path.join(matched_entry['path'], key))

                        # read in the data to obspy
                        temp_st = read(os.path.join(matched_entry['path'],
                                                    key))

                        # modify network header
                        temp_tr = temp_st[0]
                        temp_tr.stats.network = matched_entry['new_network']

                        # trim trace to start and endtime
                        temp_tr.trim(starttime=trace_starttime,
                                     endtime=trace_endtime)

                        # st.append(temp_tr)
                        st_dict[temp_tr.get_id()].append(temp_tr)

            # free memory
            temp_st = None
            temp_tr = None

            if not len(st_dict) == 0:
                # .__nonzero__():

                print('')
                print('Merging Traces from %s Stations....' % len(st_dict))
                # Attempt to merge all traces with matching ID'S (same keys in dict) in place
                # st.merge()

                for key in list(st_dict.keys()):  # list() so keys can be deleted during iteration
                    if len(st_dict[key]) > 1:
                        temp_st = Stream(traces=st_dict[key])
                        # merge in place
                        # print('\tMerging %s in Stream:' % temp_st.count())
                        temp_st.merge()
                        # assign trace back to dictionary key if there is data
                        if temp_st.__nonzero__():
                            print("Station {0} has {1} Seconds of data".format(
                                key, temp_st[0].stats.endtime -
                                temp_st[0].stats.starttime))
                            st_dict[key] = temp_st[0]
                        elif not temp_st.__nonzero__():
                            print("No Data for: %s" % key)
                            # no data for station delete key
                            del st_dict[key]
                            continue
                    elif len(st_dict[key]) == 1:
                        print("Station {0} has {1} Seconds of data".format(
                            key, st_dict[key][0].stats.endtime -
                            st_dict[key][0].stats.starttime))
                        st_dict[key] = st_dict[key][0]
                    elif len(st_dict[key]) == 0:
                        # no data for station delete key
                        print("No Data for: %s" % key)
                        del st_dict[key]

                print(
                    '\nTrimming Traces to 20 mins around earthquake time....')

                # now trim the st object to 5 mins
                # before query time and 15 minutes afterwards

                for key in st_dict.keys():

                    st_dict[key] = st_dict[key].trim(starttime=trace_starttime,
                                                     endtime=trace_endtime,
                                                     pad=True,
                                                     fill_value=0)

                # st.trim(starttime=trace_starttime, endtime=trace_endtime, pad=True, fill_value=0)

                try:
                    # write traces into temporary directory
                    # for tr in st:
                    for key in st_dict.keys():
                        if type(st_dict[key]) == Stream:
                            #there is a problem with network codes (two stations named the same)
                            #ignore it for now
                            continue
                        st_dict[key].write(os.path.join(
                            temp_seed_out, st_dict[key].get_id() + ".MSEED"),
                                           format="MSEED")
                    print("\nWrote Temporary MiniSEED data to: " +
                          temp_seed_out)
                    print('')
                except:
                    print("Something Went Wrong!")

            else:
                print("No Data for Earthquake!")

            # free memory
            st_dict = None

            # Now requesting reference station data from IRIS if desired
            if self.ref_radioButton.isChecked():
                ref_dir = os.path.join(temp_seed_out, 'ref_data')

                # create ref directory
                if os.path.exists(ref_dir):
                    shutil.rmtree(ref_dir)
                os.mkdir(ref_dir)

                # request stations that are close to the selected stations

                # first use the coords lists to get a bounding box for array
                def calc_bounding_box(x, y):
                    min_x, max_x = (min(x), max(x))
                    min_y, max_y = (min(y), max(y))

                    return (min_x, max_x, min_y, max_y)

                bb = calc_bounding_box(self.station_coords[0],
                                       self.station_coords[1])

                # request data for near earthquake time up to 5 degrees from bounding box of array
                print(
                    '\nRequesting Waveform Data from Nearby Permanent Network Stations....'
                )

                client = Client("IRIS")
                self.ref_inv = client.get_stations(
                    network="AU",
                    starttime=UTCDateTime(quake_df['qtime'] - (5 * 60)),
                    endtime=UTCDateTime(quake_df['qtime'] + (15 * 60)),
                    minlongitude=bb[0] - 2,
                    maxlongitude=bb[1] + 2,
                    minlatitude=bb[2] - 2,
                    maxlatitude=bb[3] + 2,
                    level='channel')

                print(self.ref_inv)

                ref_st = Stream()

                # go through inventory and request timeseries data
                for net in self.ref_inv:
                    for stn in net:
                        try:
                            ref_st += client.get_waveforms(
                                network=net.code,
                                station=stn.code,
                                channel='*',
                                location='*',
                                starttime=UTCDateTime(quake_df['qtime'] -
                                                      (5 * 60)),
                                endtime=UTCDateTime(quake_df['qtime'] +
                                                    (15 * 60)))
                        except FDSNException:
                            print(
                                'No Data for Earthquake from Reference Station: '
                                + stn.code)

                        else:
                            # plot the reference stations
                            js_call = "addRefStation('{station_id}', {latitude}, {longitude});" \
                                .format(station_id=stn.code, latitude=stn.latitude,
                                        longitude=stn.longitude)
                            self.web_view.page().mainFrame(
                            ).evaluateJavaScript(js_call)

                try:
                    # write ref traces into temporary directory
                    for tr in ref_st:
                        tr.write(os.path.join(ref_dir, tr.id + ".MSEED"),
                                 format="MSEED")
                    print("Wrote Reference MiniSEED data to: " + ref_dir)
                    print('\nEarthquake Data Query Done!!!')
                except:
                    print("Something Went Wrong Writing Reference Data!")

                self.ref_inv.write(os.path.join(ref_dir, "ref_metadata.xml"),
                                   format="STATIONXML")