Example #1
def getCatData(date, opt):

    """
    Download data from IRIS or an Earthworm waveserver with padding and filter it. This is
    a specialized version of getData() for catalog events, pulling a smaller amount of time
    around a known event.

    date: UTCDateTime of known catalog event
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """    
    
    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')
    
    if opt.server == "IRIS":
        client = Client("IRIS")
    else:
        client = EWClient(opt.server, opt.port)
        
    st = Stream()
    for n in range(len(stas)):
        try:
            stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                    date - opt.atrig, date + 3*opt.atrig)
            stmp = stmp.filter("bandpass", freqmin=opt.fmin, freqmax=opt.fmax,
                corners=2, zerophase=True)
            stmp = stmp.merge(method=1, fill_value='interpolate')
        except (obspy.fdsn.header.FDSNException):
            try: # try again
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                        date - opt.atrig, date + 3*opt.atrig)
                stmp = stmp.filter("bandpass", freqmin=opt.fmin, freqmax=opt.fmax,
                    corners=2, zerophase=True)
                stmp = stmp.merge(method=1, fill_value='interpolate')
            except (obspy.fdsn.header.FDSNException):
                print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                stmp = Stream().extend([trtmp.copy()])
        # Resample to ensure all traces are same length
        if stmp[0].stats.sampling_rate != opt.samprate:
            stmp = stmp.resample(opt.samprate)
        st.extend(stmp.copy()) 
    
    st = st.trim(starttime=date-opt.atrig, endtime=date+3*opt.atrig, pad=True,
        fill_value=0)
    stC = st.copy() 

    return st, stC
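A minimal usage sketch for getCatData() (not part of the original project): the Options class below is a stand-in with hypothetical attribute values, since the real REDPy Options object is built from a configuration file.

from obspy import UTCDateTime

class Options(object):
    """Hypothetical stand-in for the REDPy Options object; attribute values are placeholders."""
    pass

opt = Options()
opt.server = 'IRIS'            # or the host of an Earthworm waveserver
opt.port = 16017               # only used in the waveserver branch
opt.network = 'UW,UW'
opt.station = 'HSR,SHW'
opt.location = '--,--'
opt.channel = 'EHZ,EHZ'
opt.samprate = 100.0
opt.fmin, opt.fmax = 1.0, 10.0
opt.atrig = 20.0               # seconds of padding around the catalog time

st, stC = getCatData(UTCDateTime('2004-09-28T00:00:00'), opt)
print(st)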
Example #2
    def elab(self):
        print('tremorStart ' + UTCDateTime.now().strftime("%Y%m%d %H%M%S"))
        s = np.asarray(self.get_all_nslc())
        appTrace = Stream()
        stTrace = Stream()
        self._elRunning = True
        for network in np.unique(s[:, 0]):
            for station in np.unique(s[:, 1]):

                stTrace = self._traces.select(network, station)
                elab = {
                    'ts': int(self._tEnd.strftime("%Y%m%d%H%M%S"))  # int() is equivalent to the removed np.long alias

                }
                # TREMOR
                for tr in stTrace:
                    rms = {}
                    id = tr.get_id()
                    spl = id.split('.')
                    channel = spl[3]
                    elab[channel] = {}
                    tStart = self._tEnd - 60
                    appTrace = tr.copy()
                    appTrace.trim(tStart, self._tEnd)
                    appTrace.remove_response(self._inv)

                    for b in band:
                        bb = band[b]
                        trF = appTrace.copy()
                        trF.filter('bandpass', freqmin=bb[0], freqmax=bb[1], corners=2, zerophase=True)
                        rms[b] = np.sqrt(np.mean(trF.data ** 2))
                        # print(id+' '+str(b)+' '+str(rms))
                        elab[channel]['rms_' + b] = rms[b]
                nTr=network + '_' + station
                try:
                    self._elab[nTr][elab['ts']]=elab
                    self._elabHyst[nTr][elab['ts']] = elab
                except KeyError:  # first result for this station: initialize its dicts
                    self._elab[nTr] = {}
                    self._elab[nTr][elab['ts']] = elab
                    self._elabHyst[nTr] = {}
                    self._elabHyst[nTr][elab['ts']] = elab

                # clean up and save
                m = int((self._tEnd-1440*60).strftime("%Y%m%d%H%M%S"))
                mm=np.min(list(self._elab[nTr].keys()))
                if mm<m:
                    self._elab[nTr].pop(mm)
                for e in self._elab:
                    filename = basePath + 'RT/ELAB_' + e + '.json'

                    with open(filename, 'w') as fp:
                        json.dump(list(self._elab[e].values(), fp))
        #np.savez('elSave',h=self._elabHyst,e=self._elab)
        self._elRunning = False
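elab() relies on a module-level band dictionary that is not shown in the snippet: a mapping of band names to (freqmin, freqmax) pairs fed to the bandpass filter. A hedged sketch of what it could look like; the band names and frequency limits below are illustrative placeholders, not values from the original project.

# Assumed structure of the module-level 'band' dict used by elab(); values are placeholders.
band = {
    'low':  (0.1, 1.0),    # Hz
    'mid':  (1.0, 5.0),
    'high': (5.0, 15.0),
}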
Example #3
def copy(orig):
    """
	True-copy a stream by creating a new stream and copying old attributes to it.
	This is necessary because the old stream accumulates *something* that causes
	CPU usage to increase over time as more data is added. This is a bug in obspy
	that I intend to find--or at the very least report--but until then this hack
	works fine and is plenty fast enough.

	In this example, we make a stream object with some RS 1Dv7 data and then copy it to a new stream:

	.. code-block:: python

		>>> import rsudp.raspberryshake as rs
		>>> from obspy.core.stream import Stream
		>>> rs.initRSlib(dport=8888, rsstn='R3BCF')
		>>> s = Stream()
		>>> d = rs.getDATA()
		>>> t = rs.make_trace(d)
		>>> s = rs.update_stream(s, d)
		>>> s
		1 Trace(s) in Stream:
		AM.R3BCF.00.EHZ | 2020-02-21T19:58:50.292000Z - 2020-02-21T19:58:50.532000Z | 100.0 Hz, 25 samples
		>>> s = rs.copy(s)
		>>> s
		1 Trace(s) in Stream:
		AM.R3BCF.00.EHZ | 2020-02-21T19:58:50.292000Z - 2020-02-21T19:58:50.532000Z | 100.0 Hz, 25 samples


	:param obspy.core.stream.Stream orig: The data stream to copy information from
	:rtype: obspy.core.stream.Stream
	:return: A low-memory copy of the passed data stream

	"""
    stream = Stream()
    for t in range(len(orig)):
        trace = Trace(data=orig[t].data)
        trace.stats.network = orig[t].stats.network
        trace.stats.location = orig[t].stats.location
        trace.stats.station = orig[t].stats.station
        trace.stats.channel = orig[t].stats.channel
        trace.stats.sampling_rate = orig[t].stats.sampling_rate
        trace.stats.starttime = orig[t].stats.starttime
        stream.append(trace).merge(fill_value=None)
    return stream.copy()
Example #4
def copy(orig):
    """
	True copy a stream by creating a new stream and copying old attributes to it.
	This is necessary because the old stream accumulates *something* that causes
	CPU usage to increase over time as more data is added. This is a bug in obspy
	that I intend to find--or at the very least report--but until then this hack
	works fine.
	"""
    stream = Stream()
    for t in range(len(orig)):
        trace = Trace(data=orig[t].data)
        trace.stats.network = orig[t].stats.network
        trace.stats.location = orig[t].stats.location
        trace.stats.station = orig[t].stats.station
        trace.stats.channel = orig[t].stats.channel
        trace.stats.sampling_rate = orig[t].stats.sampling_rate
        trace.stats.starttime = orig[t].stats.starttime
        stream.append(trace).merge(fill_value=None)
    return stream.copy()
Example #5
File: trigger.py  Project: jinwuLi/REDPy
def getData(tstart, tend, opt):

    """
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!

    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """    
    
    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')
    
    st = Stream()
    
    if opt.server == 'file':
    
        # Generate list of files
        flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
            root,opt.filepattern)) for root, dirs, files in os.walk(opt.searchdir)))
                
        # Determine which subset of files to load based on start and end times and
        # station name; we'll fully deal with stations below
        flist_sub = []
        for f in flist:
            # Load header only
            stmp = obspy.read(f, headonly=True)
            # Check if station is contained in the stas list
            if stmp[0].stats.station in stas:
                # Check if contains either start or end time
                ststart = stmp[0].stats.starttime
                stend = stmp[-1].stats.endtime
                if (ststart<=tstart and tstart<=stend) or (ststart<=tend and
                    tend<=stend) or (tstart<=stend and ststart<=tend):
                    flist_sub.append(f)
        
        # Fully load data from file
        stmp = Stream()
        for f in flist_sub:
            tmp = obspy.read(f, starttime=tstart, endtime=tend+opt.maxdt)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)
    
        # Filter and merge
        stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax, corners=2,
            zerophase=True)
        stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != opt.samprate:
                stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=0)
        
        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)
            
        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m] and nets[n] in
                    netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find "+stas[n]+'.'+chas[n]+'.'+nets[n]+'.'+locs[n])
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())
    
    else:   
     
        if '.' not in opt.server:
            client = Client(opt.server)
        else:
            client = EWClient(opt.server, opt.port)
        
        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                        tstart, tend+opt.maxdt)
                for m in range(len(stmp)):
                    stmp[m].data = np.where(stmp[m].data == -2**31, 0, stmp[m].data) # replace -2**31 (Winston NaN token) w 0
                stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                    corners=2, zerophase=True)
                stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                for m in range(len(stmp)):
                    if stmp[m].stats.sampling_rate != opt.samprate:
                        stmp[m] = stmp[m].resample(opt.samprate)
                stmp = stmp.merge(method=1, fill_value=0)
            except (obspy.clients.fdsn.header.FDSNException):
                try: # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                            tstart, tend+opt.maxdt)
                    for m in range(len(stmp)):
                        stmp[m].data = np.where(stmp[m].data == -2**31, 0, stmp[m].data) # replace -2**31 (Winston NaN token) w 0
                    stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                        corners=2, zerophase=True)
                    stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                    for m in range(len(stmp)):
                        if stmp[m].stats.sampling_rate != opt.samprate:
                            stmp[m] = stmp[m].resample(opt.samprate)
                    stmp = stmp.merge(method=1, fill_value=0)
                except (obspy.clients.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = opt.samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])
                                            
            # Last check for length; catches problem with empty waveserver
            if len(stmp) != 1:
                print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                stmp = Stream().extend([trtmp.copy()])
                
            st.extend(stmp.copy()) 
    
    # Edit 'start' time if using offset option
    if opt.maxdt:
        dts = np.fromstring(opt.offset, sep=',')
        for n, tr in enumerate(st):
            tr.stats.starttime = tr.stats.starttime-dts[n]
    
    st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
    stC = st.copy()
    
    return st, stC
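getData() returns two identical streams, one intended for cutting waveforms and one for triggering. As a hedged sketch only, assuming tstart, tend, and an Options object opt set up as in the sketch under Example #1, the triggering copy could be passed to ObsPy's coincidence_trigger like this (the STA/LTA windows and thresholds are illustrative, not REDPy's actual settings):

# Sketch only: trigger parameters are placeholders, not REDPy defaults.
from obspy.signal.trigger import coincidence_trigger

st, stC = getData(tstart, tend, opt)
trigs = coincidence_trigger('recstalta', 3.5, 1.0, stC, 3,
                            sta=0.5, lta=10.0, details=True)
for trig in trigs:
    print(trig['time'], trig['stations'])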
Example #6
def get_PGMs(tr,args):

   ta = tr.copy()
   ts = tr.copy()


   for i in range(len(ta)):

      m_dis=0
      m_vel=0
      m_acc=0


      #### Displacement
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_dis=abs(max(ta[i]))
      else:
         m_dis=abs(min(ta[i]))
         
      #### Velocity
      ta[i].data = np.gradient(ta[i].data,ta[i].stats['delta'])
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_vel=abs(max(ta[i]))
      else:
         m_vel=abs(min(ta[i]))
      ts[i].data = ta[i].data
      
      #### Acceleration
      ta[i].data = np.gradient(ta[i].data,ta[i].stats['delta'])
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_acc=abs(max(ta[i]))
      else:
         m_acc=abs(min(ta[i]))

      #store obtained pgms
      tr[i].stats['max_dis']  = m_dis
      tr[i].stats['max_vel']  = m_vel
      tr[i].stats['max_acc']  = m_acc

   # define vectors for Hz, T and G
   sa=args.sa.split(' ')
   spa = [0] * (len(sa) - 1)
   per = [0] * (len(sa) - 1)
   ges = [0] * (len(sa) - 1)


   #now for each value of sa convolve with response of pendulum
   for j in range(len(sa)):
      #apply convolution
      if j >= 1:
           tu=ta.copy()
           T=eval(sa[j])*1.0
           D=eval(sa[0])
           Ts = '%5.3f' % (1/T)
           omega = (2 *  3.14159 * T)**2

           paz_sa=cornFreq2Paz(T,damp=D)
           paz_sa['sensitivity'] =omega 
           paz_sa['zeros'] = [] 
           for n in range(len(tu)):
              tu[n].simulate(paz_remove=None, paz_simulate=paz_sa, taper=True, simulate_sensitivity=True, taper_fraction=0.05)
           
           
           per[j-1] = Ts
           
           # now measure for each i
           for i in range(len(tu)):
              if abs(max(tu[i])) >= abs(min(tu[i])):
                val=abs(max(tu[i]))
              else:
                val=abs(min(tu[i]))

              g=val/9.80665*100
              g='%10.3e' % (g)
              val='%10.3e' % (val)
              #here give spectral acceleration in standard units m/s^2
              # and not in g (suitable only for shakemap, can be
              # later converted
              tr[i]=UpdatePsaHeader(tr[i],j,val)

            
   for i in range(len(tr)):
     tr[i].stats['Tsa'] = per
     
   return tr
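The spectral-acceleration step above builds a single-degree-of-freedom oscillator response with cornFreq2Paz and convolves it in through Trace.simulate(). Below is a hedged, stripped-down sketch of that step on a synthetic trace; the corner frequency and damping are placeholders, and newer ObsPy versions expose the helper as corn_freq_2_paz.

# Illustrative values only; the synthetic trace stands in for one element of 'ta' above.
import numpy as np
from obspy import Trace
from obspy.signal.invsim import corn_freq_2_paz   # cornFreq2Paz in older ObsPy

acc = Trace(data=np.random.randn(2000))
acc.stats.sampling_rate = 100.0

freq = 1.0                                  # oscillator frequency in Hz (placeholder)
damp = 0.05                                 # damping ratio (placeholder)
paz_sa = corn_freq_2_paz(freq, damp=damp)
paz_sa['zeros'] = []
paz_sa['sensitivity'] = (2 * 3.14159 * freq) ** 2

acc.simulate(paz_remove=None, paz_simulate=paz_sa, taper=True,
             simulate_sensitivity=True, taper_fraction=0.05)
psa = max(abs(acc.data.max()), abs(acc.data.min()))   # peak oscillator response
print(psa)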
Example #7
    # Data features
    # fs_dat = 100.0
    fs_dat = check_stream_sampling_rate(HNx_st)  # 100Hz
    npts_dat = check_stream_npts(HNx_st)  # Variable
    t_dat = check_stream_t(HNx_st)  # Variable

    # Calculated values based on data and targets
    new_npts = npts_dat
    """  Check out the different tapers that can be used for Lanczos interpolation
         for a particular value of 'a' plot_lanczos_windows(a=a)
    """
    plot_lanczos_windows(a=a)
    # plot_lanczos_windows(a=a, filename=fig_path+"lanczos_windows_a="+str(a)+".png")

    # Is this easier to do in a loop (check values for each trace?)
    HNx_resamp = HNx_st.copy()
    HNx_resamp = HNx_resamp.interpolate(fs_dat,
                                        "lanczos",
                                        a=a,
                                        window=windows[0])

    #%% Plot this

    plots = False
    if plots:
        HNx_resamp.plot()

        for tr in HNx_resamp:
            tr.plot()

#%% Check stream histograms
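For reference, a minimal self-contained sketch of the Lanczos interpolation call used above; the target rate, 'a', and window are illustrative, and plot_lanczos_windows and the windows list are helpers from the original script that are not reproduced here.

import numpy as np
from obspy import Trace

tr = Trace(data=np.random.randn(1000))
tr.stats.sampling_rate = 100.0

# Resample to 200 Hz with Lanczos interpolation; larger 'a' = wider (slower, more accurate) kernel
tr.interpolate(sampling_rate=200.0, method='lanczos', a=20, window='blackman')
print(tr.stats.sampling_rate, tr.stats.npts)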
Example #8
def get_stream(datasource,
               scnl,
               tstart,
               tend,
               fill_value=0,
               filepattern='*',
               filter=None,
               samprate=100,
               verbose=False):
    """
    Generalized (and more robust) way to retrieve waveform data through ObsPy
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!
    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    
    filepattern='*'
     You can specify a pattern for your files to reduce the files within the directory
     searched. For example, filepattern=2019.06.*.mseed if your files are miniSEED files
     named by date and you only want those from June 2019. Simple wildcarding is supported
     (i.e., * and ?, [] for ranges of values or lists) but not full regular expressions.
     
    samprate=100
     Resamples all waveforms to the same sample rate.
    
    Returns ObsPy stream objects
    
    Based on code by Alicia Hotovec-Ellis and Aaron Wech.
    
    Example:
    >>> get_stream(['vdap.org', 16024], ['HSR.EHZ.CC.--'], '2004-09-28T00:00:00', '2004-09-28T01:00:00')
    >>> get_stream(['file', '/Users/vdapseismo/data/'], ['HSR.EHZ.CC.--'], '2004-09-28T00:00:00', '2004-09-28T01:00:00')
    >>> get_stream(['IRIS'], ['HSR.EHZ.CC.--'], '2004-09-28T00:00:00', '2004-09-28T01:00:00')
    """

    from obspy import UTCDateTime
    import obspy
    from obspy.clients.fdsn import Client
    from obspy.clients.earthworm import Client as EWClient
    from obspy.core.trace import Trace
    from obspy.core.stream import Stream
    from obspy.signal.trigger import coincidence_trigger
    import numpy as np
    from scipy import stats
    from scipy.fftpack import fft
    import glob, os, itertools

    #print(datasource)
    #print(scnl)
    #print(tstart)
    #print(tend)

    tstart = UTCDateTime(tstart)
    tend = UTCDateTime(tend)

    nets = []
    stas = []
    locs = []
    chas = []
    for s in scnl:
        #print(s)
        nets.append(s.split('.')[2])
        stas.append(s.split('.')[0])
        locs.append(s.split('.')[3])
        chas.append(s.split('.')[1])

    st = Stream()

    if '/' in datasource:
        # Retrieve data from file structure

        flist = list(
            itertools.chain.from_iterable(
                glob.iglob(os.path.join(root, filepattern))
                for root, dirs, files in os.walk(datasource)))

        # Determine which subset of files to load based on start and end times and
        # station name; we'll fully deal with stations below
        flist_sub = []
        for f in flist:
            # Load header only
            stmp = obspy.read(f, headonly=True)
            # Check if station is contained in the stas list
            if stmp[0].stats.station in stas:
                # Check if contains either start or end time
                ststart = stmp[0].stats.starttime
                stend = stmp[0].stats.endtime
                if (ststart <= tstart and tstart <= stend) or (
                        ststart <= tend
                        and tend <= stend) or (tstart <= stend
                                               and ststart <= tend):
                    flist_sub.append(f)

        # Fully load data from file
        stmp = Stream()
        for f in flist_sub:
            tmp = obspy.read(f, starttime=tstart, endtime=tend)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)

        # merge
        stmp = stmp.taper(max_percentage=0.01)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != samprate:
                stmp[m] = stmp[m].resample(samprate)
        stmp = stmp.merge(method=1, fill_value=fill_value)

        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)

        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m]
                        and nets[n] in netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print('No data found for {}.{}.{}.{}'.format(
                    stas[n], chas[n], nets[n], locs[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())

    else:
        # retrieve data from server

        if '.' not in datasource:
            client = Client(datasource)
        else:
            datasource = datasource.split(':')
            client = EWClient(datasource[0], int(datasource[1]))

        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                                            tstart, tend)
                for m in range(len(stmp)):
                    #stmp[m].data = np.ma.masked_where(stmp[m].data == -2**31, stmp[m].data) # masks out all values of -2**31 (Winston NaN Token)
                    #stmp[m] = stmp[m].split().merge(method=0, fill_value='interpolate')[0] # splits trace at masked values; then re-merges using linear interpolation
                    stmp[m].data = np.where(stmp[m].data == -2**31, 0,
                                            stmp[m].data)
                    if stmp[m].stats.sampling_rate != samprate:
                        stmp[m] = stmp[m].resample(samprate)
                stmp = stmp.taper(max_percentage=0.01)
                stmp = stmp.merge(method=1, fill_value=fill_value)
            except (obspy.clients.fdsn.header.FDSNException):
                try:  # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n],
                                                chas[n], tstart, tend)
                    for m in range(len(stmp)):
                        #stmp[m].data = np.ma.masked_where(stmp[m].data == -2**31, stmp[m].data) # masks out all values of -2**31 (Winston NaN Token)
                        #stmp[m] = stmp[m].split().merge(method=0, fill_value='interpolate')[0] # splits trace at masked values; then re-merges using linear interpolation
                        stmp[m].data = np.where(stmp[m].data == -2**31, 0,
                                                stmp[m].data)
                        if stmp[m].stats.sampling_rate != samprate:
                            stmp[m] = stmp[m].resample(samprate)
                    stmp = stmp.taper(max_percentage=0.01)
                    stmp = stmp.merge(method=1, fill_value=fill_value)
                except (obspy.clients.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n], nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])

            # Last check for length; catches problem with empty waveserver
            if len(stmp) != 1:
                print('No data found for {}.{}.{}.{}'.format(
                    stas[n], chas[n], nets[n], locs[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = samprate
                trtmp.stats.station = stas[n]
                stmp = Stream().extend([trtmp.copy()])

            st.extend(stmp.copy())

    st = st.trim(starttime=tstart,
                 endtime=tend,
                 pad=True,
                 fill_value=fill_value)

    return st
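A hedged usage sketch for get_stream(): note that the docstring examples above pass the datasource as a list, while the function body actually branches on a string ('/' for a directory, 'host:port' for a waveserver, anything else to the FDSN Client), so the calls below follow the body rather than the docstring. The SCNL and times are taken from the docstring examples.

# FDSN (IRIS) request; SCNL and times follow the docstring example above.
st = get_stream('IRIS', ['HSR.EHZ.CC.--'],
                '2004-09-28T00:00:00', '2004-09-28T01:00:00',
                fill_value=0, samprate=100)
print(st)

# Earthworm/Winston waveserver: the body splits on ':', so a 'host:port' string is expected.
# st = get_stream('vdap.org:16024', ['HSR.EHZ.CC.--'],
#                 '2004-09-28T00:00:00', '2004-09-28T01:00:00')

# Local directory of SAC/miniSEED files (any path containing '/').
# st = get_stream('/Users/vdapseismo/data/', ['HSR.EHZ.CC.--'],
#                 '2004-09-28T00:00:00', '2004-09-28T01:00:00', filepattern='*.mseed')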
Example #9
print(event.event_descriptions[0]['type'], ': ', event.event_descriptions[0]['text'])

c = arclinkClient(user='******')

start = event.origins[0].time
print('Origin time: ', start)
end = start + 3600

RLAS = c.get_waveforms(network='BW', station='RLAS', location='', channel='BJZ', starttime=start, endtime=end)

BHE = c.get_waveforms(network='GR', station='WET', location='', channel='BHE', starttime=start, endtime=end)
BHN = c.get_waveforms(network='GR', station='WET', location='', channel='BHN', starttime=start, endtime=end)
BHZ = c.get_waveforms(network='GR', station='WET', location='', channel='BHZ', starttime=start, endtime=end)

AC = Stream(traces=[BHE[0],BHN[0],BHZ[0]])
ac = AC.copy()


# **Remove the instrument responses of the instruments from the recordings + convert units**
# - convert Ring Laser recordings to nrad/s units using a conversion factor
# - remove the seismometer response using poles and zeros + convert from velocity to acceleration [nm/s^2] in one step
# - trim the traces to make sure start and end times match for both instruments


RLAS.detrend(type='linear')
RLAS[0].data = RLAS[0].data * 1/6.3191 * 1e-3

AC.detrend(type='linear')
AC.taper(max_percentage=0.05)
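The snippet breaks off before the remaining steps described above (seismometer response removal, conversion to acceleration, trimming). A hedged sketch of how they could look follows; the poles-and-zeros dictionary is a placeholder rather than the actual GR.WET response, and the velocity-to-acceleration conversion is split into a separate differentiation step here for clarity, whereas the original notes it can be folded into the response removal.

# Placeholder poles and zeros; substitute the real instrument response for GR.WET.
paz = {'poles': [-0.037004 + 0.037016j, -0.037004 - 0.037016j],
       'zeros': [0j, 0j],
       'gain': 60077000.0,
       'sensitivity': 2516778400.0}

AC.simulate(paz_remove=paz, remove_sensitivity=True)   # ground velocity in m/s
AC.differentiate()                                     # velocity -> acceleration
for tr in AC:
    tr.data = tr.data * 1e9                            # m/s^2 -> nm/s^2

# Trim both streams to a common window so start and end times match
startcut = max(RLAS[0].stats.starttime, AC[0].stats.starttime)
endcut = min(RLAS[0].stats.endtime, AC[0].stats.endtime)
RLAS.trim(startcut, endcut)
AC.trim(startcut, endcut)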
Example #10
File: trigger.py  Project: jconvers/REDPy
def getData(tstart, tend, opt):
    """
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!

    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """

    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')

    st = Stream()

    if opt.server == 'SAC' or opt.server == 'miniSEED':

        # Generate list of files
        if opt.server == 'SAC':
            flist = glob.glob(opt.sacdir + '*.sac') + glob.glob(opt.sacdir +
                                                                '*.SAC')
        elif opt.server == 'miniSEED':
            flist = glob.glob(opt.mseeddir +
                              '*.mseed') + glob.glob(opt.mseeddir + '*.MSEED')

        # Load data from file
        stmp = Stream()
        for f in flist:
            tmp = obspy.read(f, starttime=tstart, endtime=tend)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)

        # Filter and merge
        stmp = stmp.filter('bandpass',
                           freqmin=opt.fmin,
                           freqmax=opt.fmax,
                           corners=2,
                           zerophase=True)
        stmp = stmp.taper(0.05, type='hann', max_length=opt.mintrig)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != opt.samprate:
                stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=0)

        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)

        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m]
                        and nets[n] in netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find " + stas[n] + '.' + chas[n] + '.' +
                      nets[n] + '.' + locs[n])
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())

    else:

        if '.' not in opt.server:
            client = Client(opt.server)
        else:
            client = EWClient(opt.server, opt.port)

        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                                            tstart, tend)
                stmp = stmp.filter('bandpass',
                                   freqmin=opt.fmin,
                                   freqmax=opt.fmax,
                                   corners=2,
                                   zerophase=True)
                stmp = stmp.taper(0.05, type='hann', max_length=opt.mintrig)
                for m in range(len(stmp)):
                    if stmp[m].stats.sampling_rate != opt.samprate:
                        stmp[m] = stmp[m].resample(opt.samprate)
                stmp = stmp.merge(method=1, fill_value=0)
            except (obspy.fdsn.header.FDSNException):
                try:  # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n],
                                                chas[n], tstart, tend)
                    stmp = stmp.filter('bandpass',
                                       freqmin=opt.fmin,
                                       freqmax=opt.fmax,
                                       corners=2,
                                       zerophase=True)
                    stmp = stmp.taper(0.05,
                                      type='hann',
                                      max_length=opt.mintrig)
                    for m in range(len(stmp)):
                        if stmp[m].stats.sampling_rate != opt.samprate:
                            stmp[m] = stmp[m].resample(opt.samprate)
                    stmp = stmp.merge(method=1, fill_value=0)
                except (obspy.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n], nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = opt.samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])
            st.extend(stmp.copy())

    st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
    stC = st.copy()

    return st, stC
Example #11
File: trigger.py  Project: ahotovec/REDPy
def getData(tstart, tend, opt):

    """
    Download data from files in a folder, from IRIS, or an Earthworm waveserver
    
    A note on SAC/miniSEED files: as this makes no assumptions about the naming scheme of
    your data files, please ensure that your headers contain the correct SCNL information!

    tstart: UTCDateTime of beginning of period of interest
    tend: UTCDateTime of end of period of interest
    opt: Options object describing station/run parameters
    
    Returns ObsPy stream objects, one for cutting and the other for triggering
    """    
    
    nets = opt.network.split(',')
    stas = opt.station.split(',')
    locs = opt.location.split(',')
    chas = opt.channel.split(',')
    
    st = Stream()
    
    if opt.server == 'SAC' or opt.server == 'miniSEED':
    
        # Generate list of files
        if opt.server == 'SAC':
            flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.sac')) for root, dirs, files in os.walk(opt.sacdir)))+list(
                itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.SAC')) for root, dirs, files in os.walk(opt.sacdir)))
        elif opt.server == 'miniSEED':
            flist = list(itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.mseed')) for root, dirs, files in os.walk(opt.mseeddir)))+list(
                itertools.chain.from_iterable(glob.iglob(os.path.join(
                root,'*.MSEED')) for root, dirs, files in os.walk(opt.mseeddir)))
                
        # Determine which subset of files to load based on start and end times and
        # station name; we'll fully deal with stations below
        flist_sub = []
        for f in flist:
            # Load header only
            stmp = obspy.read(f, headonly=True)
            # Check if station is contained in the stas list
            if stmp[0].stats.station in stas:
                # Check if contains either start or end time
                ststart = stmp[0].stats.starttime
                stend = stmp[0].stats.endtime
                if (ststart<=tstart and tstart<=stend) or (ststart<=tend and
                    tend<=stend) or (tstart<=stend and ststart<=tend):
                    flist_sub.append(f)
        
        # Fully load data from file
        stmp = Stream()
        for f in flist_sub:
            tmp = obspy.read(f, starttime=tstart, endtime=tend+opt.maxdt)
            if len(tmp) > 0:
                stmp = stmp.extend(tmp)
    
        # Filter and merge
        stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax, corners=2,
            zerophase=True)
        stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
        for m in range(len(stmp)):
            if stmp[m].stats.sampling_rate != opt.samprate:
                stmp[m] = stmp[m].resample(opt.samprate)
        stmp = stmp.merge(method=1, fill_value=0)
        
        # Only grab stations/channels that we want and in order
        netlist = []
        stalist = []
        chalist = []
        loclist = []
        for s in stmp:
            stalist.append(s.stats.station)
            chalist.append(s.stats.channel)
            netlist.append(s.stats.network)
            loclist.append(s.stats.location)
            
        # Find match of SCNL in header or fill empty
        for n in range(len(stas)):
            for m in range(len(stalist)):
                if (stas[n] in stalist[m] and chas[n] in chalist[m] and nets[n] in
                    netlist[m] and locs[n] in loclist[m]):
                    st = st.append(stmp[m])
            if len(st) == n:
                print("Couldn't find "+stas[n]+'.'+chas[n]+'.'+nets[n]+'.'+locs[n])
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                st = st.append(trtmp.copy())
    
    else:   
     
        if '.' not in opt.server:
            client = Client(opt.server)
        else:
            client = EWClient(opt.server, opt.port)
        
        for n in range(len(stas)):
            try:
                stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                        tstart, tend+opt.maxdt)
                stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                    corners=2, zerophase=True)
                stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                for m in range(len(stmp)):
                    if stmp[m].stats.sampling_rate != opt.samprate:
                        stmp[m] = stmp[m].resample(opt.samprate)
                stmp = stmp.merge(method=1, fill_value=0)
            except (obspy.clients.fdsn.header.FDSNException):
                try: # try again
                    stmp = client.get_waveforms(nets[n], stas[n], locs[n], chas[n],
                            tstart, tend+opt.maxdt)
                    stmp = stmp.filter('bandpass', freqmin=opt.fmin, freqmax=opt.fmax,
                        corners=2, zerophase=True)
                    stmp = stmp.taper(0.05,type='hann',max_length=opt.mintrig)
                    for m in range(len(stmp)):
                        if stmp[m].stats.sampling_rate != opt.samprate:
                            stmp[m] = stmp[m].resample(opt.samprate)
                    stmp = stmp.merge(method=1, fill_value=0)
                except (obspy.clients.fdsn.header.FDSNException):
                    print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                    trtmp = Trace()
                    trtmp.stats.sampling_rate = opt.samprate
                    trtmp.stats.station = stas[n]
                    stmp = Stream().extend([trtmp.copy()])
                                            
            # Last check for length; catches problem with empty waveserver
            if len(stmp) != 1:
                print('No data found for {0}.{1}'.format(stas[n],nets[n]))
                trtmp = Trace()
                trtmp.stats.sampling_rate = opt.samprate
                trtmp.stats.station = stas[n]
                stmp = Stream().extend([trtmp.copy()])
                
            st.extend(stmp.copy()) 
    
    # Edit 'start' time if using offset option
    if opt.maxdt:
        dts = np.fromstring(opt.offset, sep=',')
        for n, tr in enumerate(st):
            tr.stats.starttime = tr.stats.starttime-dts[n]
    
    st = st.trim(starttime=tstart, endtime=tend, pad=True, fill_value=0)
    stC = st.copy()
    
    return st, stC