Example 1
def corn_freq_2_paz(fc, damp):

    from obspy.signal import cornFreq2Paz

    paz_out = cornFreq2Paz(fc, damp)

    return paz_out
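
For reference, cornFreq2Paz (renamed corn_freq_2_paz and moved to obspy.signal.invsim in later ObsPy releases) returns a plain poles-and-zeros dictionary for a second-order system with the given corner frequency and damping. A minimal sketch of an equivalent computation, assuming only the standard math module (the function name here is illustrative, not part of ObsPy):

import math

def corn_freq_2_paz_sketch(fc, damp=0.707):
    # Two conjugate poles at radius 2*pi*fc, two zeros at the origin,
    # unity gain and sensitivity -- the examples below then overwrite
    # 'zeros' or 'sensitivity' as needed.
    omega0 = 2.0 * math.pi * fc
    re = -damp * omega0
    im = math.sqrt(1.0 - damp ** 2) * omega0
    return {'poles': [complex(re, im), complex(re, -im)],
            'zeros': [0j, 0j],
            'gain': 1.0,
            'sensitivity': 1.0}
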
Example 2
    def _resi(self, x, *args):
        '''
        :param numpy array x: Containing original guess for free period, damping, and sensitivity
        :param tuple *args: tuple containing the step calibration input signal and output signal.     
        '''
        f = x[0]
        h = x[1]
        sen = x[2]
        trIN = args[0]
        trOUT = args[1]

        paz = cornFreq2Paz(f, h)
        paz['zeros'] = [0.]
        paz['sensitivity'] = sen
        trINCP = trIN.copy()
        trINCP.trim(trINCP.stats.starttime + 50, trINCP.stats.endtime - 50)
        trINCP.detrend('constant')
        trINCP.normalize()

        trOUTsim = trOUT.copy()
        trOUTsim.simulate(paz_remove=paz)
        trOUTsim.trim(
            trOUTsim.stats.starttime + 50, trOUTsim.stats.endtime - 50)
        trOUTsim.detrend('constant')
        trOUTsim.normalize()

        comp = sum((trOUTsim.data - trINCP.data) ** 2)
        return comp
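
The residual above is written to be handed to a downhill-simplex minimizer over (corner frequency, damping, sensitivity); Example 7 below does exactly that with scipy.optimize.fmin. A hedged sketch of the driving call, where fit_step_cal, resi, tr_in, tr_out, f0 and h0 are illustrative names for the method and the calibration traces rather than part of the original code:

import numpy as np
from scipy.optimize import fmin

def fit_step_cal(resi, tr_in, tr_out, f0, h0, sen0=10.0):
    # Minimize the step-calibration residual; same starting sensitivity
    # and tolerances as used in Example 7 below.
    x0 = np.array([f0, h0, sen0])
    return fmin(resi, x0, args=(tr_in, tr_out),
                xtol=1e-8, ftol=1e-3, disp=False)
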
Example 3
    def _resi(self, x, *args):
        '''
        :param numpy array x: Containing original guess for free period, damping, and sensitivity
        :param tuple *args: tuple containing the step calibration input signal and output signal.     
        '''
        f = x[0]
        h = x[1]
        sen = x[2]
        trIN = args[0]
        trOUT = args[1]

        paz = cornFreq2Paz(f, h)
        paz['zeros'] = [0.]
        paz['sensitivity'] = sen
        trINCP = trIN.copy()
        trINCP.trim(trINCP.stats.starttime + 50, trINCP.stats.endtime - 50)
        trINCP.detrend('constant')
        trINCP.normalize()

        trOUTsim = trOUT.copy()
        trOUTsim.simulate(paz_remove=paz)
        trOUTsim.trim(trOUTsim.stats.starttime + 50,
                      trOUTsim.stats.endtime - 50)
        trOUTsim.detrend('constant')
        trOUTsim.normalize()

        comp = sum((trOUTsim.data - trINCP.data)**2)
        return comp
Example 4
 def __set_paz_from_cornfreqs(self):
     for _i, (f, h) in enumerate(zip(self.boxes_corn_freqs,
                                     self.boxes_dampings)):
         f = f.value()
         h = h.value()
         paz = cornFreq2Paz(f, h)
         pole1 = paz['poles'][0]
         pole2 = paz['poles'][1]
         self.boxes_poles_real[_i*2].setValue(pole1.real)
         self.boxes_poles_imag[_i*2].setValue(pole1.imag)
         self.boxes_poles_real[_i*2+1].setValue(pole2.real)
         self.boxes_poles_imag[_i*2+1].setValue(pole2.imag)
Example 5
 def __set_paz_from_cornfreqs(self):
     for _i, (f, h) in enumerate(
             zip(self.boxes_corn_freqs, self.boxes_dampings)):
         f = f.value()
         h = h.value()
         paz = cornFreq2Paz(f, h)
         pole1 = paz['poles'][0]
         pole2 = paz['poles'][1]
         self.boxes_poles_real[_i * 2].setValue(pole1.real)
         self.boxes_poles_imag[_i * 2].setValue(pole1.imag)
         self.boxes_poles_real[_i * 2 + 1].setValue(pole2.real)
         self.boxes_poles_imag[_i * 2 + 1].setValue(pole2.imag)
Example 6
   def convolution_automatic(self,stream_data,parser_data):
      #st=stream_data.values().copy()
      #for tr in st and key in parser_data:
   
      # PAZ of the instrument to simulate; corner frequency comes from the GUI field
      inst2hz = cornFreq2Paz(float(self.inst2hz.getvalue()))
      waterLevel=float(self.water_level.getvalue())
      parser_keys=parser_data.keys()
      for key, st in stream_data.items():
         for pr_key in parser_data.keys():
            if key[0:2] in pr_key:
               pr=parser_data.get(pr_key)
               for tr in st:
                  paz=pr.getPAZ(tr.stats)
            
                  df = tr.stats.sampling_rate
                  tr.data = seisSim(tr.data, df, paz_remove=paz,
                                    paz_simulate=inst2hz,
                                    water_level=waterLevel)
                  stream_data[key+'_converted']=tr.copy()
            
               print "Try the manual option"
                      
               
      return stream_data
Example 7
    def computeStepCal(self):
        # cal duration needs to be divided by 10000 for step cals only.  This
        # only applies for when you are reading the cal duration from the
        # database.
        if (self.dbconn is not None):
            # divide by 10000 when getting the cal_duration from the database
            duration = self.cal_duration / 10000.0
        else:
            duration = self.cal_duration

        # Determine the type of sensor from the metadata
        sensor = self._determineSensorType()

        # ignores every location except for Z for triaxial STS-2s
        if ((self.dbconn is not None) and ("Z" not in self.outChannel) and
            (sensor == "STS-2HG" or sensor == "STS-4B" or sensor == "STS-2")):
            print("Skipped " + str(self.outChannel) + ' ' + sensor)

        # get the poles values for the sensor type
        pz = self._pzvals(sensor)

        # read data for the calibration
        try:
            stOUT = Stream()
            stime = UTCDateTime(self.startdate) - 5 * 60
            stOUT = read(self.dataOutLoc,
                         starttime=stime,
                         endtime=stime + duration + 5 * 60 + 900)
            stOUT.merge()
            stIN = read(self.dataInLoc,
                        starttime=stime,
                        endtime=stime + duration + 5 * 60 + 900)
            stIN.merge()
            trIN = stIN[0]
            trOUT = stOUT[0]
            trOUT.filter('lowpass', freq=.1)
            trIN.filter('lowpass', freq=.1)
            trIN.detrend('constant')
            trIN.normalize()
            trOUT.detrend('constant')
            trOUT.normalize()
            temp = trOUT.copy()
            temp.trim(endtime=stime + int(duration / 2.))
            if temp.max() < 0.0:
                trOUT.data = -trOUT.data
        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to read data for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to read data
                                          for manual input file ''' +
                                          str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # compute corner (cutoff) frequency
            f = 1. / (2 * math.pi / abs(pz['poles'][0]))
            # compute damping ratio
            h = abs(pz['poles'][0].real) / abs(pz['poles'][0])
            sen = 10.0

            print('Using: h=' + str(h) + ' f=' + str(f) + ' sen = ' + str(sen))

            x = numpy.array([f, h, sen])
            try:
                # compute best fit
                bf = fmin(self._resi,
                          x,
                          args=(trIN, trOUT),
                          xtol=10**-8,
                          ftol=10**-3,
                          disp=False)
            except:
                bf = x

        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to calculate {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                          perform corner freq, damping ratio,
                                          and best fit calculations for input
                                          file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            pazNOM = cornFreq2Paz(f, h)
            pazNOM['zeros'] = [0. + 0.j]

            pazPERT = cornFreq2Paz(bf[0], bf[1])
            pazPERT['zeros'] = [0]

            trOUTsimPert = trOUT.copy()
            trOUTsimPert.simulate(paz_remove=pazPERT)
            trOUTsimPert.trim(trOUTsimPert.stats.starttime + 50,
                              trOUTsimPert.stats.endtime - 50)
            trOUTsimPert.detrend('constant')
            trOUTsimPert.normalize()

            trOUTsim = trOUT.copy()

            trOUTsim.simulate(paz_remove=pazNOM)
            trOUTsim.trim(trOUTsim.stats.starttime + 50,
                          trOUTsim.stats.endtime - 50)
            trOUTsim.detrend('constant')
            trOUTsim.normalize()

            trIN.trim(trIN.stats.starttime + 50, trIN.stats.endtime - 50)
            trIN.detrend('constant')
            trIN.normalize()

            compOUT = sum((trOUTsim.data - trIN.data)**2)
            compOUTPERT = sum((trOUTsimPert.data - trIN.data)**2)
        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to do calculation for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                         perform poles calculation for input
                                         file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # create a plot for the step calibration and save it to the ./temp
            # directory.  This directory will be deleted when the program is
            # finished running.
            plt.clf()
            t = numpy.arange(
                0, trOUTsim.stats.npts / trOUTsim.stats.sampling_rate,
                trOUTsim.stats.delta)
            plt.plot(t, trIN.data, 'b', label='input')
            plt.plot(t,
                     trOUTsim.data,
                     'k',
                     label='h=' + str(round(h, 6)) + ' f=' + str(round(f, 6)) +
                     ' resi=' + str(round(compOUT, 6)))
            plt.plot(t,
                     trOUTsimPert.data,
                     'g',
                     label='h=' + str(round(bf[1], 6)) + ' f=' +
                     str(round(bf[0], 6)) + ' resi=' +
                     str(round(compOUTPERT, 6)))
            plt.xlabel('Time (s)')
            plt.ylabel('Cnts normalized')
            plt.title('Step Calibration ' + trOUT.stats.station + ' ' +
                      str(trOUT.stats.starttime.year) + ' ' +
                      str(trOUT.stats.starttime.julday).zfill(3))
            plt.legend(prop={'size': 6})
            plt.savefig('temp/' + str(trOUT.stats.station) +
                        str(self.outChannel) + str(self.location) +
                        str(self.startdate.year) + str(self.julianday) +
                        'step.png',
                        format="png",
                        dpi=400)
        except:
            if (self.dbconn is not None):
                self.stepcal_logger.error('Unable to plot {' + 'network = ' +
                                          self.network + ', station = ' +
                                          self.station + ', sensor = ' +
                                          str(sensor) + ', location = ' +
                                          str(self.location) + ', channel = ' +
                                          str(self.outChannel) + '}')
            else:
                self.stepcal_logger.error(
                    '(Manual Override) Unable to make plot for input file ' +
                    str(self.dataInLoc) + ' and output file ' +
                    str(self.dataOutLoc))
        if (self.dbconn is not None):
            try:
                plt.close()
                # insert results into the database
                fin = open(
                    'temp/' + str(trOUT.stats.station) + str(self.outChannel) +
                    str(self.location) + str(self.startdate.year) +
                    str(self.julianday) + 'step.png', 'rb')
                imgdata = fin.read()
                cur = self.dbconn.cursor()
                cur.execute(
                    '''INSERT INTO tbl_300calresults (fk_calibrationid,
                              nominal_cornerfreq, nominal_dampingratio, nominal_resi,
                              fitted_cornerfreq, fitted_dampingratio, fitted_resi,
                              outchannel, stepcal_img)
                              VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)''', [
                        self.cal_id,
                        round(f, 6),
                        round(h, 6),
                        round(compOUT, 6),
                        round(bf[0], 6),
                        round(bf[1], 6),
                        round(compOUTPERT, 6),
                        str(self.outChannel),
                        psycopg2.Binary(imgdata)
                    ])
                self.dbconn.commit()
            except:
                self.stepcal_logger.error(
                    'Unable to insert into database for {' + 'network = ' +
                    self.network + ', station = ' + self.station +
                    ', sensor = ' + str(sensor) + ', location = ' +
                    str(self.location) + ', channel = ' +
                    str(self.outChannel) + '}')

        else:
            try:
                print('nominal corner freq = ' + str(round(f, 6)) +
                      ', nominal damping ratio = ' + str(round(h, 6)) +
                      ', nominal best fit = ' + str(round(compOUT, 6)) +
                      ', fitted corner freq = ' + str(round(bf[0], 6)) +
                      ', fitted damping ratio = ' + str(round(bf[1], 6)) +
                      ', pert best fit ' + str(round(compOUTPERT, 6)))
                plt.show()
                plt.close()
            except:
                print(
                    '(Manual Override) Error displaying calculation results.')
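
The nominal f and h used above are recovered from the first pole of the sensor response via f = |p| / (2*pi) and h = |Re(p)| / |p|. A short sketch of that conversion, assuming pole is one member of a conjugate pair such as those produced by cornFreq2Paz (the helper name is illustrative):

import math

def pole_to_corner_freq_and_damping(pole):
    # Inverse of the relation used in computeStepCal above.
    f = abs(pole) / (2.0 * math.pi)
    h = abs(pole.real) / abs(pole)
    return f, h
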
Example 8
def xcorrEvents(starttime,
                endtime,
                network_id='*',
                station_id='*',
                location_id='',
                channel_id='EHZ',
                phase='P',
                time_window=(-1, 6),
                method='manual',
                merge=True):
    """
    @param method: 'manual' or 'auto' or None.
    """
    wildcard = "%s.%s.%s.%s" % (network_id, station_id, location_id,
                                channel_id)
    # PAZ of instrument to simulate, 2.0Hz corner-frequency, 0.707 damping
    inst = cornFreq2Paz(2.0)
    # get all events between start and end time
    client = Client("http://teide.geophysik.uni-muenchen.de:8080",
                    user="******",
                    password="******")
    event_list = client.event.getList(datetime=(starttime, endtime),
                                      localisation_method=method)

    print "Fetching events ..."
    networks = {}
    for event in event_list:
        id = event['resource_name']
        print "  EVENT:", str(event['datetime']), id
        # request event resource
        res = client.event.getXMLResource(id)
        # fetch all picks with given phase
        pick_list = res.xpath("/event/pick[phaseHint='%s']" % phase)
        # cycle through picks
        streams = []
        for pick in pick_list:
            temp = {}
            try:
                dt = UTCDateTime(str(pick.time.value))
            except:
                continue
            sid = pick.waveform.attrib['stationCode']
            nid = pick.waveform.attrib['networkCode'] or 'BW'
            cid = pick.waveform.attrib['channelCode']
            lid = pick.waveform.attrib['locationCode']
            pid = '%s.%s.%s.%s' % (nid, sid, lid, cid)
            print "    PICK: %s - %s - %s" % (pid, phase, dt)
            if not fnmatch.filter([pid], wildcard):
                continue
            # generate station/network list
            networks.setdefault(nid, {})
            networks[nid].setdefault(sid, [])
            networks[nid][sid].append((event, dt))

    print
    print "Correlate events over each station ..."
    # cycle through all networks/stations/events
    for nid, stations in networks.iteritems():
        for sid, events in stations.iteritems():
            print "  %s.%s:" % (nid, sid)
            if len(events) < 2:
                print "    -> Skipping: Need at least 2 events per station"
                print
                continue
            streams = []
            for event in events:
                id = event[0]['resource_name']
                dt = event[1]
                # get station PAZ for this date time
                paz = client.station.getPAZ(nid, sid, dt, location_id,
                                            channel_id)
                if not paz:
                    print "!!! Missing PAZ for %s.%s for %s" % (nid, sid, dt)
                    continue
                # get waveforms
                try:
                    stream = client.waveform.getWaveform(
                        nid, sid, location_id, channel_id, dt + time_window[0],
                        dt + time_window[1])
                except:
                    msg = "!!! Error fetching waveform for %s.%s.%s.%s for %s"
                    print msg % (nid, sid, location_id, channel_id, dt)
                    continue
                if merge:
                    stream.merge()
                for trace in stream:
                    # calculate zero mean
                    trace.data = trace.data - trace.data.mean()
                    # instrument correction
                    #trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                    #                     paz, inst_sim=inst, water_level=50.0)
                    trace.data = bandpassZPHSH(trace.data,
                                               2.0,
                                               20.0,
                                               df=trace.stats.sampling_rate,
                                               corners=4)
                    print '    Got Trace:', trace
                # append
                streams.append((id, stream))
            # cross correlation over all prepared streams
            l = len(streams)
            if l < 2:
                print "    -> Skipping: Need at least 2 events per station"
                print
                continue
            # output file
            filename = "%s.%s.txt" % (nid, sid)
            fp = open(filename, "w")
            # xcorr
            for i in range(0, l - 1):
                id1 = streams[i][0]
                tr1 = streams[i][1][0]
                for j in range(i + 1, l):
                    id2 = streams[j][0]
                    tr2 = streams[j][1][0]
                    # check sampling rate for both traces
                    if tr1.stats.sampling_rate != tr2.stats.sampling_rate:
                        print
                        print "!!! Sampling rates are not equal!"
                        continue
                    if tr1.stats.npts != tr2.stats.npts:
                        print
                        print "!!! Numbers of samples are not equal!"
                        continue
                    # divide by 2.0 as in eventcluster.c line 604
                    # remove last sample if npts is an odd number
                    delta = -1 * (tr1.stats.npts % 2)
                    winlen = int((tr1.stats.npts + delta) / 2.0)
                    shift, coe = xcorr(tr1.data[:delta].astype('float32'),
                                       tr2.data[:delta].astype('float32'),
                                       winlen)
                    fp.write("%d %d %.3f %d %s %s\n" %
                             (i + 1, j + 1, coe, shift, id1, id2))
            print
            fp.close()
Example 9
import pickle, urllib
from obspy.core import UTCDateTime
from obspy.signal.array_analysis import sonic
from obspy.signal import cornFreq2Paz

# Load data
stream = pickle.load(urllib.urlopen("http://examples.obspy.org/agfa.dump"))
print stream
print stream[0].stats

#
# Instrument correction to 1Hz corner frequency
paz1hz = cornFreq2Paz(1.0, damp=0.707)
stream.simulate(paz_remove='self', paz_simulate=paz1hz)

# Execute sonic
arguments = dict(
    # slowness grid: X min, X max, Y min, Y max, Slow Step
    sll_x=-3.0,
    slm_x=3.0,
    sll_y=-3.0,
    slm_y=3.0,
    sl_s=0.03,
    # sliding window properties
    win_len=1.0,
    win_frac=0.05,
    # frequency properties
    frqlow=1.0,
    frqhigh=8.0,
    prewhiten=0,
    # restrict output
Example 10
    # merging
    try:
        st.merge(0)
    except Exception, e:
        summary.append("Error while merging:")
        summary.append(str(e))
        summary = "\n".join(summary)
        summary += "\n" + "\n".join(("%s=%s" % (k, v) for k, v in PAR.items()))
        open(SUMMARY, "at").write(summary + "\n")
        continue

    # preprocessing, keep original data for plotting at end
    for tr in st:
        tr.data = detrend(tr.data)
    st.simulate(paz_remove="self",
                paz_simulate=cornFreq2Paz(1.0),
                remove_sensitivity=False)
    st.sort()
    st_trigger = st.copy()
    st_trigger.filter("bandpass",
                      freqmin=PAR.LOW,
                      freqmax=PAR.HIGH,
                      corners=1,
                      zerophase=True)
    st.trim(T1, T2)
    st_trigger.trim(T1, T2)
    st_trigger.trigger("recstalta", sta=PAR.STA, lta=PAR.LTA)
    summary.append(str(st))

    # do the triggering
    trigger_list = []
Example 11
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import read
from obspy.signal import seisSim, cornFreq2Paz
from copy import deepcopy

onehzinst = cornFreq2Paz(1.0, damp=0.707)  # 1Hz instrument
trace = read("http://examples.obspy.org/RJOB20090824.ehz")[0]
trace.data = trace.data - trace.data.mean()
sts2 = {
    'gain':
    60077000.0,
    'poles': [(-0.037004000000000002 + 0.037016j),
              (-0.037004000000000002 - 0.037016j), (-251.33000000000001 + 0j),
              (-131.03999999999999 - 467.29000000000002j),
              (-131.03999999999999 + 467.29000000000002j)],
    'sensitivity':
    2516778400.0,
    'zeros': [0j, 0j]
}
data1 = deepcopy(trace.data)
trace.simulate(paz_remove=sts2, paz_simulate=onehzinst)
data2 = trace.data

# The plotting, plain matplotlib
t = np.arange(trace.stats.npts) / trace.stats.sampling_rate
plt.subplot(211)
plt.plot(t, data1, 'k')
plt.ylabel('STS-2 [counts]')
#
plt.subplot(212)
Example 12
def get_PGMs(tr,args):

   ta=Stream()
   ta=tr.copy()
   ts=tr.copy()


   for i in range(len(ta)):

      m_dis=0
      m_vel=0
      m_acc=0


      #### Displacement
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_dis=abs(max(ta[i]))
      else:
         m_dis=abs(min(ta[i]))
         
      #### Velocity
      ta[i].data = np.gradient(ta[i].data,ta[i].stats['delta'])
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_vel=abs(max(ta[i]))
      else:
         m_vel=abs(min(ta[i]))
      ts[i].data = ta[i].data
      
      #### Acceleration
      ta[i].data = np.gradient(ta[i].data,ta[i].stats['delta'])
      if abs(max(ta[i])) >= abs(min(ta[i])):
         m_acc=abs(max(ta[i]))
      else:
         m_acc=abs(min(ta[i]))

      #store obtained pgms
      tr[i].stats['max_dis']  = m_dis
      tr[i].stats['max_vel']  = m_vel
      tr[i].stats['max_acc']  = m_acc

   #define vectors for Hz, T and G
   sa=args.sa.split(' ')
   spa=[]
   for l in range(len(sa)-1):
        spa.append(0)
   per=[]
   for l in range(len(sa)-1):
        per.append(0)
   ges=[]
   for l in range(len(sa)-1):
        ges.append(0)


   #now for each value of sa convolve with response of pendulum
   for j in range(len(sa)):
      #apply convolution
      if j >= 1:
           tu=ta.copy()
           T=eval(sa[j])*1.0
           D=eval(sa[0])
           Ts = '%5.3f' % (1/T)
           omega = (2 *  3.14159 * T)**2

           paz_sa=cornFreq2Paz(T,damp=D)
           paz_sa['sensitivity'] =omega 
           paz_sa['zeros'] = [] 
           for n in range(len(tu)):
              tu[n].simulate(paz_remove=None, paz_simulate=paz_sa, taper=True,
                             simulate_sensitivity=True, taper_fraction=0.05)
           
           
           per[j-1] = Ts
           
           #now measure for each i
           for i in range(len(tu)):
              if abs(max(tu[i])) >= abs(min(tu[i])):
                val=abs(max(tu[i]))
              else:
                val=abs(min(tu[i]))

              g=val/9.80665*100
              g='%10.3e' % (g)
              val='%10.3e' % (val)
              #here give spectral acceleration in standard units m/s^2
              # and not in g (suitable only for shakemap, can be
              # later converted
              tr[i]=UpdatePsaHeader(tr[i],j,val)

            
   for i in range(len(tr)):
     tr[i].stats['Tsa'] = per
     
   return tr
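
Inside its loop, get_PGMs builds a single-degree-of-freedom oscillator response for pseudo-spectral acceleration by stripping the zeros from the cornFreq2Paz output and scaling the sensitivity by omega squared. A small stand-alone sketch of that step (the helper name and arguments are illustrative):

import math
from obspy.signal import cornFreq2Paz  # older ObsPy import path used throughout these examples

def sa_oscillator_paz(freq, damp):
    # Oscillator response used for spectral acceleration in Example 12:
    # drop the zeros and scale the sensitivity by (2*pi*freq)**2.
    paz = cornFreq2Paz(freq, damp=damp)
    paz['zeros'] = []
    paz['sensitivity'] = (2.0 * math.pi * freq) ** 2
    return paz
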
Example 13
    return int(sec * trace.stats.sampling_rate)


def trId(stats):
    return stats.endtime, "%s%s%s%f" % (stats.network, stats.station,
                                        stats.channel, stats.sampling_rate)


mseed_files = sys.argv[1:]
if mseed_files == []:
    print __doc__
    sys.exit(1)

client = Client()

inst = cornFreq2Paz(1.0)
nfft = 4194304  # next nfft of 5h
station_list = []
last_endtime, last_id = 0, "--"
for file in mseed_files:
    print "\n", file,
    try:
        stream = read(file)
    except:
        continue
    stream.merge(-1)
    stream.sort()
    stats = stream[0].stats
    pick_file = "%s_%s_%s.picks" % (
        stats.starttime.year, stats.starttime.strftime("%j"), stats.station)
    if not stats.station in station_list:
Example 14
    def computeStepCal(self):
        # cal duration needs to be divided by 10000 for step cals only.  This
        # only applies for when you are reading the cal duration from the
        # database.
        if(self.dbconn is not None):
            # divide by 10000 when getting the cal_duration from the database
            duration = self.cal_duration / 10000.0
        else:
            duration = self.cal_duration

        # Determine the type of sensor from the metadata
        sensor = self._determineSensorType()

        # ignores every location except for Z for triaxial STS-2s
        if((self.dbconn is not None) and ("Z" not in self.outChannel) and
           (sensor == "STS-2HG" or sensor == "STS-4B" or sensor == "STS-2")):
            print("Skipped " + str(self.outChannel) + ' ' + sensor)

        # get the poles values for the sensor type
        pz = self._pzvals(sensor)

        # read data for the calibration
        try:
            stOUT = Stream()
            stime = UTCDateTime(self.startdate) - 5 * 60
            stOUT = read(
                self.dataOutLoc, starttime=stime,
                endtime=stime + duration + 5 * 60 + 900
            )
            stOUT.merge()
            stIN = read(
                self.dataInLoc, starttime=stime,
                endtime=stime + duration + 5 * 60 + 900
            )
            stIN.merge()
            trIN = stIN[0]
            trOUT = stOUT[0]
            trOUT.filter('lowpass', freq=.1)
            trIN.filter('lowpass', freq=.1)
            trIN.detrend('constant')
            trIN.normalize()
            trOUT.detrend('constant')
            trOUT.normalize()
            temp = trOUT.copy()
            temp.trim(endtime=stime + int(duration / 2.))
            if temp.max() < 0.0:
                trOUT.data = -trOUT.data
        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to read data for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to read data
                                          for manual input file ''' +
                                          str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # compute corner (cutoff) frequency
            f = 1. / (2 * math.pi / abs(pz['poles'][0]))
            # compute damping ratio
            h = abs(pz['poles'][0].real) / abs(pz['poles'][0])
            sen = 10.0

            print (
                'Using: h=' + str(h) + ' f=' + str(f) + ' sen = ' + str(sen))

            x = numpy.array([f, h, sen])
            try:
                # compute best fit
                bf = fmin(self._resi, x, args=(trIN, trOUT),
                          xtol=10 ** -8, ftol=10 ** -3, disp=False)
            except:
                bf = x

        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to calculate {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                          perform corner freq, damping ratio,
                                          and best fit calculations for input
                                          file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            pazNOM = cornFreq2Paz(f, h)
            pazNOM['zeros'] = [0. + 0.j]

            pazPERT = cornFreq2Paz(bf[0], bf[1])
            pazPERT['zeros'] = [0]

            trOUTsimPert = trOUT.copy()
            trOUTsimPert.simulate(paz_remove=pazPERT)
            trOUTsimPert.trim(
                trOUTsimPert.stats.starttime + 50, trOUTsimPert.stats.endtime - 50)
            trOUTsimPert.detrend('constant')
            trOUTsimPert.normalize()

            trOUTsim = trOUT.copy()

            trOUTsim.simulate(paz_remove=pazNOM)
            trOUTsim.trim(
                trOUTsim.stats.starttime + 50, trOUTsim.stats.endtime - 50)
            trOUTsim.detrend('constant')
            trOUTsim.normalize()

            trIN.trim(trIN.stats.starttime + 50, trIN.stats.endtime - 50)
            trIN.detrend('constant')
            trIN.normalize()

            compOUT = sum((trOUTsim.data - trIN.data) ** 2)
            compOUTPERT = sum((trOUTsimPert.data - trIN.data) ** 2)
        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to do calculation for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('''(Manual Override) Unable to
                                         perform poles calculation for input
                                         file ''' + str(self.dataInLoc) +
                                          ' and output file ' +
                                          str(self.dataOutLoc))
        try:
            # create a plot for the step calibration and save it to the ./temp
            # directory.  This directory will be deleted when the program is
            # finished running.
            plt.clf()
            t = numpy.arange(
                0, trOUTsim.stats.npts / trOUTsim.stats.sampling_rate, trOUTsim.stats.delta)
            plt.plot(t, trIN.data, 'b', label='input')
            plt.plot(t, trOUTsim.data, 'k', label='h=' + str(round(h, 6)) +
                     ' f=' + str(round(f, 6)) + ' resi=' + str(round(compOUT, 6)))
            plt.plot(t, trOUTsimPert.data, 'g', label='h=' + str(round(bf[1], 6)) + ' f=' + str(
                round(bf[0], 6)) + ' resi=' + str(round(compOUTPERT, 6)))
            plt.xlabel('Time (s)')
            plt.ylabel('Cnts normalized')
            plt.title('Step Calibration ' + trOUT.stats.station + ' ' + str(
                trOUT.stats.starttime.year) + ' ' + str(trOUT.stats.starttime.julday).zfill(3))
            plt.legend(prop={'size': 6})
            plt.savefig('temp/' + str(trOUT.stats.station) + str(self.outChannel) + str(self.location) +
                        str(self.startdate.year) + str(self.julianday) + 'step.png', format="png", dpi=400)
        except:
            if(self.dbconn is not None):
                self.stepcal_logger.error('Unable to plot {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')
            else:
                self.stepcal_logger.error('(Manual Override) Unable to make plot for input file ' + str(
                    self.dataInLoc) + ' and output file ' + str(self.dataOutLoc))
        if(self.dbconn is not None):
            try:
                plt.close()
                # insert results into the database
                fin = open('temp/' + str(trOUT.stats.station) + str(self.outChannel) + str(
                    self.location) + str(self.startdate.year) + str(self.julianday) + 'step.png', 'rb')
                imgdata = fin.read()
                cur = self.dbconn.cursor()
                cur.execute('''INSERT INTO tbl_300calresults (fk_calibrationid,
                              nominal_cornerfreq, nominal_dampingratio, nominal_resi,
                              fitted_cornerfreq, fitted_dampingratio, fitted_resi,
                              outchannel, stepcal_img)
                              VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s)''',
                            [self.cal_id, round(f, 6), round(h, 6),
                             round(compOUT, 6), round(bf[0], 6),
                             round(bf[1], 6), round(compOUTPERT, 6),
                             str(self.outChannel), psycopg2.Binary(imgdata)])
                self.dbconn.commit()
            except:
                self.stepcal_logger.error('Unable to insert into database for {' +
                                          'network = ' + self.network +
                                          ', station = ' + self.station +
                                          ', sensor = ' + str(sensor) +
                                          ', location = ' + str(self.location) +
                                          ', channel = ' + str(self.outChannel) +
                                          '}')

        else:
            try:
                print('nominal corner freq = ' + str(round(f, 6)) +
                      ', nominal damping ratio = ' + str(round(h, 6)) +
                      ', nominal best fit = ' + str(round(compOUT, 6)) +
                      ', fitted corner freq = ' + str(round(bf[0], 6)) +
                      ', fitted damping ratio = ' + str(round(bf[1], 6)) +
                      ', pert best fit ' + str(round(compOUTPERT, 6)))
                plt.show()
                plt.close()
            except:
                print(
                    '(Manual Override) Error displaying calculation results.')
Example 15
    
    # merging
    try:
        st.merge(0)
    except Exception, e:
        summary.append("Error while merging:")
        summary.append(str(e))
        summary = "\n".join(summary)
        summary += "\n" + "\n".join(("%s=%s" % (k, v) for k, v in PAR.items()))
        open(SUMMARY, "at").write(summary + "\n")
        continue

    # preprocessing, keep original data for plotting at end
    for tr in st:
        tr.data = detrend(tr.data)
    st.simulate(paz_remove="self", paz_simulate=cornFreq2Paz(1.0), remove_sensitivity=False)
    st.sort()
    st_trigger = st.copy()
    st_trigger.filter("bandpass", freqmin=PAR.LOW, freqmax=PAR.HIGH, corners=1, zerophase=True)
    st.trim(T1, T2)
    st_trigger.trim(T1, T2)
    st_trigger.trigger("recstalta", sta=PAR.STA, lta=PAR.LTA)
    summary.append(str(st))

    # do the triggering
    trigger_list = []
    for tr in st_trigger:
        tr.stats.channel = "recstalta"
        max_len = PAR.MAXLEN * tr.stats.sampling_rate
        trigger_sample_list = triggerOnset(tr.data, PAR.ON, PAR.OFF, max_len=max_len)
        for on, off in trigger_sample_list:
Example 16
def xcorrEvents(starttime, endtime, network_id='*', station_id='*',
                location_id='', channel_id='EHZ', phase='P',
                time_window=(-1, 6), method='manual', merge=True):
    """
    @param method: 'manual' or 'auto' or None.
    """
    wildcard = "%s.%s.%s.%s" % (network_id, station_id,
                                location_id, channel_id)
    # PAZ of instrument to simulate, 2.0Hz corner-frequency, 0.707 damping
    inst = cornFreq2Paz(2.0)
    # get all events between start and end time
    client = Client("http://teide.geophysik.uni-muenchen.de:8080",
                    user="******", password="******")
    event_list = client.event.getList(datetime=(starttime, endtime),
                                      localisation_method=method)

    print "Fetching events ..."
    networks = {}
    for event in event_list:
        id = event['resource_name']
        print "  EVENT:", str(event['datetime']), id
        # request event resource
        res = client.event.getXMLResource(id)
        # fetch all picks with given phase
        pick_list = res.xpath("/event/pick[phaseHint='%s']" % phase)
        # cycle through picks
        streams = []
        for pick in pick_list:
            temp = {}
            try:
                dt = UTCDateTime(str(pick.time.value))
            except:
                continue
            sid = pick.waveform.attrib['stationCode']
            nid = pick.waveform.attrib['networkCode'] or 'BW'
            cid = pick.waveform.attrib['channelCode']
            lid = pick.waveform.attrib['locationCode']
            pid = '%s.%s.%s.%s' % (nid, sid, lid, cid)
            print "    PICK: %s - %s - %s" % (pid, phase, dt)
            if not fnmatch.filter([pid], wildcard):
                continue
            # generate station/network list
            networks.setdefault(nid, {})
            networks[nid].setdefault(sid, [])
            networks[nid][sid].append((event, dt))

    print
    print "Correlate events over each station ..."
    # cycle through all networks/stations/events
    for nid, stations in networks.iteritems():
        for sid, events in stations.iteritems():
            print "  %s.%s:" % (nid, sid)
            if len(events) < 2:
                print "    -> Skipping: Need at least 2 events per station"
                print
                continue
            streams = []
            for event in events:
                id = event[0]['resource_name']
                dt = event[1]
                # get station PAZ for this date time
                paz = client.station.getPAZ(nid, sid, dt, location_id,
                                            channel_id)
                if not paz:
                    print "!!! Missing PAZ for %s.%s for %s" % (nid, sid, dt)
                    continue
                # get waveforms
                try:
                    stream = client.waveform.getWaveform(nid, sid, location_id,
                                                         channel_id,
                                                         dt + time_window[0],
                                                         dt + time_window[1])
                except:
                    msg = "!!! Error fetching waveform for %s.%s.%s.%s for %s"
                    print msg % (nid, sid, location_id, channel_id, dt)
                    continue
                if merge:
                    stream.merge()
                for trace in stream:
                    # calculate zero mean
                    trace.data = trace.data - trace.data.mean()
                    # instrument correction
                    #trace.data = seisSim(trace.data, trace.stats.sampling_rate,
                    #                     paz, inst_sim=inst, water_level=50.0)
                    trace.data = bandpassZPHSH(trace.data,2.0,20.0,
                                               df=trace.stats.sampling_rate,
                                               corners=4)
                    print '    Got Trace:', trace
                # append
                streams.append((id, stream))
            # cross correlation over all prepared streams
            l = len(streams)
            if l < 2:
                print "    -> Skipping: Need at least 2 events per station"
                print
                continue
            # output file
            filename = "%s.%s.txt" % (nid, sid)
            fp = open(filename, "w")
            # xcorr
            for i in range(0, l - 1):
                id1 = streams[i][0]
                tr1 = streams[i][1][0]
                for j in range(i + 1, l):
                    id2 = streams[j][0]
                    tr2 = streams[j][1][0]
                    # check sampling rate for both traces
                    if tr1.stats.sampling_rate != tr2.stats.sampling_rate:
                        print
                        print "!!! Sampling rates are not equal!"
                        continue
                    if tr1.stats.npts != tr2.stats.npts:
                        print
                        print "!!! Numbers of samples are not equal!"
                        continue
                    # divide by 2.0 as in eventcluster.c line 604
                    # remove last sample if npts is an odd number
                    delta = -1 * (tr1.stats.npts % 2)
                    winlen = int((tr1.stats.npts + delta) / 2.0)
                    shift, coe = xcorr(tr1.data[:delta].astype('float32'),
                                       tr2.data[:delta].astype('float32'),
                                       winlen)
                    fp.write("%d %d %.3f %d %s %s\n" % (i + 1, j + 1, coe,
                                                        shift, id1, id2))
            print
            fp.close()
Example 17
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import read
from obspy.signal import seisSim, cornFreq2Paz

onehzinst = cornFreq2Paz(1.0, damp=0.707) # 1Hz instrument
tr = read("http://examples.obspy.org/RJOB20090824.ehz")[0]
tr.data = tr.data - tr.data.mean()
sts2 = {'gain': 60077000.0,
        'poles': [(-0.037004000000000002+0.037016j),
                  (-0.037004000000000002-0.037016j),
                  (-251.33000000000001+0j),
                  (-131.03999999999999-467.29000000000002j),
                  (-131.03999999999999+467.29000000000002j)],
        'sensitivity': 2516778400.0,
        'zeros': [0j, 0j]}
data2 = seisSim(tr.data, tr.stats.sampling_rate, sts2,
                inst_sim=onehzinst, water_level=600.0)
data2 = data2 / sts2["sensitivity"]

# The plotting, plain matplotlib
t = np.arange(tr.stats.npts) / tr.stats.sampling_rate
plt.subplot(211)
plt.plot(t, tr.data, 'k')
plt.ylabel('STS-2 [counts]')
#
plt.subplot(212)
plt.plot(t, data2, 'k')
plt.ylabel('1Hz Instrument [m/s]')
plt.xlabel('Time [s]')
Example 18
from obspy.core import read
from obspy.signal import cornFreq2Paz
import numpy as np
import matplotlib.pyplot as plt

paz_sts2 = {
    'poles': [-0.037004 + 0.037016j, -0.037004 - 0.037016j, -251.33 + 0j,
              - 131.04 - 467.29j, -131.04 + 467.29j],
    'zeros': [0j, 0j],
    'gain': 60077000.0,
    'sensitivity': 2516778400.0}
paz_1hz = cornFreq2Paz(1.0, damp=0.707)  # 1Hz instrument
paz_1hz['sensitivity'] = 1.0

st = read()
# make a copy to keep our original data
st_orig = st.copy()

# Simulate instrument given poles, zeros and gain of
# the original and desired instrument
st.simulate(paz_remove=paz_sts2, paz_simulate=paz_1hz)


tr = st[0]
tr_orig = st_orig[0]

t = np.arange(tr.stats.npts) / tr.stats.sampling_rate

plt.subplot(211)
plt.plot(t, tr_orig.data, 'k')
plt.ylabel('STS-2 [counts]')
Example 19
import pickle, urllib
from obspy.core import UTCDateTime
from obspy.signal.array_analysis import sonic
from obspy.signal import cornFreq2Paz

# Load data
stream = pickle.load(urllib.urlopen("http://examples.obspy.org/agfa.dump"))
print stream
print stream[0].stats

#
# Instrument correction to 1Hz corner frequency
paz1hz = cornFreq2Paz(1.0, damp=0.707)
stream.simulate(paz_remove='self', paz_simulate=paz1hz)

# Execute sonic
arguments = dict(
        # slowness grid: X min, X max, Y min, Y max, Slow Step
        sll_x=-3.0, slm_x=3.0, sll_y=-3.0, slm_y=3.0, sl_s=0.03,
        # sliding window properties
        win_len=1.0, win_frac=0.05,
        # frequency properties
        frqlow=1.0, frqhigh=8.0, prewhiten=0,
        # restrict output
        semb_thres=-1e9, vel_thres=-1e9, verbose=True, timestamp='mlabhour',
        stime=UTCDateTime("20080217110515"),
        etime=UTCDateTime("20080217110545")
)
# Perform beamforming with previously set arguments.
out = sonic(stream, **arguments)
Example 20
from obspy.core import read
from obspy.signal import cornFreq2Paz
import numpy as np
import matplotlib.pyplot as plt

paz_sts2 = {
    'poles': [
        -0.037004 + 0.037016j, -0.037004 - 0.037016j, -251.33 + 0j,
        -131.04 - 467.29j, -131.04 + 467.29j
    ],
    'zeros': [0j, 0j],
    'gain':
    60077000.0,
    'sensitivity':
    2516778400.0
}
paz_1hz = cornFreq2Paz(1.0, damp=0.707)  # 1Hz instrument
paz_1hz['sensitivity'] = 1.0

st = read()
# make a copy to keep our original data
st_orig = st.copy()

# Simulate instrument given poles, zeros and gain of
# the original and desired instrument
st.simulate(paz_remove=paz_sts2, paz_simulate=paz_1hz)

tr = st[0]
tr_orig = st_orig[0]

t = np.arange(tr.stats.npts) / tr.stats.sampling_rate
Example 21
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import UTCDateTime
from obspy.arclink import Client
from obspy.signal import cornFreq2Paz, seisSim

# Retrieve data via ArcLink
# please provide a valid email address for the keyword user
client = Client(user="******")
t = UTCDateTime("2009-08-24 00:20:03")
st = client.getWaveform('BW', 'RJOB', '', 'EHZ', t, t + 30)
paz = client.getPAZ('BW', 'RJOB', '', 'EHZ', t)
paz = paz.values()[0]

# 1Hz instrument
one_hertz = cornFreq2Paz(1.0)
# Correct for frequency response of the instrument
res = seisSim(st[0].data.astype('float32'),
              st[0].stats.sampling_rate,
              paz,
              inst_sim=one_hertz)
# Correct for overall sensitivity
res = res / paz['sensitivity']

# Plot the seismograms
sec = np.arange(len(res)) / st[0].stats.sampling_rate
plt.subplot(211)
plt.plot(sec, st[0].data, 'k')
plt.title("%s %s" % (st[0].stats.station, t))
plt.ylabel('STS-2')
plt.subplot(212)
Example 22
def s2p(sec, trace):
    """Convert seconds to samples with the sampling rate of trace object"""
    return int(sec * trace.stats.sampling_rate)

def trId(stats):
    return stats.endtime, "%s%s%s%f" % (stats.network, stats.station,
                                        stats.channel, stats.sampling_rate)

mseed_files = sys.argv[1:]
if mseed_files == []:
    print __doc__
    sys.exit(1)

client = Client()

inst = cornFreq2Paz(1.0)
nfft = 4194304 # next nfft of 5h
station_list = []
last_endtime, last_id = 0, "--"
for file in mseed_files:
    print "\n", file,
    try:
        stream = read(file)
    except:
        continue
    stream.merge(-1)
    stream.sort()
    stats = stream[0].stats
    pick_file = "%s_%s_%s.picks" % (stats.starttime.year,
                                    stats.starttime.strftime("%j"),
                                    stats.station)