Example #1
    def test_relcal_sts2_vs_unknown(self):
        """
        Test relative calibration of unknown instrument vs STS2 in the same
        time range. Window length is set to 20 s, smoothing rate to 10.
        """
        st1 = read(os.path.join(self.path, 'ref_STS2'))
        st2 = read(os.path.join(self.path, 'ref_unknown'))
        calfile = os.path.join(self.path, 'STS2_simp.cal')

        freq, amp, phase = relcalstack(st1, st2, calfile, 20, smooth=10,
                                       save_data=False)

        # read in the reference responses
        un_resp = np.loadtxt(os.path.join(self.path, 'unknown.resp'))
        kn_resp = np.loadtxt(os.path.join(self.path, 'STS2.refResp'))

        # test if freq, amp and phase match the reference values
        np.testing.assert_array_almost_equal(freq, un_resp[:, 0],
                                             decimal=4)
        np.testing.assert_array_almost_equal(freq, kn_resp[:, 0],
                                             decimal=4)
        np.testing.assert_array_almost_equal(amp, un_resp[:, 1],
                                             decimal=4)
        np.testing.assert_array_almost_equal(phase, un_resp[:, 2],
                                             decimal=4)
Example #2
def test_FDSN_network():
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Changing the input_dics values for testing
    input_dics['min_date'] = '2011-03-01'
    input_dics['max_date'] = '2011-03-20'
    input_dics['min_mag'] = 8.9
    input_dics['datapath'] = 'test_%s' % dir_name
    input_dics['net'] = 'TA'
    input_dics['sta'] = 'Z3*'
    input_dics['cha'] = 'BHZ'
    input_dics['req_parallel'] = 'Y'
    input_dics['req_np'] = 4

    events = get_Events(input_dics, 'event-based')
    assert len(events) == 1

    FDSN_network(input_dics, events)

    st_raw = read(os.path.join(input_dics['datapath'],
                               '2011-03-01_2011-03-20',
                               '20110311_1',
                               'BH_RAW', '*'))
    assert len(st_raw) == 7

    st_wilber = read(os.path.join('tests', 'fdsn_waveforms', 'TA*'))

    for sta in ['Z35A', 'Z37A', 'Z39A']:
        tr_raw = st_raw.select(station=sta)[0]
        tr_wilber = st_wilber.select(station=sta)[0]
        tr_diff = abs(tr_raw.data - tr_wilber.data)
        assert max(tr_diff) == 0.
Example #3
def adj_sac_name(i, sta_name_i, yspec_in_names_i, path1):
    """
    Adjusting SAC names
    It is meant to be used in parallel
    """
    if (yspec_in_names_i[0] - float(sta_name_i[5])) > 0.01:
        print('ERROR: Difference in latitude: %s'
              % (yspec_in_names_i[0] - float(sta_name_i[5])))
    if (yspec_in_names_i[1] - float(sta_name_i[6])) > 0.01:
        print('ERROR: Difference in longitude: %s'
              % (yspec_in_names_i[1] - float(sta_name_i[6])))
    for chan in ['BHE', 'BHN', 'BHZ']:
        tr = read(os.path.join(path1, 'SAC', 'dis.RS' + '%02d' % (i+1) + '..' + chan))[0]
        tr.write(os.path.join(path1, 'SAC_realName', 'grf.%s.%s.%s.x00.%s' % (sta_name_i[0], sta_name_i[1],
                                                                              sta_name_i[2], chan)), format='SAC')
        tr_new = read(os.path.join(path1, 'SAC_realName', 'grf.%s.%s.%s.x00.%s' % (sta_name_i[0], sta_name_i[1],
                                                                                   sta_name_i[2], chan)))[0]
        tr_new.stats.network = sta_name_i[0]
        tr_new.stats.station = sta_name_i[1]
        tr_new.stats.location = sta_name_i[2]
        tr_new.stats.channel = chan
        tr_new.stats.sac.stla = float(sta_name_i[5])
        tr_new.stats.sac.stlo = float(sta_name_i[6])
        tr_new.stats.sac.stel = float(sta_name_i[7])
        tr_new.stats.sac.stdp = float(sta_name_i[8])

        tr_new.stats.sac.evla = float(sta_name_i[9])
        tr_new.stats.sac.evlo = float(sta_name_i[10])
        tr_new.stats.sac.evdp = float(sta_name_i[11])
        tr_new.write(os.path.join(path1, 'SAC_realName', 'grf.%s.%s.%s.x00.%s' % (sta_name_i[0], sta_name_i[1],
                                                                                  sta_name_i[2], chan)),
                     format='SAC')
Example #4
    def test_issue341(self):
        """
        Tests issue #341

        Read/write of MiniSEED files with huge sampling rates/delta values.
        """
        tempfile = NamedTemporaryFile().name
        # 1 - sampling rate
        st = read()
        tr = st[0]
        tr.stats.sampling_rate = 1000000000.0
        tr.write(tempfile, format="MSEED")
        # read again
        st = read(tempfile)
        self.assertEqual(st[0].stats.sampling_rate, 1000000000.0)
        # 2 - delta
        st = read()
        tr = st[0]
        tr.stats.delta = 10000000.0
        tr.write(tempfile, format="MSEED")
        # read again
        st = read(tempfile)
        self.assertAlmostEqual(st[0].stats.delta, 10000000.0, 0)
        # clean up
        os.remove(tempfile)
Example #5
File: tsLib.py Project: ChrisBail/NEW
def getChannelWaveformFiles(network, station, location, channel, starttime,
                            endtime, removeTrend,
                            performInstrumentCorrection, applyScale,
                            deconFilter1, deconFilter2, deconFilter3,
                            deconFilter4, waterLevel, unit, client, fileTag):

   #
   # stream holds the final stream
   #
   thisStartTime = UTCDateTime(starttime)
   thisEndTime   = UTCDateTime(endtime)
   stream   = Stream()
   streamIn = Stream()
   try:
      #
      # read in the files to a stream
      #
      print "[INFO] checking:",fileTag
      if performInstrumentCorrection:
         streamIn       = read(fileTag, starttime=thisStartTime, endtime=thisEndTime, nearest_sample=True, apply_calib=False )
      else:
         print "[INFO] Apply scaling"
         streamIn       = read(fileTag, starttime=thisStartTime, endtime=thisEndTime, nearest_sample=True, apply_calib=applyScale )
      #print "STREAM IN",fileTag, starttime, endtime

   except Exception as e:
      print(str(e))
      print("[ERROR] client.get_waveforms %s %s %s %s %s %s"
            % (network, station, location, channel, starttime, endtime))
      return None
Example #6
File: test_core.py Project: egdorf/obspy
    def test_writingSUFileWithNoHeader(self):
        """
        If the trace has no trace.su attribute, one should still be able to
        write a SeismicUnix file.

        This is not recommended because most Trace.stats attributes will be
        lost while writing SU.
        """
        st = read()
        del st[1:]
        st[0].data = np.require(st[0].data, "float32")
        outfile = NamedTemporaryFile().name
        st.write(outfile, format="SU")
        st2 = read(outfile)
        os.remove(outfile)
        # Compare new and old stream objects. All the other header attributes
        # will not be set.
        np.testing.assert_array_equal(st[0].data, st2[0].data)
        self.assertEqual(st[0].stats.starttime, st2[0].stats.starttime)
        self.assertEqual(st[0].stats.endtime, st2[0].stats.endtime)
        self.assertEqual(st[0].stats.sampling_rate, st2[0].stats.sampling_rate)
        # Writing and reading this new stream object should not change
        # anything.
        st2.write(outfile, format="SU")
        st3 = read(outfile)
        os.remove(outfile)
        np.testing.assert_array_equal(st2[0].data, st3[0].data)
        # Remove the su attributes because they will not be equal due to lazy
        # header attributes.
        del st2[0].stats.su
        del st3[0].stats.su
        self.assertEqual(st2[0].stats, st3[0].stats)
Example #7
    def test_xcorrPickCorrection(self):
        """
        Test cross correlation pick correction on a set of two small local
        earthquakes.
        """
        st1 = read(os.path.join(self.path,
                                'BW.UH1._.EHZ.D.2010.147.a.slist.gz'))
        st2 = read(os.path.join(self.path,
                                'BW.UH1._.EHZ.D.2010.147.b.slist.gz'))

        tr1 = st1.select(component="Z")[0]
        tr2 = st2.select(component="Z")[0]
        t1 = UTCDateTime("2010-05-27T16:24:33.315000Z")
        t2 = UTCDateTime("2010-05-27T16:27:30.585000Z")

        dt, coeff = xcorrPickCorrection(t1, tr1, t2, tr2, 0.05, 0.2, 0.1)
        self.assertAlmostEqual(dt, -0.014459080288833711)
        self.assertAlmostEqual(coeff, 0.91542878457939791)
        dt, coeff = xcorrPickCorrection(t2, tr2, t1, tr1, 0.05, 0.2, 0.1)
        self.assertAlmostEqual(dt, 0.014459080288833711)
        self.assertAlmostEqual(coeff, 0.91542878457939791)
        dt, coeff = xcorrPickCorrection(
            t1, tr1, t2, tr2, 0.05, 0.2, 0.1, filter="bandpass",
            filter_options={'freqmin': 1, 'freqmax': 10})
        self.assertAlmostEqual(dt, -0.013025086360067755)
        self.assertAlmostEqual(coeff, 0.98279277273758803)
Example #8
    def computeSineCal(self):
        try:
            # Read in BH and BC
            dataIN = read(self.dataInLoc)
            dataOUT = read(self.dataOutLoc)
            # Convert start date to UTC
            stime = UTCDateTime(str(self.startdate))

            # Trim data to only grab the sine calibration
            dataIN.trim(starttime=stime, endtime=stime + self.cal_duration)
            dataOUT.trim(starttime=stime, endtime=stime + self.cal_duration)

            # Calculate RMS of both traces and divide
            dataINRMS = math.sqrt(2.) * sum(numpy.square(dataIN[0].data))
            dataINRMS /= float(dataIN[0].stats.npts)
            dataOUTRMS = math.sqrt(2.) * sum(numpy.square(dataOUT[0].data))
            dataOUTRMS /= float(dataOUT[0].stats.npts)

            if(self.dbconn is not None):
                # Write results to database
                cur = self.dbconn.cursor()
                cur.execute("""INSERT INTO tbl_310calresults (fk_calibrationid,
                            input_rms, output_rms, outchannel, coil_constant)
                            VALUES (""" + "'" + str(self.cal_id) + "', '" +
                            str(dataINRMS) + "', '" + str(dataOUTRMS) +
                            "', '" + str(self.outChannel) + "', '" +
                            str(dataINRMS / dataOUTRMS) + "')")
                self.dbconn.commit()
            else:
                print('input rms = ' + str(dataINRMS) +
                      ', output rms = ' + str(dataOUTRMS) +
                      ', coil constant = ' + str(dataINRMS / dataOUTRMS))
        except Exception:
            self.sinecal_logger.error("Unexpected error: %s",
                                      sys.exc_info()[0])
Example #9
 def test_allDataTypesAndEndiansInMultipleFiles(self):
     """
     Tests writing all different types. This is a test which is independent
     of the read method. Only the data part is verified.
     """
     file = os.path.join(self.path, "data", \
                         "BW.BGLD.__.EHE.D.2008.001.first_record")
     tempfile = NamedTemporaryFile().name
     # Read the data and copy them
     st = read(file)
     data_copy = st[0].data.copy()
     # Float64, Float32, Int32, Int16, Char
     encodings = {5: "f8", 4: "f4", 3: "i4", 0: "S1", 1: "i2"}
     byteorders = {0: '<', 1: '>'}
     for byteorder, btype in byteorders.items():
         for encoding, dtype in encodings.items():
             # Convert data to floats and write them again
             st[0].data = data_copy.astype(dtype)
             st.write(tempfile, format="MSEED", encoding=encoding,
                      reclen=256, byteorder=byteorder)
             # Read the first record of data (without the header), not using ObsPy
             s = open(tempfile, "rb").read()
             data = np.frombuffer(s[56:256], dtype=btype + dtype)
             np.testing.assert_array_equal(data, st[0].data[:len(data)])
             # Read the binary chunk of data with ObsPy
             st2 = read(tempfile)
             np.testing.assert_array_equal(st2[0].data, st[0].data)
     os.remove(tempfile)
Example #10
File: test_core.py Project: msimon00/obspy
 def test_issue390(self):
     """
     Read all SAC headers if debug_headers flag is enabled.
     """
     # 1 - binary SAC
     tr = read(self.file, headonly=True, debug_headers=True)[0]
     self.assertEqual(tr.stats.sac.nzyear, 1978)
     self.assertEqual(tr.stats.sac.nzjday, 199)
     self.assertEqual(tr.stats.sac.nzhour, 8)
     self.assertEqual(tr.stats.sac.nzmin, 0)
     self.assertEqual(tr.stats.sac.nzsec, 0)
     self.assertEqual(tr.stats.sac.nzmsec, 0)
     self.assertEqual(tr.stats.sac.delta, 1.0)
     self.assertEqual(tr.stats.sac.scale, -12345.0)
     self.assertEqual(tr.stats.sac.npts, 100)
     self.assertEqual(tr.stats.sac.knetwk, '-12345  ')
     self.assertEqual(tr.stats.sac.kstnm, 'STA     ')
     self.assertEqual(tr.stats.sac.kcmpnm, 'Q       ')
     # 2 - ASCII SAC
     tr = read(self.filexy, headonly=True, debug_headers=True)[0]
     self.assertEqual(tr.stats.sac.nzyear, -12345)
     self.assertEqual(tr.stats.sac.nzjday, -12345)
     self.assertEqual(tr.stats.sac.nzhour, -12345)
     self.assertEqual(tr.stats.sac.nzmin, -12345)
     self.assertEqual(tr.stats.sac.nzsec, -12345)
     self.assertEqual(tr.stats.sac.nzmsec, -12345)
     self.assertEqual(tr.stats.sac.delta, 1.0)
     self.assertEqual(tr.stats.sac.scale, -12345.0)
     self.assertEqual(tr.stats.sac.npts, 100)
     self.assertEqual(tr.stats.sac.knetwk, '-12345  ')
     self.assertEqual(tr.stats.sac.kstnm, 'sta     ')
     self.assertEqual(tr.stats.sac.kcmpnm, 'Q       ')
Example #11
File: test_client.py Project: egdorf/obspy
 def test_saveWaveformCompressed(self):
     """
     Tests saving compressed and not unpacked bzip2 files to disk.
     """
     mseedfile = NamedTemporaryFile(suffix='.bz2').name
     fseedfile = NamedTemporaryFile(suffix='.bz2').name
     try:
         # initialize client
         client = Client()
         start = UTCDateTime(2008, 1, 1, 0, 0)
         end = start + 1
         # MiniSEED
         client.saveWaveform(mseedfile, 'GE', 'APE', '', 'BHZ', start, end,
                             unpack=False)
         # check if compressed
         self.assertEqual(open(mseedfile, 'rb').read(2), b'BZ')
         # importing via read should work too
         read(mseedfile)
         # Full SEED
         client.saveWaveform(fseedfile, 'GE', 'APE', '', 'BHZ', start, end,
                             format="FSEED", unpack=False)
         # check if compressed
         self.assertEqual(open(fseedfile, 'rb').read(2), b'BZ')
         # importing via read should work too
         read(fseedfile)
     finally:
         os.remove(mseedfile)
         os.remove(fseedfile)
Example #12
def GFSelectZ(stlat, stlon, hdir):
    # eplat and eplon (event coordinates) are assumed to be module-level
    # globals set elsewhere in this script
    dist = locations2degrees(eplat, eplon, stlat, stlon)

    # Some GFs exist only for odd distances; // keeps integer division
    # under Python 3 as well.
    dist_str = str(int(dist * 10.) // 2 * 2 + 1)
    #dist_str = str(int(dist*10.))
    dist_form = dist_str.zfill(4)

    ## Loading files
    trPP = read(hdir + "PP/GF." + dist_form + ".SY.LHZ.SAC")[0]
    #trPP.data -= trPP.data[0]
    trRR = read(hdir + "RR/GF." + dist_form + ".SY.LHZ.SAC")[0]
    #trRR.data -= trRR.data[0]
    trRT = read(hdir + "RT/GF." + dist_form + ".SY.LHZ.SAC")[0]
    #trRT.data -= trRT.data[0]
    trTT = read(hdir + "TT/GF." + dist_form + ".SY.LHZ.SAC")[0]
    #trTT.data -= trTT.data[0]

    return trPP, trRR, trRT, trTT
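A hypothetical call, assuming the eplat/eplon module globals are set and hdir points at a Green's-function tree laid out as in Example #38 (the path is a placeholder):

trPP, trRR, trRT, trTT = GFSelectZ(stlat, stlon, '/path/to/GFS/H003.5/')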
Example #13
  def test_polarization(self):
    self.mat = np.array([[1,0,0],[0,-1,1],[1,1,1]])
    l1, l2, l3 = np.sqrt(2), 1, -np.sqrt(2)
    expRect = 1 - (l2+l3)/(2*l1)
    expPlan = 1 - (2*l3)/(l1+l2)

    rect, plan, lambda_max = polarization_analysis(self.mat,plot=False)
    self.assertAlmostEqual(lambda_max, l1, places=6)
    self.assertAlmostEqual(rect, expRect, places=6)
    self.assertAlmostEqual(plan, expPlan, places=6)

    file = "/home/nadege/waveloc/data/Piton/2011-02-02/2011-02-02T00:00:00.YA.UV15.HHZ.filt.mseed"
    cmin = utcdatetime.UTCDateTime("2011-02-02T00:58:47.720000Z")-15
    cmax = utcdatetime.UTCDateTime("2011-02-02T00:58:47.720000Z")+135

    ponset = 1400 
    st_z = read(file,starttime=cmin,endtime=cmax)
    tr_z = st_z[0].data[ponset-10:ponset+30]
 
    file_n = "%s/HHN/*%s*HHN*.filt.*"%(os.path.dirname(file),st_z[0].stats.station)
    file_e = "%s/HHE/*%s*HHE*.filt.*"%(os.path.dirname(file),st_z[0].stats.station)
    st_n = read(file_n,starttime=cmin,endtime=cmax)
    st_e = read(file_e,starttime=cmin,endtime=cmax)
    tr_n = st_n[0].data[ponset-10:ponset+30]
    tr_e = st_e[0].data[ponset-10:ponset+30]

    x = np.array([tr_z, tr_n, tr_e])
    print(tr_e.shape)
    mat = np.cov(x)
    rect, plan, lambda_max = polarization_analysis(mat,plot=True)
    from obspy.signal.polarization import eigval
    leigenv1, leigenv2, leigenv3, rect, plan, dleigenv, drect, dplan = eigval(tr_e,tr_n,tr_z,[1,1,1,1,1])
    print(lambda_max)
    print(leigenv1, leigenv2, leigenv3)
Example #14
File: test_core.py Project: egdorf/obspy
 def test_writeStreamViaObsPy(self):
     """
     Write streams, i.e. multiple files via L{obspy.Trace}
     """
     testdata = np.array([111, 111, 111, 111, 111, 109, 106, 103, 103,
                          110, 121, 132, 139])
     testfile = NamedTemporaryFile().name
     self.file = os.path.join(self.path, '3cssan.reg.8.1.RNON.wav')
     tr = read(self.file, format='WAV')[0]
     np.testing.assert_array_equal(tr.data[:13], testdata)
     # write
     st2 = Stream([Trace(), Trace()])
     st2[0].data = tr.data.copy()       # copy the data
     st2[1].data = tr.data.copy() // 2  # be sure data are different
     st2.write(testfile, format='WAV', framerate=7000)
     # read without giving the WAV format option
     base, ext = os.path.splitext(testfile)
     testfile0 = "%s%03d%s" % (base, 0, ext)
     testfile1 = "%s%03d%s" % (base, 1, ext)
     tr30 = read(testfile0)[0]
     tr31 = read(testfile1)[0]
     self.assertEqual(tr30.stats, tr.stats)
     self.assertEqual(tr31.stats, tr.stats)
     np.testing.assert_array_equal(tr30.data[:13], testdata)
     np.testing.assert_array_equal(tr31.data[:13], testdata // 2)
     os.remove(testfile)
     os.remove(testfile0)
     os.remove(testfile1)
Example #15
File: tests.py Project: RDePlaen/MSNoise
 def test_022_check_content(self):
     from obspy.core import read
     from numpy.testing import assert_allclose
     from ..api import connect, get_filters, get_station_pairs, \
         get_components_to_compute
     db = connect()
     for filter in get_filters(db):
         for components in get_components_to_compute(db):
             for (sta1, sta2) in get_station_pairs(db):
                 pair = "%s_%s_%s_%s" % (sta1.net, sta1.sta,
                                         sta2.net, sta2.sta)
                 tmp1 = os.path.join("STACKS",
                                     "%02i" % filter.ref,
                                     "001_DAYS",
                                     components,
                                     pair,
                                     "2010-09-01.MSEED")
                 tmp2 = os.path.join("STACKS",
                                     "%02i" % filter.ref,
                                     "001_DAYS",
                                     components,
                                     pair,
                                     "2010-09-01.SAC")
                 tmp1 = read(tmp1)
                 tmp2 = read(tmp2)
                 assert_allclose(tmp1[0].data, tmp2[0].data)
Example #16
File: test_core.py Project: egdorf/obspy
 def test_readAndWriteViaObsPy(self):
     """
     Read and Write files via L{obspy.Stream}
     """
     # read trace
     tr = read(self.file)[0]
     # write comparison trace
     st2 = Stream()
     st2.traces.append(Trace())
     tr2 = st2[0]
     tr2.data = copy.deepcopy(tr.data)
     tr2.stats = copy.deepcopy(tr.stats)
     tempfile = NamedTemporaryFile().name
     st2.write(tempfile, format="SAC")
     # read comparison trace
     tr3 = read(tempfile)[0]
     os.remove(tempfile)
     # check if equal
     self.assertEqual(tr3.stats["station"], tr.stats["station"])
     self.assertEqual(tr3.stats.npts, tr.stats.npts)
     self.assertEqual(tr.stats["sampling_rate"], tr.stats["sampling_rate"])
     self.assertEqual(tr.stats.get("channel"), tr.stats.get("channel"))
     self.assertEqual(tr.stats.get("starttime"), tr.stats.get("starttime"))
     self.assertEqual(tr.stats.sac.get("nvhdr"), tr.stats.sac.get("nvhdr"))
     np.testing.assert_equal(tr.data, tr3.data)
Example #17
File: test_core.py Project: egdorf/obspy
 def test_notUsedButGivenHeaders(self):
     """
     Test case for #188
     """
     tr1 = read(self.file)[0]
     not_used = [
         "xminimum",
         "xmaximum",
         "yminimum",
         "ymaximum",
         "unused6",
         "unused7",
         "unused8",
         "unused9",
         "unused10",
         "unused11",
         "unused12",
     ]
     for i, header_value in enumerate(not_used):
         tr1.stats.sac[header_value] = i
     sac_file = NamedTemporaryFile().name
     tr1.write(sac_file, "SAC")
     tr2 = read(sac_file)[0]
     os.remove(sac_file)
     for i, header_value in enumerate(not_used):
          self.assertEqual(int(tr2.stats.sac[header_value]), i)
Example #18
 def test_readGzip2File(self):
     """
     Tests reading gzip compressed waveforms.
     """
     path = os.path.dirname(__file__)
     st1 = read(os.path.join(path, 'data', 'tspair.ascii.gz'))
     st2 = read(os.path.join(path, 'data', 'tspair.ascii'))
     self.assertTrue(st1 == st2)
Example #19
 def test_readBzip2File(self):
     """
     Tests reading bzip2 compressed waveforms.
     """
     path = os.path.dirname(__file__)
     st1 = read(os.path.join(path, 'data', 'slist.ascii.bz2'))
     st2 = read(os.path.join(path, 'data', 'slist.ascii'))
     self.assertTrue(st1 == st2)
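Examples #18 and #19 together show that read() handles gzip- and bzip2-compressed waveform files transparently, with no extra keyword needed; a hypothetical call (path is a placeholder):

st = read('/path/to/slist.ascii.bz2')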
Example #20
File: test_core.py Project: jshridha/obspy
 def test_read_seisanVsReference(self):
     """
     Test for #970
     """
     _file = os.path.join(self.path, "SEISAN_Bug", "2011-09-06-1311-36S.A1032_001BH_Z")
     st = read(_file, format="SEISAN")
     _file_ref = os.path.join(self.path, "SEISAN_Bug", "2011-09-06-1311-36S.A1032_001BH_Z_MSEED")
     st_ref = read(_file_ref, format="MSEED")
     self.assertTrue(np.allclose(st[0].data, st_ref[0].data))
Example #21
File: test_core.py Project: egdorf/obspy
 def test_readXYwriteXYViaObspy(self):
     """
     Write/Read files via L{obspy.Stream}
     """
     tr = read(self.filexy, format="SACXY")[0]
     tempfile = NamedTemporaryFile().name
     tr.write(tempfile, format="SACXY")
     tr1 = read(tempfile)[0]
     os.remove(tempfile)
     self.assertTrue(tr == tr1)
Example #22
def test_FDSN_ARC_IC():
    (options, args, parser) = command_parse()
    input_dics = read_input_command(parser)
    # Changing the input_dics values for testing
    input_dics['min_date'] = '2011-03-01'
    input_dics['max_date'] = '2011-03-20'
    input_dics['min_mag'] = 8.9
    input_dics['datapath'] = 'test_%s' % dir_name
    input_dics['net'] = 'TA'
    input_dics['sta'] = 'Z3*'
    input_dics['cha'] = 'BHZ'
    input_dics['req_parallel'] = 'Y'
    input_dics['req_np'] = 4

    FDSN_ARC_IC(input_dics, input_dics['fdsn_base_url'])

    st_cor = read(os.path.join(input_dics['datapath'],
                               '2011-03-01_2011-03-20',
                               '20110311_1',
                               'BH', '*'))
    assert len(st_cor) == 7

    st_wilber = read(os.path.join('tests', 'fdsn_waveforms', 'TA*'))

    paz_35 = {'gain': 5.714000e+08,
              'sensitivity': 6.309070e+08,
              'zeros': (0.0, 0.0, 0.0),
              'poles': (-3.701000e-02+3.701000e-02j,
                        -3.701000e-02-3.701000e-02j,
                        -1.131000e+03+0.000000e+00j,
                        -1.005000e+03+0.000000e+00j,
                        -5.027000e+02+0.000000e+00j)}
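    # paz_35: poles, zeros, gain and overall sensitivity of the instrument
    # response that seisSim removes from the Wilber traces below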

    for sta in ['Z35A', 'Z37A', 'Z39A']:
        tr_cor = st_cor.select(station=sta)[0]
        tr_wilber = st_wilber.select(station=sta)[0]
        tr_wilber_corr = tr_wilber.copy()
        tr_wilber_corr.detrend()
        corr_wilber = seisSim(tr_wilber.data,
                              tr_wilber.stats.sampling_rate,
                              paz_remove=paz_35,
                              paz_simulate=None,
                              remove_sensitivity=True,
                              simulate_sensitivity=False,
                              water_level=600.,
                              zero_mean=True,
                              taper=True,
                              taper_fraction=0.05,
                              pre_filt=(0.008, 0.012, 3.0, 4.0),
                              pitsasim=False,
                              sacsim=True)
        tr_wilber_corr.data = corr_wilber
        tr_diff = abs(tr_cor.data - tr_wilber_corr.data)
        # amplitude of the traces is in the order of 1e6 or so
        assert max(tr_diff) < 0.00001
Example #23
File: test_core.py Project: Keita1/obspy
 def test_read_seisan_vs_reference(self):
     """
     Test for #970
     """
     _file = os.path.join(self.path, 'SEISAN_Bug',
                          '2011-09-06-1311-36S.A1032_001BH_Z')
     st = read(_file, format='SEISAN')
     _file_ref = os.path.join(self.path, 'SEISAN_Bug',
                              '2011-09-06-1311-36S.A1032_001BH_Z_MSEED')
     st_ref = read(_file_ref, format='MSEED')
     self.assertTrue(np.allclose(st[0].data, st_ref[0].data))
Example #24
def read_seed(DATAFILE, BYTEOFFSET):
    """Read a SEED or miniSEED 'sd' datatype.
    """
    if BYTEOFFSET:
        with open(DATAFILE, 'rb') as f0:
            f0.seek(BYTEOFFSET)
            f1 = StringIO(f0.read())
            tr = read(f1, format='MSEED')[0]
    else:
        tr = read(DATAFILE, format='MSEED')[0]

    return tr.data
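A hypothetical call (the path is a placeholder; a non-zero BYTEOFFSET starts decoding at that byte within the file):

data = read_seed('/path/to/waveform.mseed', 0)
data = read_seed('/path/to/waveform.mseed', 4096)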
Example #25
File: api.py Project: RDePlaen/MSNoise
def get_results(session, station1, station2, filterid, components, dates,
                mov_stack=1, format="stack"):
    export_format = get_config(session, "export_format")
    if format == "stack":
        stack = np.zeros(get_maxlag_samples(session))
        i = 0
        for date in dates:
            daystack = os.path.join(
                "STACKS",
                "%02i" % filterid,
                "%03i_DAYS" % mov_stack,
                components,
                "%s_%s" % (station1, station2),
                str(date),
            )
            # logging.debug('reading: %s' % daystack)
            if export_format == "BOTH":
                daystack += ".MSEED"
            elif export_format == "SAC":
                daystack += ".SAC"
            elif export_format == "MSEED":
                daystack += ".MSEED"
            try:
                st = read(daystack)
                if not np.any(np.isnan(st[0].data)) and not np.any(np.isinf(st[0].data)):
                    stack += st[0].data
                    i += 1
            except Exception:
                # missing or unreadable day file: skip it
                pass
        if i > 0:
            return i, stack / i
        else:
            return 0, None

    elif format == "matrix":
        stack = np.zeros((len(dates), get_maxlag_samples(session))) * np.nan
        i = 0
        base = os.path.join(
            "STACKS", "%02i" % filterid, "%03i_DAYS" % mov_stack, components, "%s_%s" % (station1, station2), "%s"
        )
        if export_format == "BOTH":
            base += ".MSEED"
        elif export_format == "SAC":
            base += ".SAC"
        elif export_format == "MSEED":
            base += ".MSEED"
        for j, date in enumerate(dates):
            daystack = base % str(date)
            try:
                stack[j][:] = read(daystack)[0].data
                i += 1
            except Exception:
                # missing or unreadable day file: leave NaNs for this day
                pass
        return i, stack
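A hypothetical call (session and dates come from the MSNoise API used elsewhere in this project; station names are placeholders):

n, stack = get_results(session, 'NET1.STA1', 'NET2.STA2', filterid=1,
                       components='ZZ', dates=dates, format='stack')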
Example #26
File: test_core.py Project: egdorf/obspy
 def test_readDos(self):
     """
     Read file with dos newlines / encoding, that is
     Line Feed (LF) and Carriage Return (CR)
     see #355
     """
     filedos = os.path.join(self.path, 'data',
                            'loc_RJOB20050831023349_first100_dos.z')
     fileunix = os.path.join(self.path, 'data', 'loc_RJOB20050831023349.z')
     st = read(filedos, verify_chksum=True)
     st2 = read(fileunix, verify_chksum=True)
     np.testing.assert_equal(st[0].data, st2[0].data[:100])
     self.assertEqual(st[0].stats['station'], 'RJOB')
Example #27
 def test_enforceSteim2WithSteim1asEncoding(self):
     """
     This tests whether the encoding kwarg overwrites the encoding in
     trace.stats.mseed.encoding.
     """
     file = os.path.join(self.path, "data",
                         "BW.BGLD.__.EHE.D.2008.001.first_record")
     st = read(file)
     self.assertEqual(st[0].stats.mseed.encoding, 'STEIM1')
     tempfile = NamedTemporaryFile().name
     st.write(tempfile, format='MSEED', encoding='STEIM2')
     st2 = read(tempfile)
     os.remove(tempfile)
     self.assertEqual(st2[0].stats.mseed.encoding, 'STEIM2')
Example #28
File: test_core.py Project: egdorf/obspy
 def test_readViaObsPy(self):
     """
     Read files via L{obspy.Trace}
     """
     testdata = np.array([64, 78, 99, 119, 123, 107,
                          72, 31, 2, 0, 30, 84, 141])
     tr = read(self.file)[0]
     self.assertEqual(tr.stats.npts, 2599)
     self.assertEqual(tr.stats['sampling_rate'], 7000)
     np.testing.assert_array_equal(tr.data[:13], testdata)
     tr2 = read(self.file, format='WAV')[0]
     self.assertEqual(tr2.stats.npts, 2599)
     self.assertEqual(tr2.stats['sampling_rate'], 7000)
      np.testing.assert_array_equal(tr2.data[:13], testdata)
Example #29
    def test_issue289(self):
        """
        Tests issue #289.

        Reading MiniSEED using start-/endtime outside of data should result in
        an empty Stream object.
        """
        # 1
        file = os.path.join(self.path, 'data', 'steim2.mseed')
        st = read(file, starttime=UTCDateTime() - 10, endtime=UTCDateTime())
        self.assertEqual(len(st), 0)
        # 2
        file = os.path.join(self.path, 'data', 'fullseed.mseed')
        st = read(file, starttime=UTCDateTime() - 10, endtime=UTCDateTime())
        self.assertEqual(len(st), 0)
Example #30
 def test_readWithMissingBlockette010(self):
     """
     Reading a Full/Mini-SEED w/o blockette 010 but blockette 008.
     """
     # 1 - Mini-SEED
     file = os.path.join(self.path, 'data', 'blockette008.mseed')
     tr = read(file)[0]
     self.assertEqual('BW.PART..EHZ', tr.id)
     self.assertEqual(1642, tr.stats.npts)
     # 2 - full SEED
     file = os.path.join(self.path, 'data',
                         'RJOB.BW.EHZ.D.300806.0000.fullseed')
     tr = read(file)[0]
     self.assertEqual('BW.RJOB..EHZ', tr.id)
     self.assertEqual(412, tr.stats.npts)
Example #31
from __future__ import print_function
from obspy.core import read, UTCDateTime
from obspy.core.util.geodetics import gps2DistAzimuth
from obspy.xseed import Parser
from math import log10

st = read("../data/LKBD.MSEED")

paz_wa = {
    'sensitivity': 2800,
    'zeros': [0j],
    'gain': 1,
    'poles': [-6.2832 - 4.7124j, -6.2832 + 4.7124j]
}

parser = Parser("../data/LKBD.dataless")
paz_le3d5s = parser.getPAZ("CH.LKBD..EHZ")

st.simulate(paz_remove=paz_le3d5s, paz_simulate=paz_wa, water_level=10)

t = UTCDateTime("2012-04-03T02:45:03")
st.trim(t, t + 50)

tr_n = st.select(component="N")[0]
ampl_n = max(abs(tr_n.data))
tr_e = st.select(component="E")[0]
ampl_e = max(abs(tr_e.data))
ampl = max(ampl_n, ampl_e)

sta_lat = 46.38703
sta_lon = 7.62714
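The snippet is truncated here; the as-yet-unused imports (gps2DistAzimuth, log10) suggest it continues by computing an epicentral distance and a local magnitude. A hedged sketch of such a step, with placeholder event coordinates and illustrative attenuation coefficients (none of these values are from the original):

event_lat, event_lon = 46.218, 7.706  # hypothetical epicentre
epi_dist, az, baz = gps2DistAzimuth(event_lat, event_lon, sta_lat, sta_lon)
epi_dist = epi_dist / 1000.  # metres -> kilometres
a, b = 0.018, 2.17  # illustrative attenuation coefficients
ml = log10(ampl * 1000.) + a * epi_dist + b  # amplitude in m -> mm
print(ml)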
Example #32
def multiplex(filename):

    st = read(filename)

    for trace in st:
        path = os.path.join(_mseed_search_folder,
                            str(trace.stats.starttime.year),
                            trace.stats.network, trace.stats.station,
                            ('%s.D') % (trace.stats.channel))
        create_dir(path)

        diff = trace.stats.endtime.day - trace.stats.starttime.day
        rangediff = diff + 1

        if diff == 0:
            d = obspy.core.utcdatetime.UTCDateTime(trace.stats.starttime)
            finalname = DataDir.filename(trace, d)
            filepath = os.path.join(path, finalname)
            trace.write(filepath, format='MSEED', reclen=512)

        elif diff >= 1:
            for i in range(rangediff):
                mult = 60 * 60 * 24 * i

                if i == 0:
                    s1 = trace.stats.starttime
                    e1 = obspy.core.utcdatetime.UTCDateTime(year=s1.year,
                                                            month=s1.month,
                                                            day=s1.day,
                                                            hour=23,
                                                            minute=59,
                                                            second=59,
                                                            microsecond=999999)
                else:
                    s1 = trace.stats.starttime + mult
                    s1 = obspy.core.utcdatetime.UTCDateTime(year=s1.year,
                                                            month=s1.month,
                                                            day=s1.day,
                                                            hour=0,
                                                            minute=0,
                                                            microsecond=0)
                    e1 = obspy.core.utcdatetime.UTCDateTime(year=s1.year,
                                                            month=s1.month,
                                                            day=s1.day,
                                                            hour=23,
                                                            minute=59,
                                                            second=59,
                                                            microsecond=999999)
                if i == diff:
                    e1 = trace.stats.endtime

                d = obspy.core.utcdatetime.UTCDateTime(s1)
                finalname = DataDir.filename(trace, d)
                filepath = os.path.join(path, finalname)

                tr = trace.slice(s1, e1)
                tr.write(filepath, format='MSEED', reclen=512)
                print(s1, 'to', e1, finalname, filepath)
        #endif
    #endfor

    #hs+
    #  "remove" unused channels
    #
    channels = []

    for trace in st:
        channels.append(
            trace.stats.channel)  # get all channels from trace list

    channels = sorted(channels)
    group = [['BHE', 'BHN', 'BHZ'], ['BH1', 'BH2', 'BHZ'],
             ['HHE', 'HHN', 'HHZ']]  # use only these
    used = []

    for chn in channels:  # build channel sets
        if len(used) > 0: break

        for i in range(3):
            if chn in group[i]:
                used = group[i]  # use this set
                break
    #endfor

    trace = st[0]
    path = os.path.join(_mseed_search_folder, str(trace.stats.starttime.year),
                        trace.stats.network, trace.stats.station)

    for chn in channels:
        if chn in used: continue

        # replace unused channels(files) with magic number '4711'

        dir = os.path.join(path, ('%s.D') % (chn))

        if os.path.exists(dir):
            files = os.listdir(dir)
            line = '4711'

            for file in files:
                Basic.writeTextFile(os.path.join(dir, file), line)
Example #33

ffile = open('Results_Swept_Sine' + sta, 'w')

for idx, pair in enumerate(zip(chans,stimes)):
    stime = pair[1] + 125.
    chan = pair[0]
    #stime = pair[1]
    etime = stime + 225.
    #etime = stime + 300.
    datapath = 'newdata/output_raw/2017/XX/' + sta + '/' + chan + '.D/XX*'
    st = read(datapath)
    st.decimate(5)
    st.decimate(2)
    stRef = read('newdata/Tcmpct/gto.seed')

    if chan == 'HHE':
        stRef = stRef.select(channel='EH2')
    elif chan == 'HHN':
        stRef = stRef.select(channel='EH1')
    else:
        stRef = stRef.select(channel='EH0')

    st += stRef
Example #34
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 29 12:19:56 2017

@author: horas
"""

from __future__ import print_function
from obspy.core import read
#from scipy.integrate import simps
#import numpy as np
#import matplotlib.pylab as plt
#from obspy.core import stream

st = read("KRK_full.MSEED")

print(st)
print('\n')
print(st[0].data)
print('\n')
print(type(st))
print('\n')
print(st.traces)
print('\n')
print(st[0].stats)
print('\n')
print(st[0].stats.delta, '|', st[0].stats.endtime)
print('\n')
print(st)

print(st[0])
Example #35
def get_streams_gema_old(networks,
                         stations,
                         starttime,
                         endtime,
                         only_vertical_channel=False,
                         local_dir_name=None):
    if not local_dir_name:
        local_dir_name = "%s/mount" % (os.getenv("HOME"))

    # look up the public IP to decide whether seiscomp3's buffer/archive
    # can be read directly or only through a mounted copy
    r = requests.get(r'http://jsonip.com')
    public_ip = r.json()['ip']
    if (socket.gethostname() == 'maniedba') and (public_ip == "152.74.135.51"):
        local_buffer = "/home/gema/seiscomp3/var/lib/seedlink/buffer"
        local_archive = "/home/gema/seiscomp3/var/lib/archive "
    else:
        local_buffer = "%s/seiscomp_data_buffer" % (local_dir_name)
        local_archive = "%s/seiscomp_data_archive" % (local_dir_name)

    st = Stream()
    if only_vertical_channel:
        channels = "*Z"
    else:
        channels = "*"

    # read archive directory
    for network, station in zip(networks, stations):
        this_day = starttime
        while this_day <= endtime:
            pattern = '%s/%s/%s/%s/%s.D' % (local_archive,
                                            this_day.strftime("%Y"), network,
                                            station, channels)
            paths_ch = sorted(glob.glob(pattern))
            for path in paths_ch:
                pattern = "%s/*%s" % (path, this_day.strftime("%Y.%03j"))
                msfile_list = glob.glob(pattern)
                if len(msfile_list) > 0:
                    for msfile in msfile_list:
                        st += read(msfile,
                                   starttime=starttime,
                                   endtime=endtime)

            this_day += 86400

    gaps = st.get_gaps()

    # read buffer directory
    if UTCDateTime().now() - endtime <= 3600:
        for network, station in zip(networks, stations):
            path = '%s/%s/segments' % (local_buffer, station)
            name_list = os.listdir(path)
            full_list = [os.path.join(path, i) for i in name_list]
            time_sorted_list = sorted(full_list, key=os.path.getmtime)
            msfiles = time_sorted_list[-3::]
            for msfile in msfiles:
                st += read(msfile, starttime=starttime,
                           endtime=endtime).select(channel=channels)

        if len(st) > 1:
            st.merge(method=1,
                     interpolation_samples=-1,
                     fill_value='interpolate')

    return st, gaps
Example #36
def slice_from_reading(reading_path,
                       waveforms_path,
                       slice_duration=5,
                       archive_definitions=[],
                       output_level=0):
    """
    Reads S-file on reading_path and slice relevant waveforms in waveforms_path
    :param reading_path:        string    path to S-file
    :param waveforms_path:      string    path to folder with waveform files
    :param slice_duration:      int       duration of the slice in seconds
    :param archive_definitions: list      list of archive definition tuples (see utils/seisan_reader.py)
    :param output_level:        int       0 - min output, 5 - max output, default - 0
    :return: -1                                  -    corrupted file
             [(obspy.core.trace.Trace, string)]  -    list of slice tuples: (slice, name of waveform file)
    """
    if output_level >= 5:
        logging.info('Reading file: ' + reading_path)

    try:
        events = nordic_reader.read_nordic(
            reading_path,
            True)  # Events tuple: (event.Catalog, [waveforms file names])
    except nordic_reader.NordicParsingError as error:
        if output_level >= 2:
            logging.warning('In ' + reading_path + ': ' + str(error))
        return -1
    except ValueError as error:
        if output_level >= 2:
            logging.warning('In ' + reading_path + ': ' + str(error))
        return -1
    except AttributeError as error:
        if output_level >= 2:
            logging.warning('In ' + reading_path + ': ' + str(error))
        return -1

    index = -1
    slices = []
    picks_line = "STAT SP IPHASW"
    for event in events[0].events:
        index += 1

        with open(reading_path) as f:
            lines = [line.strip() for line in f]

        id = None
        id_str = None
        picks_started = False
        picks_amount = len(event.picks)
        picks_read = 0
        picks_distance = []
        if config.seconds_high_precision:
            start_seconds = []
        for line in lines:
            if picks_started and picks_read < picks_amount and len(line) >= 74:
                try:
                    dist = float(line[70:74])
                except ValueError as e:
                    dist = None
                picks_distance.append(dist)

                if config.seconds_high_precision:
                    try:
                        seconds = float(line[21:27])
                    except ValueError as e:
                        seconds = None
                    start_seconds.append(seconds)

            if len(line) > 73:
                title = line[0:6]
                if title == "ACTION":
                    id_title = line[56:59]
                    if id_title == "ID:":
                        id_str = line[59:73]
                        id = int(id_str)

            if len(line) > 25:
                if line[0:len(picks_line)] == picks_line:
                    picks_started = True

        # Min magnitude check
        if len(event.magnitudes) > 0:
            if event.magnitudes[0].mag < config.min_magnitude:
                continue

        # Max depth check
        if len(event.origins) > 0:
            if event.origins[0].depth is None:
                continue
            if event.origins[0].depth > config.max_depth:
                continue

        try:
            if len(event.picks) > 0:  # Only for files with picks
                if output_level >= 3:
                    logging.info('File: ' + reading_path + ' Event #' +
                                 str(index) + ' Picks: ' +
                                 str(len(event.picks)))

                picks_index = -1
                for pick in event.picks:
                    if output_level >= 3:
                        logging.info('\t' + str(pick))

                    picks_index += 1
                    if config.seconds_high_precision:
                        if picks_index < len(start_seconds):
                            start_seconds_pick = start_seconds[picks_index]
                        else:
                            start_seconds_pick = pick.time.second
                            print("OUT OF BOUNDS START SECONDS PICK")
                            print("FILE: " + reading_path)
                            print("PICKS: ")
                            for pick_print in event.picks:
                                print(str(pick_print))
                    else:
                        start_seconds_pick = pick.time.second
                    pick_time = UTCDateTime(pick.time.year, pick.time.month,
                                            pick.time.day, pick.time.hour,
                                            pick.time.minute,
                                            start_seconds_pick)

                    if picks_index < len(picks_distance) and picks_distance[
                            picks_index] is not None:
                        if picks_distance[picks_index] > config.max_dist:
                            continue

                    # Check phase
                    if pick.phase_hint != 'S' and pick.phase_hint != 'P':
                        logging.info('\t' + 'Neither P nor S phase. Skipping.')
                        continue

                    if output_level >= 3:
                        logging.info('\t' + 'Slices:')

                    # Checking archives
                    found_archive = False
                    if len(archive_definitions) > 0:
                        station = pick.waveform_id.station_code
                        station_archives = seisan.station_archives(
                            archive_definitions, station)

                        channel_slices = []
                        for x in station_archives:
                            if x[4] <= pick_time:
                                if x[5] is not None and pick_time > x[5]:
                                    continue
                                else:
                                    archive_file_path = seisan.archive_path(
                                        x, pick_time.year, pick_time.julday,
                                        config.archives_path, output_level)

                                    if os.path.isfile(archive_file_path):
                                        try:
                                            arch_st = read(archive_file_path)
                                        except TypeError as error:
                                            if output_level >= 2:
                                                logging.warning(
                                                    'In ' + archive_file_path +
                                                    ': ' + str(error))
                                            return -1

                                        # arch_st.normalize(global_max=config.global_max_normalizing)  # remove that
                                        # arch_st.filter("highpass", freq=config.highpass_filter_df)  # remove that
                                        # line later
                                        for trace in arch_st:
                                            pick_start_time = pick_time
                                            if trace.stats.starttime > pick_time or pick_time + slice_duration >= trace.stats.endtime:
                                                logging.info(
                                                    '\t\tArchive ' +
                                                    archive_file_path +
                                                    ' does not cover required slice interval'
                                                )
                                                continue

                                            shifted_time = pick_time - config.static_slice_offset
                                            end_time = shifted_time + slice_duration

                                            found_archive = True

                                            trace_slice = trace.slice(
                                                shifted_time, end_time)
                                            if output_level >= 3:
                                                logging.info('\t\t' +
                                                             str(trace_slice))

                                            trace_file = x[0] + str(
                                                x[4].year) + str(
                                                    x[4].julday
                                                ) + x[1] + x[2] + x[3]
                                            event_id = x[0] + str(
                                                x[4].year) + str(
                                                    x[4].julday) + x[2] + x[3]
                                            slice_name_station_channel = (
                                                trace_slice, trace_file, x[0],
                                                x[1], event_id,
                                                pick.phase_hint, id_str)

                                            # print("ID " + str(id_str))
                                            # if id_str == '20140413140958':
                                            # print(x[0])
                                            # if True:#x[0] == 'NKL':
                                            # trace.integrate()
                                            # trace_slice.integrate()
                                            # trace.normalize()
                                            # trace_slice.normalize()
                                            # print('FOUND ID! NORMALIZED')
                                            # print('ARCHIVE: ' + archive_file_path)
                                            # print('FILE: ' + trace_file)
                                            # print('SLICE: ' + str(trace_slice))
                                            # print('TIME: ' + str(shifted_time) + ' till ' + str(end_time))
                                            # print('TRACE: ' + str(trace))
                                            # print('DATA: ' + str(trace_slice.data))

                                            # trace_slice.filter("highpass", freq=config.highpass_filter_df)
                                            # patho = "/seismo/seisan/WOR/chernykh/plots/part/"
                                            # patho2 = "/seismo/seisan/WOR/chernykh/plots/whole/"

                                            # plt.plot(trace_slice.data)
                                            # plt.ylabel('Amplitude')
                                            # plt.savefig(patho + trace_file)
                                            # plt.figure()

                                            # plt.plot(trace.data)
                                            # plt.ylabel('Amplitude')
                                            # plt.savefig(patho2 + trace_file)
                                            # plt.figure()

                                            if len(trace_slice.data) >= 400:
                                                channel_slices.append(
                                                    slice_name_station_channel)

                    # Read and slice waveform
                    if found_archive:
                        if len(channel_slices) > 0:
                            slices.append(channel_slices)
                        continue

        except ValueError as error:
            if output_level >= 2:
                logging.warning('In ' + reading_path + ': ' + str(error))
            continue

    return sort_slices(slices)
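A hypothetical invocation (paths are placeholders; archive_defs is a list of archive-definition tuples as described in the docstring):

slices = slice_from_reading('/path/to/sfile', '/path/to/waveforms',
                            slice_duration=5,
                            archive_definitions=archive_defs)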
Example #37
 def test_issue296(self):
     """
     Tests issue #296.
     """
     tempfile = NamedTemporaryFile().name
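      # numeric MSEED encodings used below (cf. the encodings dict in
      # Example #9): 0=ASCII, 1=INT16, 3=INT32, 4=FLOAT32, 5=FLOAT64,
      # 10=STEIM1, 11=STEIM2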
     # 1 - transform to np.float64 values
     st = read()
     for tr in st:
         tr.data = tr.data.astype('float64')
     # write a single trace automatically detecting encoding
     st[0].write(tempfile, format="MSEED")
      # write the whole stream automatically detecting encoding
     st.write(tempfile, format="MSEED")
     # write a single trace with encoding 5
     st[0].write(tempfile, format="MSEED", encoding=5)
      # write the whole stream with encoding 5
     st.write(tempfile, format="MSEED", encoding=5)
     # 2 - transform to np.float32 values
     st = read()
     for tr in st:
         tr.data = tr.data.astype('float32')
     # write a single trace automatically detecting encoding
     st[0].write(tempfile, format="MSEED")
      # write the whole stream automatically detecting encoding
     st.write(tempfile, format="MSEED")
     # write a single trace with encoding 4
     st[0].write(tempfile, format="MSEED", encoding=4)
      # write the whole stream with encoding 4
     st.write(tempfile, format="MSEED", encoding=4)
     # 3 - transform to np.int32 values
     st = read()
     for tr in st:
         tr.data = tr.data.astype('int32')
     # write a single trace automatically detecting encoding
     st[0].write(tempfile, format="MSEED")
      # write the whole stream automatically detecting encoding
     st.write(tempfile, format="MSEED")
     # write a single trace with encoding 3
     st[0].write(tempfile, format="MSEED", encoding=3)
     # write the whole stream with encoding 3
     st.write(tempfile, format="MSEED", encoding=3)
     # write a single trace with encoding 10
     st[0].write(tempfile, format="MSEED", encoding=10)
     # write the whole stream with encoding 10
     st.write(tempfile, format="MSEED", encoding=10)
     # write a single trace with encoding 11
     st[0].write(tempfile, format="MSEED", encoding=11)
     # write the whole stream with encoding 11
     st.write(tempfile, format="MSEED", encoding=11)
     # 4 - transform to np.int16 values
     st = read()
     for tr in st:
         tr.data = tr.data.astype('int16')
     # write a single trace automatically detecting encoding
     st[0].write(tempfile, format="MSEED")
      # write the whole stream automatically detecting encoding
     st.write(tempfile, format="MSEED")
     # write a single trace with encoding 1
     st[0].write(tempfile, format="MSEED", encoding=1)
     # write the whole stream with encoding 1
     st.write(tempfile, format="MSEED", encoding=1)
     # 5 - transform to ASCII values
     st = read()
     for tr in st:
         tr.data = tr.data.astype('|S1')
     # write a single trace automatically detecting encoding
     st[0].write(tempfile, format="MSEED")
      # write the whole stream automatically detecting encoding
     st.write(tempfile, format="MSEED")
     # write a single trace with encoding 0
     st[0].write(tempfile, format="MSEED", encoding=0)
     # write the whole stream with encoding 0
     st.write(tempfile, format="MSEED", encoding=0)
     # cleanup
     os.remove(tempfile)
Example #38
def main(argv=sys.argv):

    #Earth's parameters
    #~ beta = 4.e3 #m/s
    #~ rho = 3.e3 #kg/m^3
    #~ mu = rho*beta*beta

    PLotSt = [
        "IU.TRQA.00.LHZ", "IU.LVC.00.LHZ", "II.NNA.00.LHZ", "IU.RAR.00.LHZ"
    ]

    #PlotSubf = [143, 133, 123, 113, 103, 93,
    #           83, 73, 63, 53]
    PlotSubf = [6, 3]

    #Set rup_vel = 0 to have a point source solution
    RupVel = 2.1  #Chilean eq from Lay et al
    t_h = 10.  # Half duration for each sf
    noiselevel = 0.0  # L1 norm level of noise
    mu = 40e9
    #W-Phase filter
    corners = 4.
    fmin = 0.001
    fmax = 0.005

    ### Data from Chilean 2010 EQ (Same as W phase inv.)
    strike = 18.
    dip = 18.
    rake = 104.  # 109.

    rakeA = rake + 45.
    rakeB = rake - 45.

    ### Fault's grid parameters
    nsx = 21  #Number of sf along strike
    nsy = 11  #Number of sf along dip
    flen = 600.  #Fault's length [km] along strike
    fwid = 300.  #Fault's width [km] along dip
    direc = 0  #Directivity 0 = bilateral
    Min_h = 10.  #Min depth of the fault

    ### Derived parameters:
    nsf = nsx * nsy
    sflen = flen / float(nsx)
    sfwid = fwid / float(nsy)
    swp = [1, 0, 2]  # useful to swap (lat,lon, depth)
    mindist = flen * fwid  # minimum distance to the hypocentre (initial value)

    ###Chessboard
    #weight = np.load("RealSol.npy")
    weight = np.zeros(nsf)
    weight[::2] = 1
    #weight[::2] = 1
    #~ weight[10]=15
    #~ weight[5001]=10
    #~ weight[3201]=2

    ## Setting dirs and reading files.
    GFdir = "/home/roberto/data/GFS/"
    workdir = os.path.abspath(".") + "/"
    datadir = workdir + "DATA/"
    tracesfilename = workdir + "goodtraces.dat"
    tracesdir = workdir + "WPtraces/"

    try:
        reqfilename = glob.glob(workdir + '*.syn.req')[0]
    except IndexError:
        print "There is not *.syn.req file in the dir"
        sys.exit()

    basename = reqfilename.split("/")[-1][:-4]

    if not os.path.exists(tracesfilename):
        print tracesfilename, "does not exist."
        exit()

    if not os.path.exists(datadir):
        os.makedirs(datadir)

    if not os.path.exists(tracesdir):
        os.makedirs(tracesdir)

    tracesfile = open(tracesfilename)
    reqfile = open(reqfilename)

    trlist = readtraces(tracesfile)
    eqdata = readreq(reqfile)

    tracesfile.close()
    reqfile.close()

    ####Hypocentre from
    ### http://earthquake.usgs.gov/earthquakes/eqinthenews/2010/us2010tfan/
    cmteplat = -35.91  #-35.85#-36.03#-35.83
    cmteplon = -72.73  #-72.72#-72.83# -72.67
    cmtepdepth = 35.
    eq_hyp = (cmteplat, cmteplon, cmtepdepth)

    ############

    # Defining the sf system
    grid, sblt = fault_grid('CL-2010',
                            cmteplat,
                            cmteplon,
                            cmtepdepth,
                            direc,
                            Min_h,
                            strike,
                            dip,
                            rake,
                            flen,
                            fwid,
                            nsx,
                            nsy,
                            Verbose=False,
                            ffi_io=True,
                            gmt_io=True)

    print('CL-2010', cmteplat, cmteplon, cmtepdepth, direc, Min_h, strike, dip,
          rake, flen, fwid, nsx, nsy)
    print(grid[0][1])
    #sys.exit()
    #This calculation is inside of the loop
    #~ NP = [strike, dip, rake]
    #~ M = np.array(NodalPlanetoMT(NP))
    #~ Mp = np.sum(M**2)/np.sqrt(2)

    #############################################################################
    ######Determining the sf closest to the hypocentre:
    min_Dist_hyp_subf = flen * fwid
    for subf in range(nsf):
        sblat = grid[subf][1]
        sblon = grid[subf][0]
        sbdepth = grid[subf][2]
        sf_hyp = (sblat, sblon, sbdepth)
        Dist_hyp_subf = hypo2dist(eq_hyp, sf_hyp)
        if Dist_hyp_subf < min_Dist_hyp_subf:
            min_Dist_hyp_subf = Dist_hyp_subf
            min_sb_hyp = sf_hyp
            hyp_subf = subf
    ####Determining trimming times:
    test_tr = read(GFdir + "H003.5/PP/GF.0001.SY.LHZ.SAC")[0]
    t0 = test_tr.stats.starttime
    TrimmingTimes = {}  # Trimming window (ta, tb) for each trace id.
    A = 0
    for trid in trlist:
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        stlat = META['latitude']
        stlon = META['longitude']
        dist = locations2degrees(min_sb_hyp[0], min_sb_hyp[1],
                                 stlat, stlon)
        parrivaltime = getTravelTimes(dist, min_sb_hyp[2])[0]['time']
        ta = t0 + parrivaltime
        tb = ta + round(15. * dist)
        TrimmingTimes[trid] = (ta, tb)
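    # NOTE: obspy.taup.getTravelTimes (used above) was removed in later ObsPy
    # releases; a rough modern equivalent -- a sketch assuming the default
    # iasp91 model is acceptable -- would be:
    #   from obspy.taup import TauPyModel
    #   model = TauPyModel(model="iasp91")
    #   parrivaltime = model.get_travel_times(
    #       source_depth_in_km=min_sb_hyp[2], distance_in_degree=dist,
    #       phase_list=["P"])[0].time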

    ###########################

    DIST = []
    # Ordering the stations in terms of distance
    for trid in trlist:
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        lat = META['latitude']
        lon = META['longitude']
        trdist = locations2degrees(cmteplat, cmteplon, lat, lon)
        DIST.append(trdist)

    DistIndex = lstargsort(DIST)
    trlist = [trlist[i] for i in DistIndex]

    stdistribution = StDistandAzi(trlist, eq_hyp, workdir + "DATA/")
    StDistributionPlot(stdistribution)
    #exit()
    #Main loop

    for subf in range(nsf):
        print(subf)
        sflat = grid[subf][1]
        sflon = grid[subf][0]
        sfdepth = grid[subf][2]
        #~ strike = grid[subf][3] #+ 360.
        #~ dip    = grid[subf][4]
        #~ rake   = grid[subf][5] #
        NP = [strike, dip, rake]
        NPA = [strike, dip, rakeA]
        NPB = [strike, dip, rakeB]

        M = np.array(NodalPlanetoMT(NP))
        MA = np.array(NodalPlanetoMT(NPA))
        MB = np.array(NodalPlanetoMT(NPB))
        #Time delay is calculated as the time at which
        #the rupture reaches the subfault

        sf_hyp = (sflat, sflon, sfdepth)
        Dist_ep_subf = hypo2dist(eq_hyp, sf_hyp)

        if Dist_ep_subf < mindist:
            mindist = Dist_ep_subf
            minsubf = subf

        if RupVel == 0:
            t_d = eqdata['time_shift']
        else:
            t_d = round(Dist_ep_subf / RupVel)  #-59.

        print(sflat, sflon, sfdepth)
        # Looking for the best depth dir:
        depth = []
        depthdir = []
        for file in os.listdir(GFdir):
            if file[-2:] == ".5":
                depthdir.append(file)
                depth.append(float(file[1:-2]))
        BestDirIndex = np.argmin(abs(sfdepth - np.array(depth)))
        hdir = GFdir + depthdir[BestDirIndex] + "/"

        ###

        SYN = np.array([])
        SYNA = np.array([])
        SYNB = np.array([])
        for trid in trlist:

            metafile = workdir + "DATA/" + "META." + trid + ".xml"
            META = DU.getMetadataFromXML(metafile)[trid]
            lat = META['latitude']
            lon = META['longitude']

            #Subfault loop
            #GFs Selection:
            ##Change to following loop

            dist = locations2degrees(sflat, sflon, lat, lon)
            azi = -np.pi / 180. * gps2DistAzimuth(lat, lon, sflat, sflon)[2]
            trPPsy, trRRsy, trRTsy, trTTsy = GFSelectZ(hdir, dist)

            trROT = MTrotationZ(azi, trPPsy, trRRsy, trRTsy, trTTsy)
            orig = trROT[0].stats.starttime
            dt = trROT[0].stats.delta

            trianglen = 2 * int(t_h / dt) - 1
            FirstValid = int(trianglen / 2.) + 1  # to delete
            window = triang(trianglen)
            window /= np.sum(window)
            #window = np.array([1.])

            parrivaltime = getTravelTimes(dist, sfdepth)[0]['time']

            t1 = TrimmingTimes[trid][0] - t_d
            t2 = TrimmingTimes[trid][1] - t_d

            for trR in trROT:
                trR.data *= 10.**-21  ## To get M in Nm
                trR.data -= trR.data[0]
                AUX1 = len(trR)
                trR.data = convolve(trR.data, window, mode='valid')
                AUX2 = len(trR)
                mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
                               trR.data[:int(60. / trR.stats.delta - FirstValid + 1)])))
                #mean = np.mean(trR.data[:60])
                trR.data -= mean
                trR.data = bp.bandpassfilter(trR.data, len(trR), trR.stats.delta,
                                             corners, 1, fmin, fmax)
                t_l = dt * 0.5 * (AUX1 - AUX2)
                trR.trim(t1 - t_l, t2 - t_l, pad=True, fill_value=trR.data[0]
                         )  #We lost t_h due to the convolution

            #~ for trR in trROT:
            #~ trR.data *= 10.**-23 ## To get M in Nm
            #~ trR.data -= trR.data[0]

            #~ trR.data = convolve(trR.data,window,mode='same')

            #~ #mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
            #~ #trR.data[:60./trR.stats.delta*1.-FirstValid+1])))
            #~ mean = np.mean(trR.data[:60])
            #~ trR.data -= mean
            #~ trR.data = bp.bandpassfilter(trR.data,len(trR), trR.stats.delta,\
            #~ corners , 1 , fmin, fmax)

            #~ trR.trim(t1,t2,pad=True, fill_value=trR.data[0])

            trROT = np.array(trROT)
            syn = np.dot(trROT.T, M)
            synA = np.dot(trROT.T, MA)
            synB = np.dot(trROT.T, MB)

            SYN = np.append(SYN, syn)
            SYNA = np.append(SYNA, synA)
            SYNB = np.append(SYNB, synB)

        print(np.shape(A), np.shape(np.array([SYN])))
        if subf == 0:
            A = np.array([SYN])
            AA = np.array([SYNA])
            AB = np.array([SYNB])
        else:
            A = np.append(A, np.array([SYN]), 0)
            AA = np.append(AA, np.array([SYNA]), 0)
            AB = np.append(AB, np.array([SYNB]), 0)

    AC = np.vstack((AA, AB))
    print(np.shape(AC))
    print(np.shape(weight))
    B = np.dot(A.T, weight)
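    # Each row of A holds the concatenated synthetics for one subfault, so the
    # forward model is linear: B = A.T @ weight gives the synthetic wavefield
    # for the slip distribution encoded in `weight`.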
    stsyn = Stream()
    n = 0
    Ntraces = {}
    for trid in trlist:
        spid = trid.split(".")
        print(trid)
        NMIN = int(round(1. + (TrimmingTimes[trid][1] - TrimmingTimes[trid][0]) / dt))
        Ntraces[trid] = (n, NMIN + n)
        trsyn = Trace(B[n:NMIN + n])
        n += NMIN
        trsyn.stats.network = spid[0]
        trsyn.stats.station = spid[1]
        trsyn.stats.location = spid[2]
        trsyn.stats.channel = spid[3]
        trsyn = AddNoise(trsyn, level=noiselevel)
        #trsyn.stats.starttime =
        stsyn.append(trsyn)

    stsyn.write(workdir + "WPtraces/" + basename + ".decov.trim.mseed",
                format="MSEED")

    #####################################################
    # Plotting:
    #####################################################
    #we are going to reflect the y axis later, so:
    print(minsubf)
    hypsbloc = [minsubf // nsy, -(minsubf % nsy) - 2]
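    # minsubf is a linear index over the (nsx * nsy) grid; the division and
    # modulo recover its along-strike and along-dip positions (the dip index
    # is negated because the y axis is reflected).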

    #Creating the strike and dip axis:
    StrikeAx = np.linspace(0, flen, nsx + 1)
    DipAx = np.linspace(0, fwid, nsy + 1)
    DepthAx = DipAx * np.sin(np.pi / 180. * dip) + Min_h
    hlstrike = StrikeAx[hypsbloc[0]] + sflen * 0.5

    hldip = DipAx[hypsbloc[1]] + sfwid * 0.5
    hldepth = DepthAx[hypsbloc[1]] + sfwid * 0.5 * np.sin(np.pi / 180. * dip)

    StrikeAx = StrikeAx - hlstrike
    DipAx = DipAx - hldip

    XX, YY = np.meshgrid(StrikeAx, DepthAx)
    XX, ZZ = np.meshgrid(StrikeAx, DipAx)

    sbarea = sflen * sfwid

    SLIPS = weight.reshape(nsx, nsy).T  #[::-1,:]
    SLIPS /= mu * 1.e6 * sbarea

    ######Plot:#####################
    plt.figure()
    ax = host_subplot(111)
    im = ax.pcolor(XX, YY, SLIPS, cmap="jet")
    ax.set_ylabel('Depth [km]')
    ax.set_ylim(DepthAx[-1], DepthAx[0])

    # Creating a twin plot
    ax2 = ax.twinx()
    #im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1,:], cmap="Greys")
    im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1, :], cmap="jet")

    ax2.set_ylabel('Distance along the dip [km]')
    ax2.set_xlabel('Distance along the strike [km]')
    ax2.set_ylim(DipAx[0], DipAx[-1])
    ax2.set_xlim(StrikeAx[0], StrikeAx[-1])

    ax.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["top"].set_visible(True)
    ax2.axis["top"].label.set_visible(True)

    divider = make_axes_locatable(ax)
    cax = divider.append_axes("bottom", size="5%", pad=0.1)
    cb = plt.colorbar(im, cax=cax, orientation="horizontal")
    cb.set_label("Slip [m]")
    ax2.plot([0], [0], '*', ms=225. / (nsy + 4))
    ax2.set_xticks(ax2.get_xticks()[1:-1])
    #ax.set_yticks(ax.get_yticks()[1:])
    #ax2.set_yticks(ax2.get_yticks()[:-1])

    #########Plotting the selected traces:
    nsp = len(PLotSt) * len(PlotSubf)
    plt.figure(figsize=(13, 11))
    plt.title("Synthetics for rake = " + str(round(rake)))
    mindis = []
    maxdis = []
    for i, trid in enumerate(PLotSt):
        x = np.arange(0, Ntraces[trid][1] - Ntraces[trid][0], dt)
        for j, subf in enumerate(PlotSubf):
            y = A[subf, Ntraces[trid][0]:Ntraces[trid][1]]
            if j == 0:
                yy = y
            else:
                yy = np.vstack((yy, y))
        maxdis.append(np.max(yy))
        mindis.append(np.min(yy))

    for i, trid in enumerate(PLotSt):
        x = np.arange(0, Ntraces[trid][1] - Ntraces[trid][0], dt)

        for j, subf in enumerate(PlotSubf):
            y = A[subf, Ntraces[trid][0]:Ntraces[trid][1]]
            plt.subplot2grid((len(PlotSubf), len(PLotSt)), (j, i))
            plt.plot(x, y, linewidth=2.5)
            if j == 0:
                plt.title(trid)
            fig = plt.gca()
            fig.axes.get_yaxis().set_ticks([])
            fig.set_ylabel(str(subf), rotation=0)
            fig.set_xlim((x[0], x[-1]))
            fig.set_ylim((mindis[i], maxdis[i]))
            if subf != PlotSubf[-1]:
                fig.axes.get_xaxis().set_ticks([])

    plt.show()
예제 #39
0
def get_picks(reading_path, archive_definitions=[]):
    """
    Reads an S-file and slices waveforms for the event.
    :param reading_path: path to the S-file
    :param archive_definitions: list of archive definitions
    :return: [event_id, reading_path, magnitude, depth, result_list],
        or -1 on error
    """
    # Parse s-file and error checks
    try:
        events = nordic_reader.read_nordic(
            reading_path,
            True)  # Events tuple: (event.Catalog, [waveform file names])
    except nordic_reader.NordicParsingError as e:
        print("In {}: {}".format(reading_path, e))  # Throw exception?
        return -1
    except ValueError as e:
        print("In {}: {}".format(reading_path, e))  # Throw exception?
        return -1
    except AttributeError as e:
        print("In {}: {}".format(reading_path, e))  # Throw exception?
        return -1
    if len(events[0].events) != 1:
        print("In {}: Events number is {}".format(
            reading_path, len(events[0].events)))  # Throw exception?
        return -1

    event = events[0].events[0]

    if len(event.picks) == 0:
        print("In {}: No picks!".format(reading_path))  # Throw exception?
        return -1

    # Parse S-file additional info
    parsed_data = parse_s_file(reading_path, len(event.picks))

    #print("Parsed data:", "{}".format(parsed_data), sep='\n')
    event_id = parsed_data[0]
    picks_dists = parsed_data[1]
    picks_seconds = parsed_data[2]

    # Min magnitude check
    magnitude = None
    if len(event.magnitudes) > 0:
        magnitude = event.magnitudes[0].mag
        # if event.magnitudes[0].mag < config.min_magnitude:
        # print("In {}") Throw exception?
        # return -1

    # Max depth check
    depth = None
    if len(event.origins) > 0:
        depth = event.origins[0].depth
        # if event.origins[0].depth is None:
        # print("In {}") Throw exception?
        # return -1
        # if event.origins[0].depth > config.max_depth:
        # print("In {}") Throw exception?
        # return -1
    # else:
    # print("In {}") Throw exception?
    # return -1

    # Picks slicing
    index = -1
    # List of picks: [[station, phase, distance, [archive_definition, archive_path, start, end, [archive trace slices]]]]
    result_list = []
    for pick in event.picks:
        index += 1

        # Get pick time
        if config.seconds_high_precision:
            if index < len(picks_seconds):
                if picks_seconds[index] is not None:
                    pick_sec = picks_seconds[index]
                else:
                    pick_sec = pick.time.second
            else:
                print(
                    "In {}: Index for picks is out of range for picks_seconds list!"
                    .format(reading_path))  # Throw exception?
                return -1
        else:
            pick_sec = pick.time.second

        time = UTCDateTime(pick.time.year, pick.time.month, pick.time.day,
                           pick.time.hour, pick.time.minute, pick_sec)

        # Check pick distance
        distance = None
        if index < len(picks_dists):
            distance = picks_dists[index]
            # if picks_dists[index] is not None:
            # if picks_dists[index] > config.max_dist:
            # continue
        # else:
        # print("In {}") Throw exception?
        # return -1

        # Find pick in archives
        station = pick.waveform_id.station_code
        station_archives = seisan.station_archives(archive_definitions,
                                                   station)

        archives_picks = [
        ]  # [archive_definition, archive_path, start, end, [archive trace slices]]
        for x in station_archives:
            # Check if archive exists for current pick
            if x[4] > time:
                continue
            if x[5] is not None and (time + config.slice_duration -
                                     config.static_slice_offset > x[5]):
                continue

            # Find archive
            archive_path = seisan.archive_path(x, time.year, time.julday,
                                               config.archives_path)
            if not os.path.isfile(archive_path):
                continue
            try:
                archive_st = read(archive_path)
            except TypeError as e:
                print("In {}: {}".format(reading_path, e))  # Throw exception?
                return -1

            # Get start and end time for pick
            start_time = time - config.static_slice_offset
            end_time = start_time + config.slice_duration

            shifted_start_time = start_time  # + time_shift
            shifted_end_time = end_time  # + time_shift

            archive_picks_list = []  # [[start_time, end_time, trace_slice]]
            for trace in archive_st:
                if trace.stats.starttime > shifted_start_time or shifted_end_time >= trace.stats.endtime:
                    continue

                trace_slice = trace.slice(shifted_start_time, shifted_end_time)

                archive_picks_list.append([
                    trace_slice.stats.starttime, trace_slice.stats.endtime,
                    trace_slice
                ])

            archives_picks.append([
                x, archive_path, shifted_start_time, shifted_end_time, time,
                archive_picks_list
            ])

        result_list.append(
            [station, pick.phase_hint, distance, archives_picks])

    # [event_id, reading_path, magnitude, depth,
    #   [[station, phase, distance,
    #       [[archive_definition, archive_path, start, end, pick_time,
    #                                                                   [[start_time, end_time, archive_trace_slices]]
    #       ]]
    #   ]]
    # ]
    return [event_id, reading_path, magnitude, depth, result_list]
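# A minimal usage sketch (hypothetical S-file path; assumes the config and
# seisan helpers referenced above are importable):
#   picks = get_picks("/path/to/s-file")
#   if picks != -1:
#       event_id, s_path, magnitude, depth, stations = picks
#       for station, phase, distance, archives in stations:
#           print(station, phase, distance, len(archives))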
예제 #40
0
def preprocess(db, stations, comps, goal_day, params, responses=None):
    """
    Fetches data for each of the ``stations`` and each of the ``comps`` using
    the data_availability table in the database.

    To correct for instrument responses, make sure to set ``remove_response``
    to "Y" in the config and to provide the ``responses`` DataFrame.

    :Example:

    >>> from msnoise.api import connect, get_params, preload_instrument_responses
    >>> from msnoise.preprocessing import preprocess
    >>> db = connect()
    >>> params = get_params(db)
    >>> responses = preload_instrument_responses(db)
    >>> st = preprocess(db, ["YA.UV06","YA.UV10"], ["Z",], "2010-09-01", params, responses)
    >>> st
     2 Trace(s) in Stream:
    YA.UV06.00.HHZ | 2010-09-01T00:00:00.000000Z - 2010-09-01T23:59:59.950000Z | 20.0 Hz, 1728000 samples
    YA.UV10.00.HHZ | 2010-09-01T00:00:00.000000Z - 2010-09-01T23:59:59.950000Z | 20.0 Hz, 1728000 samples

    :type db: :class:`sqlalchemy.orm.session.Session`
    :param db: A :class:`~sqlalchemy.orm.session.Session` object, as
        obtained by :func:`msnoise.api.connect`.
    :type stations: list of str
    :param stations: a list of station names, in the format NET.STA.
    :type comps: list of str
    :param comps: a list of component names, in Z,N,E,1,2.
    :type goal_day: str
    :param goal_day: the day of data to load, ISO 8601 format: e.g. 2016-12-31.
    :type params: class
    :param params: an object containing the config parameters, as obtained by
        :func:`msnoise.api.get_params`.
    :type responses: :class:`pandas.DataFrame`
    :param responses: a DataFrame containing the instrument responses, as
        obtained by :func:`msnoise.api.preload_instrument_responses`.
    :rtype: :class:`obspy.core.stream.Stream`
    :return: A Stream object containing all traces.
    """
    datafiles = {}
    output = Stream()
    MULTIPLEX = False
    MULTIPLEX_files = {}
    for station in stations:
        datafiles[station] = {}
        net, sta = station.split('.')
        gd = datetime.datetime.strptime(goal_day, '%Y-%m-%d')
        files = get_data_availability(db,
                                      net=net,
                                      sta=sta,
                                      starttime=gd,
                                      endtime=gd)
        for comp in comps:
            datafiles[station][comp] = []
        for file in files:
            if file.sta != "MULTIPLEX":
                if file.comp[-1] not in comps:
                    continue
                fullpath = os.path.join(file.path, file.file)
                datafiles[station][file.comp[-1]].append(fullpath)
            else:
                MULTIPLEX = True
                print("Mutliplex mode, reading the files")
                fullpath = os.path.join(file.path, file.file)
                multiplexed = sorted(glob.glob(fullpath))
                for comp in comps:
                    for fn in multiplexed:
                        if fn in MULTIPLEX_files:
                            _ = MULTIPLEX_files[fn]
                        else:
                            # print("Reading %s" % fn)
                            _ = read(fn, format=params.archive_format or None)
                            traces = []
                            for tr in _:
                                if "%s.%s" % (
                                        tr.stats.network, tr.stats.station
                                ) in stations and tr.stats.channel[-1] in comps:
                                    traces.append(tr)
                            del _
                            _ = Stream(traces=traces)
                            MULTIPLEX_files[fn] = _
                        datafiles[station][comp].append(_)

    for istation, station in enumerate(stations):
        net, sta = station.split(".")
        for comp in comps:
            files = datafiles[station][comp]
            if len(files) != 0:
                logger.debug("%s.%s Reading %i Files" %
                             (station, comp, len(files)))
                traces = []
                for file in files:
                    if isinstance(file, Stream):
                        st = file.select(network=net,
                                         station=sta,
                                         component=comp).copy()
                    else:
                        try:
                            # print("Reading %s" % file)
                            # t=  time.time()
                            st = read(file,
                                      dtype=np.float64,
                                      starttime=UTCDateTime(gd),
                                      endtime=UTCDateTime(gd) + 86400,
                                      station=sta,
                                      format=params.archive_format or None)
                            # print("done in", time.time()-t)
                        except:
                            logger.debug("ERROR reading file %s" % file)
                            continue
                    for tr in st:
                        if len(tr.stats.channel) == 2:
                            tr.stats.channel += tr.stats.location
                            tr.stats.location = "00"
                    tmp = st.select(network=net, station=sta, component=comp)
                    if not len(tmp):
                        for tr in st:
                            tr.stats.network = net
                        st = st.select(network=net,
                                       station=sta,
                                       component=comp)
                    else:
                        st = tmp
                    for tr in st:
                        tr.data = tr.data.astype(np.float64)
                        tr.stats.network = tr.stats.network.upper()
                        tr.stats.station = tr.stats.station.upper()
                        tr.stats.channel = tr.stats.channel.upper()

                        traces.append(tr)
                    del st
                stream = Stream(traces=traces)
                if not (len(stream)):
                    continue
                f = io.BytesIO()
                stream.write(f, format='MSEED')
                f.seek(0)
                stream = read(f, format="MSEED")
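                # Round-trip through an in-memory MSEED file; this appears to
                # normalise dtypes and record chunking before further
                # processing (the intent is not documented in the original).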

                stream.sort()
                # try:
                #     # HACK not super clean... should find a way to prevent the
                #     # same trace id with different sps to occur
                #     stream.merge(method=1, interpolation_samples=3, fill_value=None)
                # except:
                #     logger.debug("Error while merging...")
                #     traceback.print_exc()
                #     continue
                # stream = stream.split()
                if not len(stream):
                    continue
                logger.debug("%s Checking sample alignment" % stream[0].id)
                for i, trace in enumerate(stream):
                    stream[i] = check_and_phase_shift(
                        trace, params.preprocess_taper_length)

                logger.debug("%s Checking Gaps" % stream[0].id)
                if len(getGaps(stream)) > 0:
                    max_gap = params.preprocess_max_gap * stream[
                        0].stats.sampling_rate

                    gaps = getGaps(stream)
                    while len(gaps):
                        too_long = 0
                        for gap in gaps:
                            if int(gap[-1]) <= max_gap:
                                try:
                                    stream[gap[0]] = stream[gap[0]].__add__(
                                        stream[gap[1]],
                                        method=1,
                                        fill_value="interpolate")
                                    stream.remove(stream[gap[1]])
                                except:
                                    stream.remove(stream[gap[1]])

                                break
                            else:
                                too_long += 1

                        if too_long == len(gaps):
                            break
                        gaps = getGaps(stream)
                    del gaps

                stream = stream.split()
                for tr in list(stream):
                    if tr.stats.sampling_rate < (params.goal_sampling_rate -
                                                 1):
                        stream.remove(tr)
                taper_length = params.preprocess_taper_length  # seconds
                for trace in list(stream):
                    if trace.stats.npts < (4 * taper_length *
                                           trace.stats.sampling_rate):
                        stream.remove(trace)
                    else:
                        trace.detrend(type="demean")
                        trace.detrend(type="linear")
                        trace.taper(max_percentage=None,
                                    max_length=taper_length)

                if not len(stream):
                    logger.debug(" has only too small traces, skipping...")
                    continue

                for trace in stream:
                    logger.debug("%s Highpass at %.2f Hz" %
                                 (trace.id, params.preprocess_highpass))
                    trace.filter("highpass",
                                 freq=params.preprocess_highpass,
                                 zerophase=True,
                                 corners=4)

                    if trace.stats.sampling_rate != params.goal_sampling_rate:
                        logger.debug("%s Lowpass at %.2f Hz" %
                                     (trace.id, params.preprocess_lowpass))
                        trace.filter("lowpass",
                                     freq=params.preprocess_lowpass,
                                     zerophase=True,
                                     corners=8)

                        if params.resampling_method == "Resample":
                            logger.debug("%s Downsample to %.1f Hz" %
                                         (trace.id, params.goal_sampling_rate))
                            trace.data = resample(
                                trace.data, params.goal_sampling_rate /
                                trace.stats.sampling_rate, 'sinc_fastest')

                        elif params.resampling_method == "Decimate":
                            decimation_factor = trace.stats.sampling_rate / params.goal_sampling_rate
                            if not int(decimation_factor) == decimation_factor:
                                logger.warning(
                                    "%s CANNOT be decimated by an integer factor, consider using Resample or Lanczos methods"
                                    " Trace sampling rate = %i ; Desired CC sampling rate = %i"
                                    % (trace.id, trace.stats.sampling_rate,
                                       params.goal_sampling_rate))
                                sys.stdout.flush()
                                sys.exit()
                            logger.debug("%s Decimate by a factor of %i" %
                                         (trace.id, decimation_factor))
                            trace.data = trace.data[::int(decimation_factor)]
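                            # Plain slicing keeps every Nth sample; the
                            # lowpass applied above acts as the anti-alias
                            # filter for this decimation.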

                        elif params.resampling_method == "Lanczos":
                            logger.debug("%s Downsample to %.1f Hz" %
                                         (trace.id, params.goal_sampling_rate))
                            trace.data = np.array(trace.data)
                            trace.interpolate(
                                method="lanczos",
                                sampling_rate=params.goal_sampling_rate,
                                a=1.0)

                        trace.stats.sampling_rate = params.goal_sampling_rate
                    del trace

                if params.remove_response:
                    logger.debug('%s Removing instrument response' %
                                 stream[0].id)

                    response = responses[responses["channel_id"] ==
                                         stream[0].id]
                    if len(response) > 1:
                        response = response[
                            response["start_date"] <= UTCDateTime(gd)]
                    if len(response) > 1:
                        response = response[
                            response["end_date"] >= UTCDateTime(gd)]
                    elif len(response) == 0:
                        logger.info("No instrument response information "
                                    "for %s, skipping" % stream[0].id)
                        continue
                    try:
                        datalesspz = response["paz"].values[0]
                    except:
                        logger.error("Bad instrument response information "
                                     "for %s, skipping" % stream[0].id)
                        continue
                    stream.simulate(
                        paz_remove=datalesspz,
                        remove_sensitivity=True,
                        pre_filt=params.response_prefilt,
                        paz_simulate=None,
                    )
                for tr in stream:
                    tr.data = tr.data.astype(np.float32)
                output += stream
                del stream
            del files
    clean_scipy_cache()
    del MULTIPLEX_files
    return output
예제 #41
0
import glob
import time
import obspy
import obspy.signal.filter
import numpy as np
from numpy import argmax
from obspy import read
import matplotlib.pyplot as plt
#%% constants
shift = 100
step = 2
crite = 0.55  # REDUCED FROM V10
window = 45
fmin = 0.1
fmax = 10

#%% Read in sample waveforms with which to compare all waveforms
sample = read('/Users/william/Documents/lb01/14_330z.mseed')
trace = sample[0]
trace.detrend(type='demean')
trace.filter("bandpass", freqmin=fmin, freqmax=fmax)

#Explosion
start = sample[
    0].stats.starttime + 2 * 60 * 60 + 22 * 60 + 12  #time window start - several s before Pwave arrival
end = start + window
trs = trace.slice(
    starttime=start, endtime=end
)  #cut out sample waveform with same window length as chosen event
trs_e = obspy.signal.filter.envelope(trs.data)
#print('reference waveform')
#trs.plot(type='relative',color='b', starttime=start , endtime=end)
#%%
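#%% A hypothetical continuation (a sketch, not the original workflow): slide
# the sample envelope over a continuous trace and keep the start times of
# windows whose correlation with the template exceeds `crite`. The file name
# below is a placeholder.
def find_matches(cont_trace, template_env, threshold=crite, step_s=step):
    """Return UTCDateTimes of windows whose envelope correlates above threshold."""
    sr = int(cont_trace.stats.sampling_rate)
    env = obspy.signal.filter.envelope(cont_trace.data)
    hits = []
    for i0 in range(0, len(env) - len(template_env), int(step_s * sr)):
        cc = np.corrcoef(env[i0:i0 + len(template_env)], template_env)[0, 1]
        if cc > threshold:
            hits.append(cont_trace.stats.starttime + i0 / sr)
    return hits

# day_trace = read('/Users/william/Documents/lb01/15_330z.mseed')[0]  # placeholder
# matches = find_matches(day_trace, trs_e)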
예제 #42
0
    etime = stime + 6. * 60. * 60.

    length = 4 * 4096
    overlap = 2 * 1024

    f = open(
        str(stime.year) + '_' + str(stime.julday).zfill(3) + '_' +
        str(stime.hour).zfill(2) + '_' + str(stime.minute).zfill(2) +
        'RESULTSLP', 'w')

    for chan in chans:

        st = Stream()
        st += read('/tr1/telemetry_days/' + stas[0] + '/2016/2016_' +
                   str(stime.julday).zfill(3) + '/' + locs[0] + '_' + chan +
                   '.512.seed')
        st += read('/tr1/telemetry_days/' + stas[1] + '/2016/2016_' +
                   str(stime.julday).zfill(3) + '/' + locs[1] + '_' + chan +
                   '.512.seed')

        st += read('/tr1/telemetry_days/' + stas[2] + '/2016/2016_' +
                   str(stime.julday).zfill(3) + '/' + locs[2] + '_' + chan +
                   '.512.seed')

        st.trim(starttime=stime, endtime=etime)

        delta = st[0].stats.delta
        instresp = computeresp(paz, delta, length)

        (p11, fre1) = cp(st[0], st[0], length, overlap, delta)
예제 #43
0
    ns_start, ns_end = 100.0, 400.0
    sig_start, sig_end = 600, 800

    back_az_vals = np.arange(-180.0, 180.0, 1.5)
    trc_vel_vals = np.arange(250.0, 600.0, 2.5)

    method="capon"

    #p = mp.ProcessingPool(cpu_count() - 1)
    p = Pool(cpu_count() - 1)

    # ######################### #
    #         Read and          #
    #        Filter Data        #
    # ######################### #
    x, t, t0, geom = beamforming_new.stream_to_array_data(read(sac_glob))
    M, N = x.shape

    # ######################### #
    #         View Data         #
    # ######################### #
    plt.figure(1)
    for m in range(M):
        plt.subplot(M, 1, m + 1)
        plt.xlim([sig_start, sig_end])
        plt.plot(t, x[m], '-k')
        if m < (M - 1): plt.setp(plt.subplot(M, 1, m + 1).get_xticklabels(), visible=False)
    plt.show(block=False)

    plt.figure(2)
    plt.show(block=False)
예제 #44
0
def _cut_signal(trnm, dests, epsilon=1e-4):
    """
    Return cut of positive & negative lags.
    """
    I2 = read(trnm, format='SAC')[0]

    # Check window length
    sr = I2.stats.sampling_rate
    bp, ep, en, bn = _cut_ends(I2)
    tlen = ep - bp
    if USE_CW and (tlen < PARAM['cut']['min_len']):
        logger.debug(f'{basename(trnm)}: tlen = {tlen:.1f} s')
        return {}

    # SNR threshold on I2
    sym = my.seis.sym_xc(I2)
    try:
        _snr = I2.stats.sac[KEY2SHD['snr']]
    except (KeyError, AttributeError):
        _snr = my.seis.snr(sym, **PARAM['snr'])
        I2.stats.sac[KEY2SHD['snr']] = _snr
        I2.write(trnm)

    if _snr < PARAM['snr']['min']:
        logger.debug(f'{basename(trnm)}: SNR = {_snr:.1f}')
        return {}

    # Check delta
    _delta = I2.stats.delta
    delta2 = PARAM['cut']['delta']
    if abs(_delta - delta2) > epsilon:
        logger.warning(f'Resample {trnm} (delta={_delta:g})')
        I2.resample(sampling_rate=1 / delta2)

    # Cut
    if USE_CW:
        sym.data = sym.data[int(bp * sr):]
        sym.stats.sac.b = bp
        sym.taper(max_percentage=0.05, type='hann', side='both')

    if len(dests) == 1:
        lags = [sym]
    elif len(dests) == 2:
        e = I2.stats.sac.e
        plag = my.seis.sliced(I2, 0, e)
        plag.stats.sac.b = 0
        plag.stats.sac.e = e
        nlag = my.seis.sliced(I2, -e, 0)
        nlag.stats.sac.b = -e
        nlag.stats.sac.e = 0
        lags = [plag, nlag]

    # 1-bit
    if PARAM['preproc'].get('onebit', False):
        for lag in lags:
            lag.data = one_bit(lag.data)

    # Whiten
    if PARAM['preproc']['whiten'].get('val', False):
        for i, lag in enumerate(lags):
            lags[i] = whiten(lag, **PARAM['preproc']['whiten'])

    # Mute
    if USE_DW and PARAM['cut'].get('mute', True):
        kwargs_mute = {
            't1': bp,
            't2': ep,
            'sr': sr,
            'precursor_only': PARAM['cut']['mute_prc_only'],
        }
        for i, lag in enumerate(lags):
            # Negative lag
            if i == 1:
                lag.data = mute(lag.data[::-1], **kwargs_mute)[::-1]
            else:
                lag.data = mute(lag.data, **kwargs_mute)

    d2l = {}
    for lag, dest in zip(lags, dests):
        d2l[dest] = lag

    return d2l
예제 #45
0
vel = 1970
rho = 2400
R = 0.6
F = 2
win_len = 2.

example_events = [
    20140718044958, 20140822015232, 20140910165727, 20140717144824
]

plt.figure(figsize=[6, 4])
for event in example_events:
    for station in ['NOLF']:

        name = 'Data/NOLF/GB.%s.%s.HHE.sac' % (station, event)
        st = read(name)
        name = 'Data/NOLF/GB.%s.%s.HHN.sac' % (station, event)
        st += read(name)

        s_arrival = st[0].stats.sac['t0']

        print(st)

        st_eq = st.copy()
        start = st[0].stats.starttime

        st_eq = st_eq.slice(start + s_arrival - 0.2,
                            start + s_arrival + win_len)
        st_eq = st_eq.taper(type='cosine', max_percentage=0.05)

        delta = st[0].stats.delta
예제 #46
0
sumnois3coh = []
s1n1 = []
s2n2 = []
s1s2n1n2 = []

#Slep width set here
slepwid = 0.025
slepper = 0.05
#As we will be making multiple figures in a loop, we need a counter
fignum = 1
#Reading in traces and setting phase lags and lowtimes/hightimes, also performing unnormalised cross correlations
#And pulling out phase coherence values
for j in range(0, len(biglist)):
    stime = -100
    #Copy traces so you don't overwrite them
    trorig = read(lilist[j])
    tr2orig = read(biglist[j])
    listoff.append((lilist[j], biglist[j]))
    tr = trorig.copy()
    tr2 = tr2orig.copy()

    #Applying processing to trace no. 2, this consists of a simple taper, filter and cut
    arr1 = tr[0]
    arr2 = tr2[0]
    t71 = arr1.stats.sac.t7
    t72 = arr2.stats.sac.t7
    low2 = t72
    temp = tr2[0].stats.starttime
    hi2 = t72 + 200
    tr2cop = tr2.copy()
    tr2cop.trim(temp + (low2 * 0.01), temp + (hi2 * 0.01))
예제 #47
0
def _source_specifc_interferogram(trnm1, trnm2, rec1, rec2, src, lags,
                                  **kwargs):
    """
    Construct a source-specific interferogram (:math:`C_3`), i.e.,
    interferogram of two :math:`I_2`.

    :param rec1: Name of the first receiver-station.
    :param rec2: Name of the second receiver-station.
    :param src: Name of the source-station.
    :param lags: Use which lags for I3.
    :param dir_src: Whether to save the source direction to the SAC header.
    """
    dest = get_fnm('C3', rec1, sta2=rec2, sta3=src, lags=lags)

    if PARAM['skip']['C3'] and exists(dest):
        logger.debug(f'{dest} already exists.')
        if PARAM['write']['stack']:
            return dest, read(dest, format='SAC')[0]
        else:
            return None, None

    tr1 = DEST2LAG[trnm1]
    tr2 = DEST2LAG[trnm2]

    # Find common part
    if USE_CW:
        tr1, tr2 = overlap(tr1, tr2, lags)

    # Flip negative lag
    if PARAM['interferometry']['flip_nlag']:
        flip_nlag(tr1, tr2, lags)

    # Do interferometry
    if USE_DW and PARAM['interferometry']['phase_shift']:
        kwa_ps = {
            'delta': tr1.stats.delta,
            'dr': kwargs.get('dr'),
            'per': kwargs.get('phprper'),
            'pv': kwargs.get('phprvel'),
        }
        if CONV:
            C3 = xc_ps(tr1, tr2, **kwa_ps, **PARAM['interferometry'])
        elif CORR:
            xc = my.seis.x_cor(tr1, tr2, **PARAM['interferometry'])
            C3 = pick_lag(xc, kwargs.get('dir_src'))
            C3 = phase_shift(data=C3, **kwa_ps)
    else:
        C3 = my.seis.x_cor(tr1, tr2, **PARAM['interferometry'])
        if USE_DW and CORR and PARAM['interferometry']['pick_lag']:
            C3 = pick_lag(C3, kwargs.get('dir_src'))

    # Make header
    b = -int(np.floor(C3.size / 2) / tr1.stats.delta)
    if USE_CW:
        if PARAM['interferometry'].get('Welch', False):
            b = -PARAM['interferometry']['subwin']
    if USE_DW:
        if CONV or PARAM['interferometry']['pick_lag']:
            b = 0
    if PARAM['interferometry']['symmetric'] or CONV:
        nsided = 1
    else:
        nsided = 2

    header = my.seis.sachd(
        **{
            'b': b,
            'e': int(b + C3.size * tr1.stats.delta),
            'delta': tr1.stats.delta,
            'npts': C3.size,
            'kevnm': src,
            'evlo': STNM2LOLA[src][0],
            'evla': STNM2LOLA[src][1],
            'knetwk': STA2NET[src],
            'kstnm': rec2,
            'stlo': STNM2LOLA[rec2][0],
            'stla': STNM2LOLA[rec2][1],
            'dist': kwargs.get('dist', DEF_SHD),
            KEY2SHD['net_rec']: STA2NET[rec2],  # to be consistent with I2
            KEY2SHD['src_sta']: src,
            KEY2SHD['src_net']: STA2NET[src],
            KEY2SHD['nsided']: nsided,
            KEY2SHD['dr']: kwargs.get('dr', DEF_SHD),
            KEY2SHD['theta']: kwargs.get('theta', DEF_SHD),
            KEY2SHD['dir_src']: kwargs.get('dir_src', DEF_SHD),
            KEY2SHD['min_srdist']: kwargs.get('min_srdist', DEF_SHD),
        })

    # Make Trace
    C3_tr = Trace(header=header, data=C3)
    if USE_CW and PARAM['interferometry']['symmetric']:
        C3_tr = my.seis.sym_xc(C3_tr)

    if CONV and PARAM['interferometry'].get('trim_conv', True):
        i = int(PARAM['cut']['te'] / PARAM['cut']['delta']) + 1
        C3_tr.data = C3_tr.data[:i]

    return dest, C3_tr
예제 #48
0
            if not os.path.isfile(wavenptp_1) and os.path.isfile(wavenptp):
                shutil.move(wavenptp, events_dir_back)
            else:
                print(wavenptp_1, "exists!")
            if not os.path.isfile(waveeptp_1) and os.path.isfile(waveeptp):
                shutil.move(waveeptp, events_dir_back)
            else:
                print(waveeptp_1, "exists!")
            if not os.path.isfile(wavezptp_1) and os.path.isfile(wavezptp):
                shutil.move(wavezptp, events_dir_back)
            else:
                print(wavezptp_1, "exists!")

            try:
                # read wave info
                ste = read(waveedd_1)
                tre = ste[0]
                # tre.detrend('demean')
                # tre.detrend('linear')
                # tre.filter(type="bandpass",freqmin=0.2,freqmax=10.0,zerophase=True)
                datatre = tre.data
                # print(datatre)
                dist = tre.stats.sac["dist"]
                ptpe = tre.stats.sac["user0"]
                lat = tre.stats.sac["evla"]
                lon = tre.stats.sac["evlo"]
                # print(ptpe)

                stn = read(wavendd_1)
                trn = stn[0]
                datatrn = trn.data
예제 #49
0
 def get_network_code_from_sacfile(self, station):
     file = self.get_single_sacfile(station)[0]
     stream = read(file)
     trace = stream[0]
     return trace.stats['network']
예제 #50
0
inventory = client.get_stations(network=net,
                                station=sta,
                                starttime=stime2,
                                endtime=stime2 + 24. * 60. * 60.,
                                level="response")

days = int(float(hours) / 24.) + 1
if days < 1:
    days = 1

for day in range(days):
    ctime = stime2 + 24. * 60. * 60. * day
    string = '/msd/' + net + '_' + sta + '/' + str(ctime.year) + '/' + str(
        ctime.julday).zfill(3) + '/'
    #string = '/tr1/telemetry_days/' + net + '_' + sta + '/' + str(ctime.year) + '/*' + str(ctime.julday).zfill(3) + '/'
    st += read(string + loc + '_LH*.seed')
    if pcorr:
        st += read(string + '30_LDO*.seed')

st.trim(starttime=stime2, endtime=stime2 + hours * 60. * 60.)
st.merge()
st.detrend('linear')
st.detrend('constant')

print(st)

st.merge()
print(st)

#st.decimate(10)
#st.decimate(10)
예제 #51
0
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import read

# Read the seismogram
st = read("http://examples.obspy.org/RJOB_061005_072159.ehz.new")

# There is only one trace in the Stream object, let's work on that trace...
tr = st[0]

# Filtering with a lowpass on a copy of the original Trace
tr_filt = tr.copy()
tr_filt.filter('lowpass', freq=1.0, corners=2, zerophase=True)

# Now let's plot the raw and filtered data...
t = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
plt.subplot(211)
plt.plot(t, tr.data, 'k')
plt.ylabel('Raw Data')
plt.subplot(212)
plt.plot(t, tr_filt.data, 'k')
plt.ylabel('Lowpassed Data')
plt.xlabel('Time [s]')
plt.suptitle(tr.stats.starttime)
plt.show()
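# A small extension of the demo above: the same filter with zerophase=False is
# causal and delays the waveform, which is easy to see by overlaying both.
plt.figure()
tr_causal = tr.copy()
tr_causal.filter('lowpass', freq=1.0, corners=2, zerophase=False)
plt.plot(t, tr_filt.data, 'k', label='zerophase')
plt.plot(t, tr_causal.data, 'r--', label='causal')
plt.xlabel('Time [s]')
plt.legend()
plt.show()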
예제 #52
0
def getobs(mseed_filename,
           client,
           event,
           phases,
           frq4,
           windows,
           stas,
           stalocs,
           picks=None,
           delta_T={
               'P': 1.,
               'SH': 1.,
               'R': 10.,
               'L': 10.
           },
           taper=None,
           adjtime=None):
    # Connect to arclink server
    #st = read('../mseed/mini.seed')
    org = event.preferred_origin()
    if org is None:
        org = event.origins[0]
    st = read(mseed_filename)
    stobs = {'params': {'filter': frq4, 'windows': windows}}
    syn = {}
    torg = org.time
    trngmx = 3600.
    invout = None
    # First do any required time adjustments
    if adjtime is not None:
        for tr in st:
            if tr.stats.station not in adjtime.keys():
                continue
            print('Adjusting time for station %s by %g secs' %
                  (tr.stats.station, adjtime[tr.stats.station]))
            tr.stats.starttime -= adjtime[tr.stats.station]

    for phase in phases:
        if phase not in stas.keys(): continue
        stobs[phase] = Stream()
        for sta in stas[phase]:
            # If this is a body wave phase find the pick - skip if none found
            if phase == 'P' or phase == 'SH':
                sta_pick = None
                # If no picks supplied then get them from events
                if picks is None:
                    for pick in event.picks:
                        if pick.phase_hint == phase[0:1] and \
                            pick.waveform_id.station_code == sta:
                            sta_pick = pick
                            break
                else:  # Get them from picks - e.g. returned by get_isctimes
                    if sta in picks.keys() and phase[0:1] in picks[sta]:
                        sta_pick = Pick()
                        sta_pick.time = picks[sta][phase[0:1]]
                if sta_pick is None:
                    print('No %s pick found for station %s - skipping' % (
                        phase, sta))
                    continue

            # Set location code if prescribed, otherwise use '00' (preferred)
            if sta in stalocs.keys():
                loc = stalocs[sta]
            else:
                loc = '00'
            # Select the channels for this station - skip if none found
            chans = st.select(station=sta, location=loc)
            if len(chans) == 0:  # if nothing for loc='00', try also with ''
                loc = ''
                chans = st.select(station=sta, location=loc)
            if len(chans) == 0:
                print('No channels found for %s' % sta)
                continue
            try:
                inv = client.get_stations(network=chans[0].stats.network,
                                          station=sta,
                                          location=loc,
                                          starttime=torg,
                                          endtime=torg + 100.,
                                          level='response')
            except Exception as e:
                warnings.warn(str(e))
                print('FDSNWS request failed for trace id %s - skipping' % sta)
                continue
            try:
                coordinates = inv[0].get_coordinates(chans[0].id)
            except Exception:
                print('No coordinates found for station %s, channel %s' %
                      (sta, chans[0].id))
                continue
            dist, azm, bazm = gps2dist_azimuth(org['latitude'],
                                               org['longitude'],
                                               coordinates['latitude'],
                                               coordinates['longitude'])
            gcarc = locations2degrees(org['latitude'], org['longitude'],
                                      coordinates['latitude'],
                                      coordinates['longitude'])
            if phase == 'R' or phase == 'P':  # Rayleigh or P wave
                try:
                    tr = st.select(station=sta, component='Z', location=loc)[0]
                except IndexError:
                    print('No vertical for %s:%s' % (sta, loc))
                    continue
                try:
                    inv = client.get_stations(network=tr.stats.network,
                                              station=sta,
                                              channel=tr.stats.channel,
                                              location=loc,
                                              starttime=torg,
                                              endtime=torg + 100.,
                                              level='response')
                except Exception as e:
                    warnings.warn(str(e))
                    print('FDSNWS request failed for trace id %s - skipping' % tr.id)
                    continue
                tr = tr.copy()
                tr.stats.response = inv[0].get_response(tr.id, torg)
                tr.stats.coordinates = inv[0].get_coordinates(tr.id)
                tr.remove_response(pre_filt=frq4[phase], output='DISP')
                tr.stats.gcarc = gcarc
                tr.stats.azimuth = azm
                #t1 = minv[0].get_responeax(tr.stats.starttime,t+dist/rvmax)
                #t2 = min(tr.stats.endtime  ,t+dist/rvmin)
                t1 = max(torg, tr.stats.starttime)
                t2 = min(torg + trngmx, tr.stats.endtime)
                tr.trim(starttime=t1, endtime=t2)
                decim = int(0.01 + delta_T[phase] / tr.stats.delta)
                ch = inv.select(station=sta, channel=tr.stats.channel)[0][0][0]
                print(tr.id, ' ', tr.stats.sampling_rate, ' decimated by ', decim,
                      'sensitivity=', ch.response.instrument_sensitivity.value)
                if tr.stats.starttime - torg < 0.:
                    tr.trim(starttime=torg)
                tr.decimate(factor=decim, no_filter=True)
                tr.data *= 1.e6  # Convert to microns
            elif phase == 'L' or phase == 'SH':  # Love or SH wave
                if len(chans.select(component='E')) != 0:
                    try:
                        tr1a = st.select(station=sta,
                                         component='E',
                                         location=loc)[0]
                        tr2a = st.select(station=sta,
                                         component='N',
                                         location=loc)[0]
                    except IndexError:
                        print('Station %s does not have 2 horizontal components - skipping' % sta)
                        continue
                elif len(chans.select(component='1')) != 0:
                    try:
                        tr1a = st.select(station=sta,
                                         component='1',
                                         location=loc)[0]
                        tr2a = st.select(station=sta,
                                         component='2',
                                         location=loc)[0]
                    except IndexError:
                        print('Station %s does not have 2 horizontal components - skipping' % sta)
                        continue
                tr1 = tr1a.copy()
                tr1.data = tr1a.data.copy()
                tr2 = tr2a.copy()
                tr2.data = tr2a.data.copy()
                ch1 = inv.select(station=sta,
                                 channel=tr1.stats.channel)[0][0][0]
                ch2 = inv.select(station=sta,
                                 channel=tr2.stats.channel)[0][0][0]
                tr1.stats.response = ch1.response
                tr1.remove_response(pre_filt=frq4[phase], output='DISP')
                tr2.stats.response = ch2.response
                tr2.remove_response(pre_filt=frq4[phase], output='DISP')
                strt = max(tr1.stats.starttime, tr2.stats.starttime)
                endt = min(tr1.stats.endtime, tr2.stats.endtime)
                tr1.trim(starttime=strt, endtime=endt)
                tr2.trim(starttime=strt, endtime=endt)
                # Rotate components first to ZNE
                vert, north, east = rotate2zne(tr1.data, ch1.azimuth, 0.,
                                               tr2.data, ch2.azimuth, 0.,
                                               np.zeros(tr1.stats.npts), 0.,
                                               0.)
                radial, transverse = rotate_ne_rt(north, east, bazm)
                tr = Trace(header=tr1.stats, data=transverse)
                tr2 = Trace(header=tr2.stats, data=radial)
                tr.stats.channel = tr.stats.channel[:-1] + 'T'
                # Change one of the invout channels to end in 'T'
                net = inv[-1]
                stn = net[0]
                chn = stn[0]
                chn.code = chn.code[:-1] + 'T'
                #
                tr.stats.gcarc = gcarc
                tr.stats.azimuth = azm
                decim = int(0.01 + delta_T[phase] / tr.stats.delta)
                print(tr.id, ' ', tr.stats.sampling_rate, ' decimated by ', decim)
                print('%s: sensitivity=%g, azimuth=%g, dip=%g' % (
                    ch1.code, ch1.response.instrument_sensitivity.value,
                    ch1.azimuth, ch1.dip))
                print('%s: sensitivity=%g, azimuth=%g, dip=%g' % (
                    ch2.code, ch2.response.instrument_sensitivity.value,
                    ch2.azimuth, ch2.dip))
                if tr.stats.starttime - torg < 0.:
                    tr.trim(starttime=torg)
                    tr2.trim(starttime=torg)
                tr.decimate(factor=decim, no_filter=True)
                tr2.decimate(factor=decim, no_filter=True)
                tr.radial = 1.e6 * tr2.data
                tr.stats.coordinates = coordinates
                tr.data *= 1.e6  # Convert to microns
            if phase == 'R' or phase == 'L':  # Window according to group velocity window
                gwin = windows[phase]
                tbeg, tend = (dist * .001 / gwin[1], dist * .001 / gwin[0])
                tr.trim(torg + tbeg, torg + tend)
            elif phase == 'P' or phase == 'SH':  # Window by times before and after pick
                tbef, taft = windows[phase]
                tr.trim(sta_pick.time - tbef, sta_pick.time + taft)
                idx = int(0.5 + tbef / tr.stats.delta)
                avg = tr.data[:idx].mean()
                tr.data -= avg
                if taper is not None:
                    itp = int(taper * tr.stats.npts)
                    for i in range(tr.stats.npts - itp, tr.stats.npts):
                        tr.data[i] *= 0.5 * (1. + mt.cos(
                            mt.pi * (i - (tr.stats.npts - itp)) / float(itp)))
                tr.stats.pick = sta_pick
            stobs[phase].append(tr)
            # Append the station inventory to invout
            if invout is None:
                invout = inv
            else:
                invout += inv
        # Pickle to file
    return stobs, invout
예제 #53
0
 def read_in_traces(self, file_list):
     running_stream = Stream()
     for file in file_list:
         running_stream += read(file)
     return running_stream
예제 #54
0
if not mseed_files:
    pass  # XXX print/mail warning

inst = cornFreq2Paz(1.0)
nfft = 4194304  # next nfft of 5h
last_endtime = 0
last_id = "--"

trigger_list = []
summary = []
summary.append("#" * 79)
for file in mseed_files:
    summary.append(file)
    try:
        st = read(file, "MSEED")
        T1 = UTCDateTime("2010-09-20T00:00:00")  # XXX
        T2 = UTCDateTime("2010-09-20T04:00:00")  # XXX
        st.trim(T1, T2)  # XXX
        #st.trim(endtime=st[0].stats.starttime+5000) # XXX
        st.merge(-1)
        st.sort()
        summary.append(str(st))
    except:
        summary.append("skipped!")
        continue
    for tr in st:
        stats = tr.stats
        for parser in parsers:
            try:
                tr.stats.paz = parser.getPAZ(tr.id, tr.stats.starttime)
예제 #55
0
#!/usr/bin/env python3
from obspy.core import read
from numpy import sqrt
t1_hamming = read('t100.hamming.taper.sac')
t1_hanning = read('t100.hanning.taper.sac')
t1_cosine = read('t100.cosine.taper.sac')

t2_hamming = read('t101.hamming.taper.sac')
t2_hanning = read('t101.hanning.taper.sac')
t2_cosine = read('t101.cosine.taper.sac')

cline1 = ''
for i in range(len(t1_hamming[0].data)):
    cline1 = cline1 + '%.8lf, %.8lf, %.8lf\n' % (t1_hamming[0].data[i],
                                                 t1_hanning[0].data[i],
                                                 pow(t1_cosine[0].data[i], 1))
print(cline1)
of = open('../taper100.all.txt', 'w')
of.write(cline1)
of.close()

cline2 = ''
for i in range(len(t2_hamming[0].data)):
    cline2 = cline2 + '%.8lf, %.8lf, %.8lf\n' % (t2_hamming[0].data[i],
                                                 t2_hanning[0].data[i],
                                                 t2_cosine[0].data[i])
print(cline2)
of = open('../taper101.all.txt', 'w')
of.write(cline2)
of.close()
"""
Example #56
0
    if len(k) != 0:
        x[k] = np.inf  # MATLAB-style: fill these edge cases with Inf

    k = np.where(((b > 0) & (p > 0) & (p < 1)))[0]
    #~ print k
    
    if len(k)!=0:
        pk = p[k]
        bk = b[k]
        #~ print pk, bk
        x[k] = np.sqrt((-2*bk ** 2) * np.log(1 - pk))
    return x


if __name__ == "__main__":
    #~ from scipy.io.matlab import loadmat
    #~ v1 = loadmat(r"C:\Users\tlecocq\Documents\Tom's Share\Pack Kurtogram\Pack Kurtogram V3\VOIE1.mat")
    #~ x = v1['v1']
    
    import os
    from obspy.core import read
    st = read(os.path.join(r'C:\Users\thomas\Desktop\3069',
                           '*.UCC.DOU..HHZ.D.MSEED'))
    #~ st.plot()
    st.merge()
    
    
    Fs = st[0].stats.sampling_rate
    x = st[0].data[int(600 * Fs):int(720 * Fs)]
    #~ x = st[0].data
    nlevel= 7
    c = Fast_Kurtogram(x, nlevel, Fs)
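The expression np.sqrt((-2 * bk ** 2) * np.log(1 - pk)) above is the
inverse CDF (quantile function) of a Rayleigh distribution with scale b,
x = b * sqrt(-2 * ln(1 - p)); a quick cross-check against scipy
(a sketch, assuming scipy is installed):

import numpy as np
from scipy.stats import rayleigh
p, b = 0.9, 2.0
assert np.isclose(np.sqrt(-2 * b ** 2 * np.log(1 - p)),
                  rayleigh.ppf(p, scale=b))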
Example #57
0
def main(comps=None):
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s [%(levelname)s] %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')

    logging.info('*** Starting: Compute MWCS ***')

    db = connect()
    if comps is None:
        components_to_compute = get_components_to_compute(db)
    else:
        components_to_compute = comps

    mov_stack = get_config(db, "mov_stack")
    # "1" and "1,2,5" both parse correctly with a single split
    mov_stacks = [int(mi) for mi in mov_stack.split(',')]

    goal_sampling_rate = float(get_config(db, "cc_sampling_rate"))
    maxlag = float(get_config(db, "maxlag"))

    # First we reset all DTT jobs to "T"odo if the REF is new for a given pair
    for station1, station2 in get_station_pairs(db, used=True):
        sta1 = "%s.%s" % (station1.net, station1.sta)
        sta2 = "%s.%s" % (station2.net, station2.sta)
        pair = "%s:%s" % (sta1, sta2)
        if is_dtt_next_job(db, jobtype='DTT', ref=pair):
            logging.info(
                "We will recompute all MWCS based on the new REF for %s" %
                pair)
            reset_dtt_jobs(db, pair)
            update_job(db, "REF", pair, jobtype='DTT', flag='D')

    logging.debug('Ready to compute')
    # Then we compute the jobs
    outfolders = []
    while is_dtt_next_job(db, flag='T', jobtype='DTT'):
        pair, days, refs = get_dtt_next_job(db, flag='T', jobtype='DTT')
        logging.info("There are MWCS jobs for some days to recompute for %s" %
                     pair)
        for f in get_filters(db, all=False):
            filterid = int(f.ref)
            for components in components_to_compute:
                ref_name = pair.replace('.', '_').replace(':', '_')
                rf = os.path.join("STACKS", "%02i" % filterid, "REF",
                                  components, ref_name + ".MSEED")
                ref = read(rf)[0].data
                for day in days:
                    for mov_stack in mov_stacks:
                        df = os.path.join("STACKS", "%02i" % filterid,
                                          "%03i_DAYS" % mov_stack, components,
                                          ref_name,
                                          str(day) + ".MSEED")
                        if os.path.isfile(df):
                            cur = read(df)[0].data
                            logging.debug(
                                'Processing MWCS for: %s.%s.%02i - %s - %02i days'
                                % (ref_name, components, filterid, day,
                                   mov_stack))
                            output = mwcs(cur, ref, f.mwcs_low, f.mwcs_high,
                                          goal_sampling_rate, -maxlag,
                                          f.mwcs_wlen, f.mwcs_step)
                            outfolder = os.path.join('MWCS', "%02i" % filterid,
                                                     "%03i_DAYS" % mov_stack,
                                                     components, ref_name)
                            if outfolder not in outfolders:
                                if not os.path.isdir(outfolder):
                                    os.makedirs(outfolder)
                                outfolders.append(outfolder)
                            np.savetxt(
                                os.path.join(outfolder, "%s.txt" % str(day)),
                                output)
                            del output, cur
        for day in days:
            update_job(db, day, pair, jobtype='DTT', flag='D')

    logging.info('*** Finished: Compute MWCS ***')
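The loop above assumes a fixed on-disk layout for the reference and moving
stacks; the path construction can be exercised in isolation (a sketch with
hypothetical station names):

import os
filterid, mov_stack, components = 1, 5, "ZZ"
ref_name = "BE.STA1:BE.STA2".replace('.', '_').replace(':', '_')
print(os.path.join("STACKS", "%02i" % filterid, "REF",
                   components, ref_name + ".MSEED"))
# -> STACKS/01/REF/ZZ/BE_STA1_BE_STA2.MSEED
print(os.path.join("STACKS", "%02i" % filterid, "%03i_DAYS" % mov_stack,
                   components, ref_name, "2010-09-20.MSEED"))
# -> STACKS/01/005_DAYS/ZZ/BE_STA1_BE_STA2/2010-09-20.MSEED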
Example #58
0
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 29 12:19:56 2017

@author: horas
"""

from obspy.core import read
from scipy.integrate import simps
import numpy as np
#import matplotlib.pylab as plt
#from obspy.core import stream

st = read("DNP_full.MSEED")

print st
print '\n'
print st[0].data
print '\n'
print type(st)
print '\n'
print st.traces
print '\n'
print st[0].stats
print '\n'
print st[0].stats.delta, '|', st[0].stats.endtime
print '\n'
print st

print st[0]
Example #59
0
def Read_event(Year, jJul, Hour, Second, Station, plot):
    """Return a stream containing the 3 traces N, Z, E of an event (calib = 1).
    * input:
        - Station: station considered, e.g. '1'
        - Year: year of the event, e.g. 15
        - jJul: julian day
        - Hour: hour of the event, e.g. '08'
        - Second: second within the hour
        - plot: if True, the waveforms are plotted
    * output:
        - st: Stream containing the 3 traces E, N, Z
    * examples:
      1. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
         Station, Year, jJul, Hour = ConvertDatestr(B[j])
         ==> (Station, Year, jJul, Hour) = ('2', '15', '001', '00')
         st = Read_event(Year, jJul, Hour, 12, Station, True)
      2. ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
         for j in [1, 222344, 222345]:
             Station, Year, jJul, Hour = ConvertDatestr(B[j])
             print ConvertDatestr(B[j])
             if not os.path.exists('/home/claire/PHD/Working/Data/Wangrong_seismic_data/20%s/R%s.02/GSW0%s.%s.%s.%s.00.00.BHN.SAC'
                                   % (Year, jJul, Station, Year, jJul, str(Hour))):
                 print 'file not existing', jJul, Hour
                 continue
             Read_event(Year, jJul, Hour, 12, Station, True)
    """
    #0. Uniformise file format for file reading ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 
    if int(Second) >= 3600:
        # roll seconds past the hour into Hour and, if needed, the julian day
        print Second
        h = int(Second) // 3600
        Hour = int(Hour) + h
        Second = int(Second) % 3600
        if Hour > 23:
            jJul = str(int(jJul) + 1)
            Hour = Hour - 24
        else:
            jJul = str(int(jJul))
        if Hour < 10:
            Hour = '0' + str(Hour)
        if int(jJul) < 10:
            jJul = '00' + str(jJul)
        elif int(jJul) < 100:
            jJul = '0' + str(jJul)
   
    #1. reading files ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    st = Stream()
    for comp in ['BHE', 'BHN', 'BHZ']:  # same file pattern for all 3 channels
        tr = read('/home/burtin/DATA/LinTianShan/Seismic_Data/20%s/R%s.02/'
                  'GSW0%s.%s.%s.%s.00.00.%s.SAC'
                  % (Year, jJul, Station, Year, jJul, str(Hour), comp))[0]
        tr.stats.calib = 1
        st.append(tr)
    #2. Plotting waveform~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    if plot:
        fig = plt.figure()
        fig.canvas.set_window_title('Waveform_%s_%s'%(jJul, Hour))
        st.plot(fig=fig)   
    return st
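The manual zero-padding in step 0 above is equivalent to str.zfill, which
reads more compactly (a sketch):

Hour = str(Hour).zfill(2)   # 5 -> '05'
jJul = str(jJul).zfill(3)   # 7 -> '007'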
Example #60
0
def calc_derivatives(Tmin, Tmax, station, comp, derivatives_folder,
                     out_folder):
    derivatives_folder = derivatives_folder.split("kernels")[0]

    with open(derivatives_folder + "/kernels_info.txt", "r") as fh:
        lines = fh.readlines()
    delta_x = float(lines[0].split()[2])
    delta_y = float(lines[1].split()[2])
    delta_z = float(lines[2].split()[2])

    delta_components = [
        "xp",
        "yp",
        "zp",
        "xm",
        "ym",
        "zm",
        "xpyp",
        "xpym",
        "xmyp",
        "xmym",
        "xpzp",
        "xpzm",
        "xmzp",
        "xmzm",
        "ypzp",
        "ypzm",
        "ymzp",
        "ymzm",
        "point_source",
    ]

    derivative_list = [
        "dSdx",
        "dSdy",
        "dSdz",
        "dS2dx2",
        "dS2dy2",
        "dS2dz2",
        "dSdxdy",
        "dSdxdz",
        "dSdydz",
    ]
    tr = {}
    dS = {}

    for del_comp in delta_components:
        folder = f"{derivatives_folder}{del_comp}/"
        tr_tmp = read(f"{folder}*{station}.MX{comp}.sem.sac")[0]
        tr_tmp.detrend("demean")
        tr_tmp.filter("bandpass",
                      freqmin=1 / Tmax,
                      freqmax=1 / Tmin,
                      corners=4,
                      zerophase=True)
        tr[del_comp, station] = tr_tmp

    for der_comp in derivative_list:
        dS[der_comp] = tr["point_source", station].copy()

    # First derivatives
    dS["dSdx"].data = (tr["xp", station].data -
                       tr["xm", station].data) / (2 * delta_x)
    dS["dSdy"].data = (tr["yp", station].data -
                       tr["ym", station].data) / (2 * delta_y)
    dS["dSdz"].data = (tr["zp", station].data -
                       tr["zm", station].data) / (2 * delta_z)

    # Second derivatives
    dS["dS2dx2"].data = (tr["xp", station].data -
                         2 * tr["point_source", station].data +
                         tr["xm", station].data) / ((delta_x)**2)
    dS["dS2dy2"].data = (tr["yp", station].data -
                         2 * tr["point_source", station].data +
                         tr["ym", station].data) / ((delta_y)**2)
    dS["dS2dz2"].data = (tr["zp", station].data -
                         2 * tr["point_source", station].data +
                         tr["zm", station].data) / ((delta_z)**2)

    dS["dSdxdy"].data = (tr["xpyp", station].data - tr["xpym", station].data -
                         tr["xmyp", station].data +
                         tr["xmym", station].data) / (4 * delta_x * delta_y)
    dS["dSdxdz"].data = (tr["xpzp", station].data - tr["xpzm", station].data -
                         tr["xmzp", station].data +
                         tr["xmzm", station].data) / (4 * delta_x * delta_z)
    dS["dSdydz"].data = (tr["ypzp", station].data - tr["ypzm", station].data -
                         tr["ymzp", station].data +
                         tr["ymzm", station].data) / (4 * delta_y * delta_z)

    for der_comp in derivative_list:
        dS[der_comp].interpolate(sampling_rate=0.5, method="linear")
        dS[der_comp].write(
            f"{out_folder}/{station}_{comp}_{der_comp}.sac",
            format="SAC",
        )
        plt.plot(dS[der_comp].times(), dS[der_comp].data, color="black")
        dS[der_comp].plot(
            outfile=f"{out_folder}/{station}_{comp}_{der_comp}.png")
        plt.close()

    return
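The derivative estimates above are the standard second-order central-difference
stencils: (S(x+d) - S(x-d)) / (2 d) for first derivatives,
(S(x+d) - 2 S(x) + S(x-d)) / d**2 for pure second derivatives, and the
four-point cross stencil divided by 4 * dx * dy for the mixed terms. A quick
numerical sanity check on a smooth test function (a sketch; the test function
and step size are arbitrary):

import numpy as np
f = lambda x, y: np.sin(x) * np.cos(y)
x0, y0, d = 1.0, 0.5, 1e-4
dfdx = (f(x0 + d, y0) - f(x0 - d, y0)) / (2 * d)
d2fdx2 = (f(x0 + d, y0) - 2 * f(x0, y0) + f(x0 - d, y0)) / d ** 2
d2fdxdy = (f(x0 + d, y0 + d) - f(x0 + d, y0 - d)
           - f(x0 - d, y0 + d) + f(x0 - d, y0 - d)) / (4 * d * d)
# compare against the analytic derivatives of sin(x) * cos(y)
assert np.allclose([dfdx, d2fdx2, d2fdxdy],
                   [np.cos(x0) * np.cos(y0),
                    -np.sin(x0) * np.cos(y0),
                    -np.cos(x0) * np.sin(y0)])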