Example #1
 def test_writeIntegersViaObsPy(self):
     """
     Write file test via L{obspy.Trace}.
     """
     tempfile = NamedTemporaryFile().name
     npts = 1000
     # data cloud of integers - float won't work!
     np.random.seed(815)  # make test reproducible
     data = np.random.randint(-1000, 1000, npts).astype('int32')
     stats = {'network': 'BW', 'station': 'TEST', 'location': '',
              'channel': 'EHE', 'npts': npts, 'sampling_rate': 200.0}
     start = UTCDateTime(2000, 1, 1)
     stats['starttime'] = start
     tr = Trace(data=data, header=stats)
     st = Stream([tr])
     st.verify()
     # write
     st.write(tempfile, format="GSE2")
     # read again
     stream = read(tempfile)
     os.remove(tempfile)
     stream.verify()
     np.testing.assert_equal(data, stream[0].data)
     # test default attributes
     self.assertEqual('CM6', stream[0].stats.gse2.datatype)
     self.assertEqual(-1, stream[0].stats.gse2.vang)
     self.assertEqual(1.0, stream[0].stats.gse2.calper)
     self.assertEqual(1.0, stream[0].stats.calib)
Example #2
def axisem2mseed_all(path):
    """
    change .dat files into MSEED format
    """
    global test_param

    t = UTCDateTime(0)
    traces = []

    for file in glob.iglob(os.path.join(path, '*.dat')):
        stationID = file.split('/')[-1].split('_')[0]
        chan = file.split('/')[-1].split('_')[-1].split('.')[0]
        dat = np.loadtxt(file)
        npts = len(dat[:,0])
        stats = {'network': 'SG',
                 'station': stationID,
                 'location': '',
                 'channel': chan,
                 'npts': npts,
                 'sampling_rate': (npts - 1.)/(dat[-1,0] - dat[0,0]),
                 'starttime': t + dat[0,0],
                 'mseed' : {'dataquality': 'D'}}
        traces.append(Trace(data=dat[:,1], header=stats))

    st = Stream(traces)
    st.sort()
    fname = os.path.join(path, 'seismograms.mseed')
    print(fname)
    st.write(fname, format='MSEED')
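
A minimal usage sketch for axisem2mseed_all; the directory path is a placeholder, and the merged file name follows the hard-coded 'seismograms.mseed' above:

axisem2mseed_all('/path/to/axisem/output')

# Read the merged file back with ObsPy for a quick check.
from obspy.core import read
st = read('/path/to/axisem/output/seismograms.mseed')
print(st)

Example #3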
    def write_adjoint_traces(self, path, syn, dat, channel):
        """ Computes adjoint traces from observed and synthetic traces
        """
        nt, dt, _ = self.get_time_scheme(syn)
        nr, _ = self.get_network_size(syn)

        Del = np.loadtxt(path +'/'+ '../../delta_syn_ij')
        rsd = np.loadtxt(path +'/'+ '../../rsd_ij')

        # initialize trace arrays
        adj = Stream()
        for i in range(nr):
            adj.append(Trace(
                data=np.zeros(nt, dtype='float32'),
                header=syn[i].stats))

        # generate adjoint traces
        for i in range(nr):
            for j in range(i):
                si = syn[i].data
                sj = syn[j].data

                adj[i].data += rsd[i,j] * \
                               self.adjoint_dd(si, sj, +Del[i,j], nt, dt)
                adj[j].data -= rsd[i,j] * \
                               self.adjoint_dd(sj, si, -Del[i,j], nt, dt)


        # optional weighting
        adj = self.apply_weights(adj)

        # write adjoint traces
        self.writer(adj, path, channel)
Example #4
 def test_convert2Sac(self):
     """
     Test that an obspy trace is correctly written to SAC.
     All the header variables which are tagged as required by
     http://www.iris.edu/manuals/sac/SAC_Manuals/FileFormatPt2.html
     are controlled in this test
     also see http://www.iris.edu/software/sac/manual/file_format.html
     """
     # setUp is called before every test, not only once at the
     # beginning, so we allocate the data right here
     # generate artificial mseed data
     np.random.seed(815)
     head = {'network': 'NL', 'station': 'HGN', 'location': '00',
             'channel': 'BHZ', 'calib': 1.0, 'sampling_rate': 40.0,
             'starttime': UTCDateTime(2003, 5, 29, 2, 13, 22, 43400)}
     data = np.random.randint(0, 5000, 11947).astype("int32")
     st = Stream([Trace(header=head, data=data)])
     # write them as SAC
     tmpfile = NamedTemporaryFile().name
     st.write(tmpfile, format="SAC")
     st2 = read(tmpfile, format="SAC")
     # file must exist, we just created it
     os.remove(tmpfile)
     # check all the required entries (see url in docstring)
     self.assertEqual(st2[0].stats.starttime, st[0].stats.starttime)
     self.assertEqual(st2[0].stats.npts, st[0].stats.npts)
     self.assertEqual(st2[0].stats.sac.nvhdr, 6)
     self.assertAlmostEqual(st2[0].stats.sac.b, 0.000400)
     # compare with the appropriate number of digits (decimal places)
     self.assertAlmostEqual((0.0004 + (st[0].stats.npts - 1) * \
                            st[0].stats.delta) / st2[0].stats.sac.e, 1.0)
     self.assertEqual(st2[0].stats.sac.iftype, 1)
     self.assertEqual(st2[0].stats.sac.leven, 1)
     self.assertAlmostEqual(st2[0].stats.sampling_rate / \
                            st[0].stats.sampling_rate, 1.0)
Example #5
def merge_single(nch,dstart,dend):
  '''Merges traces of one channel to larger traces. Used for cross-correlation'''

  # here you load all the functions you need to use
  from obspy.seg2.seg2 import readSEG2
  from obspy.core import Stream


  dataDir2 = "/import/neptun-radler/STEINACH_feb/"
  dataDir = "/import/three-data/hadzii/STEINACH/STEINACH_longtime/"
  outdir = "/home/jsalvermoser/Desktop/Processing/out_merged"

  tr = []

  for k in range(dstart, dend, 1):
    fname = '%d' %(k)
    fileName = fname + ".dat" 
    st = readSEG2(dataDir + fileName)
    #st.detrend('linear')
    tr.append(st[nch-1])


  
  new_stream = Stream(traces=tr)
  new_stream.merge(method=1, fill_value='interpolate')

  start = new_stream[0].stats.starttime
  end = new_stream[0].stats.endtime

  timeframe = str(nch)+ "_" + str(start.year) +'.'+ str(start.julday) +'.'+ str(start.hour) +'.'+ str(start.minute) +'.'+ str(start.second) \
      +'-'+ str(end.year) +'.'+ str(end.julday) +'.'+ str(end.hour) +'.'+ str(end.minute) +'.'+ str(end.second)

  new_stream.write(outdir + "/" + timeframe + ".mseed", format="MSEED")

  return new_stream[0]
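
A hedged usage sketch for merge_single, assuming the hard-coded directories above exist and contain consecutively numbered .dat files:

# Merge channel 3 of files 100.dat through 199.dat into one long trace.
merged = merge_single(nch=3, dstart=100, dend=200)
print(merged.stats.starttime, merged.stats.endtime)

Example #6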
 def test_writeAndReadDifferentEncodings(self):
     """
     Writes and read a file with different encoding via the obspy.core
     methods.
     """
     npts = 1000
     np.random.seed(815)  # make test reproducible
     data = np.random.randn(npts).astype('float64') * 1e3 + .5
     st = Stream([Trace(data=data)])
     # Loop over some record lengths.
     for encoding, value in ENCODINGS.items():
         seed_dtype = value[2]
         tempfile = NamedTemporaryFile().name
         # Write it once with the encoding key and once with the value.
         st[0].data = data.astype(seed_dtype)
         st.verify()
         st.write(tempfile, format="MSEED", encoding=encoding)
         st2 = read(tempfile)
         del st2[0].stats.mseed
         np.testing.assert_array_equal(st[0].data, st2[0].data)
         del st2
         ms = _MSStruct(tempfile)
         ms.read(-1, 1, 1, 0)
         self.assertEqual(ms.msr.contents.encoding, encoding)
         del ms  # for valgrind
         os.remove(tempfile)
Example #7
def merge_single(nch,dstart,dend):
  '''Merges traces of one channel to larger traces. Used for cross-correlation'''

  # here you load all the functions you need to use
  from obspy.seg2.seg2 import readSEG2
  from obspy.core import Stream


  # directories:
  dataDir2 = "/import/neptun-radler/STEINACH_feb/"
  dataDir = "/import/three-data/hadzii/STEINACH/STEINACH_longtime/"


  tr = []

  for k in range(dstart, dend, 1):
    fname = '%d' %(k)
    fileName = fname + ".dat" 
    st = readSEG2(dataDir + fileName)
    tr.append(st[nch-1])


  
  new_stream = Stream(traces=tr)
  new_stream.merge(method=1, fill_value='interpolate')


  return new_stream
Example #8
def export_sac(db, filename, pair, components, filterid, corr, ncorr=0,
               sac_format=None, maxlag=None, cc_sampling_rate=None):
    if sac_format is None:
        sac_format = get_config(db, "sac_format")
    if maxlag is None:
        maxlag = float(get_config(db, "maxlag"))
    if cc_sampling_rate is None:
        cc_sampling_rate = float(get_config(db, "cc_sampling_rate"))
    try:
        os.makedirs(os.path.split(filename)[0])
    except OSError:
        # directory may already exist
        pass
    filename += ".SAC"
    mytrace = Trace(data=corr)
    mytrace.stats['station'] = pair
    mytrace.stats['sampling_rate'] = cc_sampling_rate
    if maxlag:
        mytrace.stats.starttime = -maxlag
    mytrace.stats.sac = AttribDict()
    mytrace.stats.sac.depmin = np.min(corr)
    mytrace.stats.sac.depmax = np.max(corr)
    mytrace.stats.sac.depmen = np.mean(corr)
    mytrace.stats.sac.scale = 1
    mytrace.stats.sac.npts = len(corr)

    st = Stream(traces=[mytrace, ])
    st.write(filename, format='SAC')
    del st
    return
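
A usage sketch. Because sac_format, maxlag and cc_sampling_rate are passed explicitly, the db handle is never consulted, so None can stand in for it here; corr is a synthetic array:

import numpy as np

corr = np.random.randn(2048).astype(np.float32)
export_sac(None, "output/STA1_STA2_ZZ", pair="STA1:STA2", components="ZZ",
           filterid=1, corr=corr, sac_format="raw", maxlag=120.0,
           cc_sampling_rate=20.0)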
Example #9
def ascii2sac(sismograma):
    Wav, Head = [], []
    r = open(sismograma, 'r')
    
    counter = 0
    for linea in r:
        counter+=1
        if counter <= 5:
            #               print linea
            Head.append(linea.strip())
        else:
            Wav.append(linea)
#print Head

    date = Head[2][0:10]
    hour = Head[2][11:19]
    Fs = float(Head[3][0:8])
    counts = float(Head[4][0:4])
    station = Head[1]
    # if the station is triaxial:
    channel = Head[1][3]
    esta = station[0:3]
    
    print(date, hour, str(Fs), str(counts), station)
    
    data = np.array(Wav, dtype=np.float32)
    #    print data
    
    if channel == 'Z':
        stats = {'network': 'OP', 'station': esta , 'location': '',
        'channel': 'BHZ', 'npts': len(data), 'sampling_rate': Fs,
        'mseed': {'dataquality': 'D'}}
    elif channel == 'N':
        stats = {'network': 'OP', 'station': esta , 'location': '',
        'channel': 'BHN', 'npts': len(data), 'sampling_rate': Fs,
        'mseed': {'dataquality': 'D'}}
    elif channel == 'E':
        stats = {'network': 'OP', 'station': esta , 'location': '',
        'channel': 'BHE', 'npts': len(data), 'sampling_rate': Fs,
        'mseed': {'dataquality': 'D'}}
    else:
        stats = {'network': 'OP', 'station': station , 'location': '',
        'channel': 'SHZ', 'npts': len(data), 'sampling_rate': Fs,
        'mseed': {'dataquality': 'D'}}

    Date = date+hour
    Date = datetime.datetime.strptime(Date, '%Y/%m/%d%H:%M:%S')
    starttime = UTCDateTime(Date)
    stats['starttime'] = starttime

    print(stats)
    
    st = Stream([Trace(data=data, header=stats)])
    st.write(r.name[0:-4]+'.sac', format='SAC', encoding=4)
    #    st1 = read(r.name[0:-4]+'.mseed')
    #    print st1
    #    st1.plot(color='r')
    return 'trace converted'

#ascii2sac(sismo)
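Example #10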
def process_syn(stream, starttime, endtime, sampling_rate, npts, filt_freq, max_percentage=0.05):

    stream_process = Stream()
    for tr in stream:
        new_tr = tr.copy()
        cut_func(new_tr, starttime, endtime)

        # detrend, demean, taper
        new_tr.detrend("linear")
        new_tr.detrend("demean")
        new_tr.taper(max_percentage=max_percentage, type="hann")

        # geometric compensation
        # filter and interpolation
        filter_synt(new_tr, filt_freq)
        new_tr.interpolate(sampling_rate=sampling_rate,\
                starttime=new_tr.stats.starttime,npts=npts)

        # detrend, demean, taper
        new_tr.detrend("linear")
        new_tr.detrend("demean")
        new_tr.taper(max_percentage=max_percentage, type="hann")

        new_tr.data = np.require(new_tr.data, dtype=np.float32)
        stream_process.append(new_tr)

    return stream_process
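
A usage sketch for process_syn; cut_func and filter_synt are helpers assumed to be defined in the same module, and filt_freq=1.0 is only a guessed corner frequency:

from obspy.core import read

syn = read()  # ObsPy's bundled example stream stands in for real synthetics
t0 = syn[0].stats.starttime
processed = process_syn(syn, starttime=t0, endtime=t0 + 20.0,
                        sampling_rate=20.0, npts=401, filt_freq=1.0)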
Example #11
 def test_writeStreamViaObsPy(self):
     """
     Write streams, i.e. multiple files via L{obspy.Trace}
     """
     testdata = np.array([111, 111, 111, 111, 111, 109, 106, 103, 103,
                          110, 121, 132, 139])
     testfile = NamedTemporaryFile().name
     self.file = os.path.join(self.path, '3cssan.reg.8.1.RNON.wav')
     tr = read(self.file, format='WAV')[0]
     np.testing.assert_array_equal(tr.data[:13], testdata)
     # write
     st2 = Stream([Trace(), Trace()])
     st2[0].data = tr.data.copy()       # copy the data
     st2[1].data = tr.data.copy() // 2  # be sure data are different
     st2.write(testfile, format='WAV', framerate=7000)
     # read without giving the WAV format option
     base, ext = os.path.splitext(testfile)
     testfile0 = "%s%03d%s" % (base, 0, ext)
     testfile1 = "%s%03d%s" % (base, 1, ext)
     tr30 = read(testfile0)[0]
     tr31 = read(testfile1)[0]
     self.assertEqual(tr30.stats, tr.stats)
     self.assertEqual(tr31.stats, tr.stats)
     np.testing.assert_array_equal(tr30.data[:13], testdata)
     np.testing.assert_array_equal(tr31.data[:13], testdata // 2)
     os.remove(testfile)
     os.remove(testfile0)
     os.remove(testfile1)
Example #12
def export_sac(db, filename, pair, components, filterid, corr, ncorr=0, sac_format=None, maxlag=None, cc_sampling_rate=None):
    if sac_format is None:
        sac_format = get_config(db, "sac_format")
    if maxlag is None:
        maxlag = float(get_config(db, "maxlag"))
    if cc_sampling_rate is None:
        cc_sampling_rate = float(get_config(db, "cc_sampling_rate"))
    try:
        os.makedirs(os.path.split(filename)[0])
    except OSError:
        pass
    filename += ".SAC"
    mytrace = Trace(data=corr)
    mytrace.stats['station'] = pair
    mytrace.stats['sampling_rate'] = cc_sampling_rate

    st = Stream(traces=[mytrace, ])
    st.write(filename, format='SAC')
    tr = SacIO(filename)
    if sac_format == "doublets":
        tr.SetHvalue('A', 120)
    else:
        tr.SetHvalue('B', -maxlag)
        tr.SetHvalue('DEPMIN', np.min(corr))
        tr.SetHvalue('DEPMAX', np.max(corr))
        tr.SetHvalue('DEPMEN', np.mean(corr))
        tr.SetHvalue('SCALE', 1)
        tr.SetHvalue('NPTS', len(corr))
    tr.WriteSacBinary(filename)
    del st, tr
    return
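Example #13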
 def test_Header(self):
     """
     Tests whether the header is correctly written and read.
     """
     tempfile = NamedTemporaryFile().name
     np.random.seed(815)  # make test reproducible
     data = np.random.randint(-1000, 1000, 50).astype('int32')
     stats = {'network': 'BW', 'station': 'TEST', 'location': 'A',
              'channel': 'EHE', 'npts': len(data), 'sampling_rate': 200.0,
              'mseed': {'record_length': 512, 'encoding': 'STEIM2',
                        'filesize': 512, 'dataquality': 'D',
                        'number_of_records': 1, 'byteorder': '>'}}
     stats['starttime'] = UTCDateTime(2000, 1, 1)
     st = Stream([Trace(data=data, header=stats)])
     # Write it.
     st.write(tempfile, format="MSEED")
     # Read it again and delete the temporary file.
     stream = read(tempfile)
     os.remove(tempfile)
     stream.verify()
     # Loop over the attributes to be able to assert them because a
     # dictionary is not a stats dictionary.
     # This also assures that there are no additional keys.
     for key in stats.keys():
         self.assertEqual(stats[key], stream[0].stats[key])
Example #14
 def set_dummy(self):
     """
     Create a dummy stream object. This is used by the sm_gui.py script 
     in case no data is found.
     """
     smdict = {}
     smdict['lat'] = 0.
     smdict['lon'] = 0.
     smdict['site'] = 'None'
     smdict['site-name'] = 'None'
     smdict['instrument'] = 'None'
     smdict['eventtime'] = UTCDateTime(1970, 1, 1, 0, 0, 0, 0) 
     smdict['hypodep'] = 0.
     smdict['centdep'] = 0.
     smdict['lilax'] = 0.
     smdict['compdir'] = 0.
     smdict['epicdist'] = 0.
     smdict['prepend'] = 0.
     smdict['append'] = 0.
     smdict['Ml'] = 0.
     smdict['Ms'] = 0.
     smdict['Mw'] = 0.
     smdict['Mb'] = 0.
     st = Stream()
     stats = {'network': '', 'delta': 1.0,
              'station': 'No data available', 'location': '',
              'starttime': UTCDateTime(1970, 1, 1, 0, 0, 0, 0),
              'npts': 100, 'calib': 1.0,
              'sampling_rate': 1.0, 'channel': 'None','smdict':smdict}
     data = np.zeros(100)
     for i in range(9):
         st.append(Trace(data,stats))
     return st
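Example #15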
 def test_writeAndReadDifferentRecordLengths(self):
     """
     Tests Mini-SEED writing and record lengths.
     """
     # libmseed instance.
     npts = 6000
     np.random.seed(815)  # make test reproducible
     data = np.random.randint(-1000, 1000, npts).astype('int32')
     st = Stream([Trace(data=data)])
     record_lengths = [256, 512, 1024, 2048, 4096, 8192]
     # Loop over some record lengths.
     for rec_len in record_lengths:
         # Write it.
         tempfile = NamedTemporaryFile().name
         st.write(tempfile, format="MSEED", reclen=rec_len)
         # Get additional header info
         info = util.getRecordInformation(tempfile)
         # Test reading the two files.
         temp_st = read(tempfile)
         np.testing.assert_array_equal(data, temp_st[0].data)
         del temp_st
         os.remove(tempfile)
         # Check record length.
         self.assertEqual(info['record_length'], rec_len)
         # Check if filesize is a multiple of the record length.
         self.assertEqual(info['filesize'] % rec_len, 0)
Example #16
 def setUp(self):
     # directory where the test files are located
     self.path = os.path.join(os.path.dirname(__file__), 'data')
     self.filename_css = os.path.join(self.path, 'test_css.wfdisc')
     self.filename_nnsa = os.path.join(self.path, 'test_nnsa.wfdisc')
     # set up stream for validation
     header = {}
     header['station'] = 'TEST'
     header['starttime'] = UTCDateTime(1296474900.0)
     header['sampling_rate'] = 80.0
     header['calib'] = 1.0
     header['calper'] = 1.0
     header['_format'] = 'CSS'
     filename = os.path.join(self.path, '201101311155.10.ascii.gz')
     with gzip.open(filename, 'rb') as fp:
         data = np.loadtxt(fp, dtype=np.int_)
     # traces in the test files are sorted ZEN
     st = Stream()
     for x, cha in zip(data.reshape((3, 4800)), ('HHZ', 'HHE', 'HHN')):
         # big-endian copy
         tr = Trace(x, header.copy())
         tr.stats.station += 'be'
         tr.stats.channel = cha
         st += tr
         # little-endian copy
         tr = Trace(x, header.copy())
         tr.stats.station += 'le'
         tr.stats.channel = cha
         st += tr
     self.st_result_css = st.copy()
     for tr in st:
         tr.stats['_format'] = "NNSA_KB_CORE"
     self.st_result_nnsa = st
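Example #17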
def ascii(path, filenames):
    """ Reads SPECFEM3D-style ascii data
    """
    from numpy import loadtxt
    from obspy.core import Stream, Stats, Trace

    stream = Stream()
    for filename in filenames:
        stats = Stats()
        data = loadtxt(path +'/'+ filename)

        stats.filename = filename
        stats.starttime = data[0,0]
        stats.delta = data[1,0] - data[0,0]  # sample spacing from the time column
        stats.npts = len(data[:,0])

        try:
            parts = filename.split('.')
            stats.network = parts[0]
            stats.station = parts[1]
            stats.channel = parts[2]
        except IndexError:
            pass

        stream.append(Trace(data=data[:,1], header=stats))

    return stream
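
A brief usage sketch with placeholder SPECFEM-style file names of the form NET.STA.CHA.semd:

filenames = ['AA.S0001.BXZ.semd', 'AA.S0001.BXX.semd', 'AA.S0001.BXY.semd']
st = ascii('/path/to/OUTPUT_FILES', filenames)
print(st)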
Example #18
def split_traces(s, length_in_sec, min_len, verbose, ofid):
    """
    Split an ObsPy stream object with multiple traces into a stream with traces of a predefined
    maximum length.
    """

    s_new = Stream()

    # - loop through traces ------------------------------------------------------------------------

    for k in np.arange(len(s)):

        # - set initial start time
        start = s[k].stats.starttime

        # - march through the trace until the endtime is reached
        while start < s[k].stats.endtime - min_len:
            s_copy = s[k].copy()
            s_copy.trim(start, start + length_in_sec - 1 / (s[k].stats.sampling_rate))
            s_new.append(s_copy)
            del s_copy
            collect()
            start += length_in_sec

    return s_new
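
A usage sketch; the original script is assumed to import numpy as np and gc's collect at module level:

from obspy.core import read

s = read()  # ObsPy example stream: three 30 s traces at 100 Hz
# Split into windows of at most 10 s, dropping leftovers shorter than 5 s.
s_new = split_traces(s, length_in_sec=10.0, min_len=5.0, verbose=False, ofid=None)
print(len(s_new), 'windows')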
Example #19
def get_waveforms(session, wfdisc, station=None, channel=None, starttime=None, 
        endtime=None, wfids=None):
    """
    Get waveforms.

    Parameters
    ----------
    session : sqlalchemy.orm.Session instance
        Must be bound.
    wfdisc : mapped Wfdisc table class
    station, channel : str, optional
        Desired station, channel code strings
    starttime, endtime : float, optional
        Epoch start times, end times.  
        Traces will be cut to these times.
    wfids : iterable of int, optional
        Wfdisc wfids.  Obviates the above arguments and just returns full Wfdisc
        row waveforms.

    Returns
    -------
    obspy.Stream
        Traces are merged and cut to requested times.

    """
    #TODO: add evids= option?, use with stawin= option in .execute method?
    #TODO: implement get_arrivals if arrivals=True
    Wfdisc = wfdisc

    st = Stream()
    if not wfids:
        t1 = float(starttime)
        t2 = float(endtime)
        sta = station
        chan = channel

        t1_utc = UTCDateTime(float(t1))
        t2_utc = UTCDateTime(float(t2))

        wfs = get_wfdisc_rows(session, Wfdisc, sta, chan, t1, t2)

        #TODO: do arrival stuff here
        for wf in wfs:
            try:
                tr = wfdisc2trace(wf)
                tr.trim(t1_utc, t2_utc)
                st.append(tr)
            except AttributeError:
                #tr is None b/c data couldn't be read
                pass
    else:
        wfs = get_wfdisc_rows(session, Wfdisc, wfids=wfids)
        for wf in wfs:
            try:
                tr = wfdisc2trace(wf)
                st.append(tr)
            except AttributeError:
                pass

    return st
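
A hedged sketch of a call; session and Wfdisc are assumed to come from the surrounding database setup (a bound SQLAlchemy session and a mapped wfdisc table):

from obspy.core import UTCDateTime

t1 = UTCDateTime('2011-01-31T11:55:00').timestamp
# session and Wfdisc must be created elsewhere; shown for illustration only.
st = get_waveforms(session, Wfdisc, station='TEST', channel='HHZ',
                   starttime=t1, endtime=t1 + 600)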
Example #20
def selectUniqueTraces(tr,args):

    # Test on the horizontal component: if only the vertical component
    # exists, no xcorr on horizontals is possible --> crash
  
    st = Stream()
    List = []
    ST = []
    CleanList = []

    for i in range(len(tr)):
        if(tr[i].stats.channel[2:3] == "N"):
           List.append(tr[i].stats.station)

    for i in range(len(tr)):
       a = List.count(tr[i].stats.station)
       if(a > 1):
           ST.append(tr[i].stats.station)

    d = Counter(ST)
    for key in d:
        CleanList.append(key) 
    
    for i in range(len(tr)):
        if CleanList.count(tr[i].stats.station) == 0:
           st.append(tr[i]) 

    return st 
Example #21
 def test_mergePreviews2(self):
     """
     Test case for issue #84.
     """
     # Note: explicitly creating np.ones instead of np.empty in order to
     # prevent NumPy warnings related to max function
     tr1 = Trace(data=np.ones(2880))
     tr1.stats.starttime = UTCDateTime("2010-01-01T00:00:00.670000Z")
     tr1.stats.delta = 30.0
     tr1.stats.preview = True
     tr1.verify()
     tr2 = Trace(data=np.ones(2881))
     tr2.stats.starttime = UTCDateTime("2010-01-01T23:59:30.670000Z")
     tr2.stats.delta = 30.0
     tr2.stats.preview = True
     tr2.verify()
     st1 = Stream([tr1, tr2])
     st1.verify()
     # merge
     st2 = mergePreviews(st1)
     st2.verify()
     # check
     self.assertTrue(st2[0].stats.preview)
     self.assertEqual(st2[0].stats.starttime, tr1.stats.starttime)
     self.assertEqual(st2[0].stats.endtime, tr2.stats.endtime)
     self.assertEqual(st2[0].stats.npts, 5760)
     self.assertEqual(len(st2[0]), 5760)
Example #22
 def test_readAndWriteViaObsPy(self):
     """
     Read and Write files via L{obspy.Stream}
     """
     # read trace
     tr = read(self.file)[0]
     # write comparison trace
     st2 = Stream()
     st2.traces.append(Trace())
     tr2 = st2[0]
     tr2.data = copy.deepcopy(tr.data)
     tr2.stats = copy.deepcopy(tr.stats)
     tempfile = NamedTemporaryFile().name
     st2.write(tempfile, format="SAC")
     # read comparison trace
     tr3 = read(tempfile)[0]
     os.remove(tempfile)
     # check if equal
     self.assertEqual(tr3.stats["station"], tr.stats["station"])
     self.assertEqual(tr3.stats.npts, tr.stats.npts)
     self.assertEqual(tr3.stats["sampling_rate"], tr.stats["sampling_rate"])
     self.assertEqual(tr3.stats.get("channel"), tr.stats.get("channel"))
     self.assertEqual(tr3.stats.get("starttime"), tr.stats.get("starttime"))
     self.assertEqual(tr3.stats.sac.get("nvhdr"), tr.stats.sac.get("nvhdr"))
     np.testing.assert_equal(tr.data, tr3.data)
Example #23
def noise_window_trace(st,window_len_time):
    """Get noise window for st"""
    noise_st = Stream()
    for tr in st:
        noise_tr = tr.slice(tr.stats.starttime,tr.stats.starttime+window_len_time)
        noise_st.append(noise_tr)
    return noise_st
Example #24
    def add(self, stream, verbose=False):
        """
        Process all traces with compatible information and add their spectral
        estimates to the histogram containing the probabilistic psd.
        Also ensures that no piece of data is inserted twice.

        :type stream: :class:`~obspy.core.stream.Stream` or
                :class:`~obspy.core.trace.Trace`
        :param stream: Stream or trace with data that should be added to the
                probabilistic psd histogram.
        :returns: True if appropriate data were found and the ppsd statistics
                were changed, False otherwise.
        """
        # return later if any changes were applied to the ppsd statistics
        changed = False
        # prepare the list of traces to go through
        if isinstance(stream, Trace):
            stream = Stream([stream])
        # select appropriate traces
        stream = stream.select(id=self.id,
                               sampling_rate=self.sampling_rate)
        # save information on available data and gaps
        self.__insert_data_times(stream)
        self.__insert_gap_times(stream)
        # merge depending on skip_on_gaps set during __init__
        stream.merge(self.merge_method, fill_value=0)

        for tr in stream:
            # the following check should not be necessary due to the select()..
            if not self.__sanity_check(tr):
                msg = "Skipping incompatible trace."
                warnings.warn(msg)
                continue
            t1 = tr.stats.starttime
            t2 = tr.stats.endtime
            while t1 + PPSD_LENGTH <= t2:
                if self.__check_time_present(t1):
                    msg = "Already covered time spans detected (e.g. %s), " + \
                          "skipping these slices."
                    msg = msg % t1
                    warnings.warn(msg)
                else:
                    # throw warnings if trace length is different
                    # than one hour..!?!
                    slice = tr.slice(t1, t1 + PPSD_LENGTH)
                    # XXX not good, should be working in place somehow
                    # XXX how to do it with the padding, though?
                    success = self.__process(slice)
                    if success:
                        self.__insert_used_time(t1)
                        if verbose:
                            print(t1)
                        changed = True
                t1 += PPSD_STRIDE  # advance half an hour

            # enforce time limits, pad zeros if gaps
            #tr.trim(t, t+PPSD_LENGTH, pad=True)
        return changed
Example #25
    def test_searchFlagInBlockette(self):
        """
        Test case for obspy.io.mseed.util._search_flag_in_blockette
        """
        # Write dummy file
        npts = 2000
        np.random.seed(42)  # make test reproducible
        data = np.random.randint(-1000, 1000, npts).astype(np.int32)
        # This header ensures presence of blockettes 1000 and 1001
        stat_header = {'network': 'NE', 'station': 'STATI', 'location': 'LO',
                       'channel': 'CHA', 'npts': len(data), 'sampling_rate': 1,
                       'mseed': {'dataquality': 'D',
                                 'blkt1001': {'timing_quality': 63}}}
        stat_header['starttime'] = UTCDateTime(datetime(2012, 8, 1,
                                                        12, 0, 0, 42))
        trace1 = Trace(data=data, header=stat_header)
        st = Stream([trace1])
        with NamedTemporaryFile() as tf:
            st.write(tf, format="mseed", encoding=11, reclen=512)
            tf.seek(0, os.SEEK_SET)
            file_name = tf.name

            with open(file_name, "rb") as file_desc:
                file_desc.seek(0, os.SEEK_SET)
                # Test from file start
                read_bytes = util._search_flag_in_blockette(
                    file_desc, 48, 1001, 4, 1)
                self.assertFalse(read_bytes is None)
                self.assertEqual(unpack(native_str(">B"), read_bytes)[0], 63)

                # Test from middle of a record header
                file_desc.seek(14, os.SEEK_CUR)
                file_pos = file_desc.tell()
                read_bytes = util._search_flag_in_blockette(
                    file_desc, 34, 1000, 6, 1)
                self.assertFalse(read_bytes is None)
                self.assertEqual(unpack(native_str(">B"), read_bytes)[0], 9)
                # Check that file_desc position has not changed
                self.assertEqual(file_desc.tell(), file_pos)

                # Test from middle of a record data
                file_desc.seek(60, os.SEEK_CUR)
                read_bytes = util._search_flag_in_blockette(
                    file_desc, -26, 1001, 5, 1)
                self.assertFalse(read_bytes is None)
                self.assertEqual(unpack(native_str(">B"), read_bytes)[0], 42)

                # Test another record. There are at least 3 records in a
                # mseed file with 2000 data points and 512 bytes record length
                file_desc.seek(1040, os.SEEK_SET)
                read_bytes = util._search_flag_in_blockette(file_desc,
                                                            32, 1001, 4, 1)
                self.assertEqual(unpack(native_str(">B"), read_bytes)[0], 63)

                # Test missing blockette
                read_bytes = util._search_flag_in_blockette(file_desc,
                                                            32, 201, 4, 4)
                self.assertIs(read_bytes, None)
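Example #26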
 def test_SavingSmallASCII(self):
     """
     Tests writing small ASCII strings.
     """
     tempfile = NamedTemporaryFile().name
     st = Stream()
     st.append(Trace(data=np.fromstring("A" * 8, "|S1")))
     st.write(tempfile, format="MSEED")
     os.remove(tempfile)
Example #27
    def _writeData(self, traceData, stats, timeObj):
        streamObj = Stream([Trace(data=traceData, header=stats)])

        filename = self._prepareFilename(timeObj)
        offset = int(np.mean(streamObj.traces[0].data))
        streamObj.traces[0].data = np.array([x - offset for x in streamObj.traces[0].data])
        
        self.logger.debug("["+ strftime('%X') + "] Saving %d samples (corrected by %d) to %s..." % (len(traceData), offset, filename))
        streamObj.write(filename, format='MSEED')
Example #28
 def test_writeViaObsPy(self):
     """
     Writing artificial files via L{obspy.Stream}
     """
     st = Stream(traces=[Trace(header={"sac": {}}, data=self.testdata)])
     tempfile = NamedTemporaryFile().name
     st.write(tempfile, format="SAC")
     tr = read(tempfile)[0]
     os.remove(tempfile)
     np.testing.assert_array_almost_equal(self.testdata, tr.data)
Example #29
    def getWaveform(self, network, station, location, channel, starttime,
                    endtime, format="MSEED"):
        """
        Retrieves waveform data from the NERIES Web service and returns a ObsPy
        Stream object.

        :type network: str
        :param network: Network code, e.g. ``'BW'``.
        :type station: str
        :param station: Station code, e.g. ``'MANZ'``.
        :type location: str
        :param location: Location code, e.g. ``'01'``. Location code may
            contain wild cards.
        :type channel: str
        :param channel: Channel code, e.g. ``'EHE'``. Channel code may
            contain wild cards.
        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Start date and time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: End date and time.
        :type format: ``'FSEED'`` or ``'MSEED'``, optional
        :param format: Output format. Either as full SEED (``'FSEED'``) or
            Mini-SEED (``'MSEED'``) volume. Defaults to ``'MSEED'``.
        :return: ObsPy :class:`~obspy.core.stream.Stream` object.

        .. rubric:: Example

        >>> from obspy.neries import Client
        >>> client = Client(user='******')
        >>> dt = UTCDateTime("2009-04-01T00:00:00")
        >>> st = client.getWaveform("NL", "WIT", "", "BH*", dt, dt+30)
        >>> print(st)  # doctest: +ELLIPSIS
        3 Trace(s) in Stream:
        NL.WIT..BHZ | 2009-04-01T00:00:00.010200Z - ... | 40.0 Hz, 1201 samples
        NL.WIT..BHN | 2009-04-01T00:00:00.010200Z - ... | 40.0 Hz, 1201 samples
        NL.WIT..BHE | 2009-04-01T00:00:00.010200Z - ... | 40.0 Hz, 1201 samples
        """
        tf = NamedTemporaryFile()
        self.saveWaveform(tf._fileobj, network, station, location, channel,
                          starttime, endtime, format=format)
        # read stream using obspy.mseed
        tf.seek(0)
        try:
            stream = read(tf.name, 'MSEED')
        except Exception:
            stream = Stream()
        tf.close()
        # remove temporary file:
        try:
            os.remove(tf.name)
        except OSError:
            pass
        # trim stream
        stream.trim(starttime, endtime)
        return stream
Example #30
 def test_setVersion(self):
     """
     Tests if SAC version is set when writing
     """
     tempfile = NamedTemporaryFile().name
     np.random.seed(815)
     st = Stream([Trace(data=np.random.randn(1000))])
     st.write(tempfile, format="SAC")
     st2 = read(tempfile, format="SAC")
     os.remove(tempfile)
     self.assertEqual(st2[0].stats["sac"].nvhdr, 6)
Example #31
def test_create_empty_trace():
    """TimeseriesUtility_test.test_create_empty_trace()"""
    trace1 = _create_trace([1, 1, 1, 1, 1], "H", UTCDateTime("2018-01-01"))
    trace2 = _create_trace([2, 2], "E", UTCDateTime("2018-01-01"))
    observatory = "Test"
    interval = "minute"
    network = "NT"
    location = "R0"
    trace3 = TimeseriesUtility.create_empty_trace(
        starttime=trace1.stats.starttime,
        endtime=trace1.stats.endtime,
        observatory=observatory,
        channel="F",
        type="variation",
        interval=interval,
        network=network,
        station=trace1.stats.station,
        location=location,
    )
    timeseries = Stream(traces=[trace1, trace2])
    # For continuity set stats to be same for all traces
    for trace in timeseries:
        trace.stats.observatory = observatory
        trace.stats.type = "variation"
        trace.stats.interval = interval
        trace.stats.network = network
        trace.stats.station = trace1.stats.station
        trace.stats.location = location
    timeseries += trace3
    assert_equal(len(trace3.data), trace3.stats.npts)
    assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
    TimeseriesUtility.pad_timeseries(
        timeseries=timeseries,
        starttime=trace1.stats.starttime,
        endtime=trace1.stats.endtime,
    )
    assert_equal(len(trace3.data), trace3.stats.npts)
    assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
    # Change starttime by more than 1 delta
    starttime = trace1.stats.starttime
    endtime = trace1.stats.endtime
    TimeseriesUtility.pad_timeseries(timeseries, starttime - 90, endtime + 90)
    assert_equal(len(trace3.data), trace3.stats.npts)
    assert_equal(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
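Example #32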
    def put_timeseries(
        self,
        timeseries,
        starttime=None,
        endtime=None,
        channels=None,
        type=None,
        interval=None,
    ):
        """Store timeseries data.

        Parameters
        ----------
        timeseries : obspy.core.Stream
            stream containing traces to store.
        starttime : UTCDateTime
            time of first sample in timeseries to store.
            uses first sample if unspecified.
        endtime : UTCDateTime
            time of last sample in timeseries to store.
            uses last sample if unspecified.
        channels : array_like
            list of channels to store, optional.
            uses default if unspecified.
        type : {'definitive', 'provisional', 'quasi-definitive', 'variation'}
            data type, optional.
            uses default if unspecified.
        interval : {'daily', 'hourly', 'minute', 'monthly', 'second'}
            data interval, optional.
            uses default if unspecified.
        Raises
        ------
        TimeseriesFactoryException
            if any errors occur.
        """
        if starttime is not None or endtime is not None:
            timeseries = timeseries.copy()
            timeseries.trim(starttime=starttime, endtime=endtime)
        if channels is not None:
            filtered = Stream()
            for channel in channels:
                filtered += timeseries.select(channel=channel)
            timeseries = filtered
        timeseries.plot()
Example #33
def readSACXY(filename,
              headonly=False,
              debug_headers=False,
              **kwargs):  # @UnusedVariable
    """
    Reads an alphanumeric SAC file and returns an ObsPy Stream object.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: str
    :param filename: Alphanumeric SAC file to be read.
    :type headonly: bool, optional
    :param headonly: If set to True, read only the head. This is most useful
        for scanning available data in huge (temporary) data sets.
    :type debug_headers: bool, optional
    :param debug_headers: Extracts also the SAC headers ``'nzyear', 'nzjday',
        'nzhour', 'nzmin', 'nzsec', 'nzmsec', 'delta', 'scale', 'npts',
        'knetwk', 'kstnm', 'kcmpnm'`` which are usually directly mapped to the
        :class:`~obspy.core.stream.Stream` object if set to ``True``. Those
        values are not synchronized with the Stream object itself and won't
        be used during writing of a SAC file! Defaults to ``False``.
    :rtype: :class:`~obspy.core.stream.Stream`
    :return: An ObsPy Stream object.

    .. rubric:: Example

    >>> from obspy.core import read # doctest: +SKIP
    >>> st = read("/path/to/testxy.sac") # doctest: +SKIP
    """
    t = SacIO(debug_headers=debug_headers)
    if headonly:
        t.ReadSacXYHeader(filename)
    else:
        t.ReadSacXY(filename)
    # assign all header entries to a new dictionary compatible with ObsPy
    header = t.get_obspy_header()

    if headonly:
        tr = Trace(header=header)
    else:
        tr = Trace(header=header, data=t.seis)
    return Stream([tr])
Example #34
    def _convert_to_stream(receiver, components, data, dt_out, starttime,
                           add_band_code=True):
        # Convert to an ObsPy Stream object.
        st = Stream()
        band_code = get_band_code(dt_out)
        instaseis_header = AttribDict(mu=data["mu"])

        for comp in components:
            tr = Trace(
                data=data[comp],
                header={"delta": dt_out,
                        "starttime": starttime,
                        "station": receiver.station,
                        "network": receiver.network,
                        "location": receiver.location,
                        "channel": add_band_code * (band_code + 'X') + comp,
                        "instaseis": instaseis_header})
            st += tr
        return st
Example #35
def test_get_stream_gaps_channels():
    """TimeseriesUtility_test.test_get_stream_gaps_channels()

    test that gaps are only checked in specified channels.
    """
    stream = Stream([
        __create_trace('H', [numpy.nan, 1, 1, numpy.nan, numpy.nan]),
        __create_trace('Z', [0, 0, 0, 1, 1, 1])
    ])
    for trace in stream:
        # set time of first sample
        trace.stats.starttime = UTCDateTime('2015-01-01T00:00:00Z')
        # set sample rate to 1 second
        trace.stats.delta = 1
    # find gaps
    gaps = TimeseriesUtility.get_stream_gaps(stream, ['Z'])
    assert_equals('H' in gaps, False)
    assert_equals(len(gaps['Z']), 0)
Example #36
def readGSE1(filename,
             headonly=False,
             verify_chksum=True,
             **kwargs):  # @UnusedVariable
    """
    Reads a GSE1 file and returns a Stream object.

    GSE1 files containing multiple WID1 entries/traces are supported.

    .. warning::
        This function should NOT be called directly, it registers via the
        ObsPy :func:`~obspy.core.stream.read` function, call this instead.

    :type filename: string
    :param filename: GSE1 file to be read.
    :type headonly: boolean, optional
    :param headonly: If True read only header of GSE1 file.
    :type verify_chksum: boolean, optional
    :param verify_chksum: If True verify Checksum and raise Exception if
        it is not correct.
    :rtype: :class:`~obspy.core.stream.Stream`
    :returns: Stream object containing header and data.

    .. rubric:: Example

    >>> from obspy.core import read
    >>> st = read("/path/to/y2000.gse")
    """
    traces = []
    # read GSE1 file
    fh = open(filename, 'rb')
    while True:
        try:
            if headonly:
                header = libgse1.readHeader(fh)
                traces.append(Trace(header=header))
            else:
                header, data = libgse1.read(fh, verify_chksum=verify_chksum)
                traces.append(Trace(header=header, data=data))
        except EOFError:
            break
    fh.close()
    return Stream(traces=traces)
Example #37
 def test_bugfix_setStats3(self):
     """
     Third test related to issue #4.
     """
     st = Stream([Trace(header={'station': 'BGLD'})])
     self.assertEquals(st[0].stats.station, 'BGLD')
     st = st + st
     st[0].stats.station = 'AAA'
     st = st + st
     st[3].stats.station = 'BBB'
     # changed in rev. 1625: adding streams doesn't deepcopy
      # therefore all traces in the test stream are identical
     # (python list behavior)
     for tr in st:
         self.assertTrue(tr == st[0])
         self.assertEquals(tr.stats.station, 'BBB')
         self.assertEquals(tr.stats['station'], 'BBB')
         self.assertEquals(tr.stats.get('station'), 'BBB')
         self.assertTrue('BBB' in tr.stats.values())
Example #38
def create_sinsoidal_trace_w_decay(sampling_rate=40.0,
                                   decay=0.01,
                                   period=0.5,
                                   duration=5.0,
                                   **header_kwargs):
    header = {
        'sampling_rate': sampling_rate,
        'starttime': UTCDateTime(0),
        'channel': 'Z',
        'station': 'test'
    }
    header = {**header, **header_kwargs}
    x = np.linspace(0, duration, num=int(duration * sampling_rate))
    data = np.sin(x * 2 * np.pi / period) * np.exp(-decay *
                                                   (x - duration / 2)**2)

    trace = Trace(data=data, header=header)
    trace.stats.data_type = 'test'
    return Stream(traces=[trace])
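
A quick usage sketch; extra keyword arguments are merged into the header, so station below overrides the default 'test':

st = create_sinsoidal_trace_w_decay(sampling_rate=100.0, period=1.0,
                                    duration=10.0, station='SYN1')
st.plot()  # requires matplotlib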
Example #39
def write_segy(f, data):
    """
    Write a 2D NumPY array to an open file handle f.
    """
    stream = Stream()

    # Data is in [0, 1] so rescale to 8-bit.
    # USING 16-bit because can't save as 8-bit int in ObsPy.
    data = np.int16((data - 0.5) * 255)

    for i, trace in enumerate(data):

        # Make the trace.
        tr = Trace(trace)

        # Add required data.
        tr.stats.delta = 0.004

        # Add yet more to the header (optional).
        tr.stats.segy = {'trace_header': SEGYTraceHeader()}
        tr.stats.segy.trace_header.trace_sequence_number_within_line = i + 1
        tr.stats.segy.trace_header.receiver_group_elevation = 0

        # Append the trace to the stream.
        stream.append(tr)

    # Text header.
    stream.stats = AttribDict()
    stream.stats.textual_file_header = '{:80s}'.format(
        'Generated by Keats.').encode()
    stream.stats.textual_file_header += '{:80s}'.format(
        'Sample interval unknown.').encode()
    stream.stats.textual_file_header += '{:80s}'.format(
        'IEEE floats.').encode()

    # Binary header.
    stream.stats.binary_file_header = SEGYBinaryFileHeader()
    stream.stats.binary_file_header.trace_sorting_code = 4
    stream.stats.binary_file_header.seg_y_format_revision_number = 0x0100

    # Write the data.
    # Encoding should be 8, but that doesn't work.
    stream.write(f, format='SEGY', data_encoding=3, byteorder=sys.byteorder)

    return f
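
A usage sketch; the function expects a 2D array scaled to [0, 1] and an open binary file handle:

import numpy as np

data = np.random.rand(100, 500)  # 100 traces, 500 samples each, in [0, 1]
with open('synthetic.segy', 'wb') as f:
    write_segy(f, data)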
Example #40
 def wrapper(network,
             station,
             location,
             channel,
             starttime,
             endtime,
             event=None):
     assert (win is None) == (event is None)
     if event is None:
         st = starttime
         et = endtime
     else:
         etime = get_origin(event).time
         evid = get_eventid(event)
         st = etime + win[0]
         et = etime + win[1]
         if starttime < st or endtime > et:
             log = logging.getLogger('waveform_cache')
             log.error('Window has to be inside (%.1fs, %.1fs)', *win)
             raise ValueError
     seedid = '.'.join((network, station, location, channel))
     if win is None:
         fname = path % (seedid, str(st)[:19], str(et)[:19])
     else:
         assert evid is not None
         fname = path % (evid, seedid, win[0], win[1])
     if os.path.exists(fname):
         stream = read(fname, format)
     if (not os.path.exists(fname)
             or (request_again and len(stream) < 3)):
         args = (network, station, location, channel, st, et)
         try:
             stream = gw_orig(*args)
             stream.merge(method=1,
                          fill_value='interpolate',
                          interpolation_samples=-1)
             if len(stream) == 0:
                 raise
         except (KeyboardInterrupt, SystemExit):
             raise
         except Exception as ex:
             stream = Stream(traces=[Trace(data=np.array([0.]))])
             log = logging.getLogger('waveform_cache')
             msg = 'channel %s: error while retrieving data: %s'
             log.info(msg, seedid, ex)
         stream.write(fname, format)
     if len(stream) == 1 and len(stream[0]) == 1:
         raise ValueError('No data available')
     stream.trim(starttime, endtime)
     return stream
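Example #41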
    def _get_input_timeseries(self, observatory, channels, starttime, endtime):
        """Get timeseries from the input factory for requested options.

        Parameters
        ----------
        observatory : array_like
            observatories to request.
        channels : array_like
            channels to request.
        starttime : obspy.core.UTCDateTime
            time of first sample to request.
        endtime : obspy.core.UTCDateTime
            time of last sample to request.

        Returns
        -------
        timeseries : obspy.core.Stream
        """
        timeseries = Stream()
        for obs in observatory:
            # get input interval for observatory
            # do this per observatory in case an
            # algorithm needs different amounts of data
            input_start, input_end = self._algorithm.get_input_interval(
                start=starttime,
                end=endtime,
                observatory=obs,
                channels=channels)
            if input_start is None or input_end is None:
                continue
            timeseries += self._inputFactory.get_timeseries(
                observatory=obs,
                starttime=input_start,
                endtime=input_end,
                channels=channels,
            )
        return timeseries
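Example #42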
def test_create_empty_trace():
    """TimeseriesUtility_test.test_create_empty_trace()
    """
    trace1 = _create_trace([1, 1, 1, 1, 1], 'H', UTCDateTime("2018-01-01"))
    trace2 = _create_trace([2, 2], 'E', UTCDateTime("2018-01-01"))
    observatory = 'Test'
    interval = 'minute'
    network = 'NT'
    location = 'R0'
    trace3 = TimeseriesUtility.create_empty_trace(
        starttime=trace1.stats.starttime,
        endtime=trace1.stats.endtime,
        observatory=observatory,
        channel='F',
        type='variation',
        interval=interval,
        network=network,
        station=trace1.stats.station,
        location=location)
    timeseries = Stream(traces=[trace1, trace2])
    # For continuity set stats to be same for all traces
    for trace in timeseries:
        trace.stats.observatory = observatory
        trace.stats.type = 'variation'
        trace.stats.interval = interval
        trace.stats.network = network
        trace.stats.station = trace1.stats.station
        trace.stats.location = location
    timeseries += trace3
    assert_equals(len(trace3.data), trace3.stats.npts)
    assert_equals(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
    TimeseriesUtility.pad_timeseries(timeseries=timeseries,
                                     starttime=trace1.stats.starttime,
                                     endtime=trace1.stats.endtime)
    assert_equals(len(trace3.data), trace3.stats.npts)
    assert_equals(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
    # Change starttime by more than 1 delta
    starttime = trace1.stats.starttime
    endtime = trace1.stats.endtime
    TimeseriesUtility.pad_timeseries(timeseries, starttime - 90, endtime + 90)
    assert_equals(len(trace3.data), trace3.stats.npts)
    assert_equals(timeseries[0].stats.starttime, timeseries[2].stats.starttime)
Example #43
 def __init__(self, parent=None, evtdata=None):
     self._parent = parent
     if self.getParent():
         self.comp = parent.getComponent()
     else:
         self.comp = 'Z'
         self.wfdata = Stream()
     self._new = False
     if isinstance(evtdata, ObsPyEvent) or isinstance(evtdata, Event):
         pass
     elif isinstance(evtdata, dict):
         evt = readPILOTEvent(**evtdata)
         evtdata = evt
     elif isinstance(evtdata, str):
         try:
             cat = read_events(evtdata)
             if len(cat) != 1:
                 raise ValueError('ambiguous event information for file: '
                                  '{file}'.format(file=evtdata))
             evtdata = cat[0]
         except TypeError as e:
             if 'Unknown format for file' in str(e):
                 if 'PHASES' in evtdata:
                     picks = picksdict_from_pilot(evtdata)
                     evtdata = ObsPyEvent()
                     evtdata.picks = picks_from_picksdict(picks)
                 elif 'LOC' in evtdata:
                  raise NotImplementedError('PILOT location information '
                                            'read support not yet '
                                            'implemented.')
                 else:
                     raise e
             else:
                 raise e
     else:  # create an empty Event object
         self.setNew()
         evtdata = ObsPyEvent()
         evtdata.picks = []
     self.evtdata = evtdata
     self.wforiginal = None
     self.cuttimes = None
     self.dirty = False
Example #44
def read_specfem_seismogram(output_files, network, station, band):
    st = Stream()
    for component in 'ZNE':
        channel = '%sX%s' % (band, component)
        filename = os.path.join(
            output_files, '%s.%s.%s.sem.ascii' % (network, station, channel))
        tmp = np.genfromtxt(filename)

        stats = Stats()
        stats.network = network
        stats.station = station
        stats.channel = channel
        stats.delta = tmp[1, 0] - tmp[0, 0]
        stats.npts = tmp.shape[0]
        stats.starttime = tmp[0, 0]

        tr = Trace(tmp[:, 1], stats)
        st += tr

    return st
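
A usage sketch with placeholder SPECFEM3D output names; files like AA.S0001.BXZ.sem.ascii are assumed to exist in the directory:

st = read_specfem_seismogram('/path/to/OUTPUT_FILES', 'AA', 'S0001', band='B')
print(st)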
Example #45
    def request_from_server(self, station, network, channel, location, start_time, end_time):

        stream = Stream()

        try:
            self.logger.debug('Before getWaveform....')
            stream = self.client.getWaveform(network,
                                             station,
                                             location,
                                             channel,
                                             start_time,
                                             end_time)
            for cur_trace in stream:
                cur_trace.stats.unit = 'counts'
            self.logger.debug('got waveform: %s', stream)
            self.logger.debug('leave try')
        except Exception as e:
            self.logger.exception("Error connecting to waveserver: %s", e)

        return stream
Example #46
    def load_data(self):
        """
        Function to read binary SAC data

        Reads binary SAC data into a list of :class:`~obspy.core.stream.Stream` objects,
        and assigns the list to the attribute ``streams``.

        .. rubric:: Example

        .. code-block:: python

           >>> config = tdmtpy.Configure(path_to_file="example/synthetic/mtinv.in")
           >>> tdmt = tdmtpy.Inversion(config=config)
           >>> tdmt.load_data()
           >>> tdmt.streams
           [3 Trace(s) in Stream:
           BK.FARB.00.Z | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.FARB.00.R | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.FARB.00.T | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples,
           3 Trace(s) in Stream:
           BK.SAO.00.Z | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.SAO.00.R | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.SAO.00.T | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples,
           3 Trace(s) in Stream:
           BK.CMB.00.Z | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.CMB.00.R | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.CMB.00.T | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples,
           3 Trace(s) in Stream:
           BK.MNRC.00.Z | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.MNRC.00.R | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples
           BK.MNRC.00.T | 1969-12-31T23:59:30.000000Z - 1970-01-01T00:04:10.000000Z | 1.0 Hz, 281 samples]

        """
        files = self.config.path_to_data + "/" + self.config.df.station.values
        streams = [Stream() for _ in files]
        for st, file in zip(streams, files):
            for component in self.config.components:
                st.append(
                    read("%s.%s.dat" % (file, component), format="SAC")[0])

        self.streams = streams
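Example #47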
def test_Audet2016():
    import matplotlib
    matplotlib.use('Agg')
    from obspy.core import Stream
    from obspy.signal.rotate import rotate_ne_rt
    from telewavesim import utils as ut
    from telewavesim import wiggle as wg
    modfile = resource_filename('telewavesim',
                                'examples/models/model_Audet2016.txt')
    wvtype = 'P'
    npts = 3000  # Number of samples
    dt = 0.01  # Sample distance in seconds
    dp = 2000.  # Deployment depth below sea level in meters
    c = 1.500    # P-wave velocity in salt water (km/s)
    rhof = 1027.  # Density of salt water (kg/m^3)
    slow = 0.06  # Horizontal slowness (or ray parameter) in s/km
    # Back-azimuth direction in degrees
    # (has no influence if model is isotropic)
    baz = 0.
    model = ut.read_model(modfile)
    assert list(model.rho) == [2800.0, 2800.0, 3200.0]
    t1 = ut.calc_ttime(model, slow, wvtype=wvtype)
    assert round(t1, 1) == 1.1
    trxyz = ut.run_plane(model, slow, npts, dt, baz=baz, wvtype=wvtype,
                         obs=True, dp=dp, c=c, rhof=rhof)
    tfs = ut.tf_from_xyz(trxyz, pvh=False)
    ntr = trxyz[0]  # North component
    etr = trxyz[1]  # East component
    ztr = trxyz[2]  # Vertical component
    rtr = ntr.copy()  # Radial component
    ttr = etr.copy()  # Transverse component
    rtr.data, ttr.data = rotate_ne_rt(ntr.data, etr.data, baz)
    strf = Stream(traces=[tfs[0], ztr, rtr])
    # Set frequency corners in Hz
    f1 = 0.1
    f2 = 1.0
    # Plot as wiggles
    with tempfile.TemporaryDirectory() as tempdir:
        wg.pw_wiggles_Audet2016(strf, t1=t1, tmax=10., f1=f1, f2=f2,
                                ftitle=join(tempdir, 'audet2016'),
                                scale=2.e-7, save=True)
Example #48
    def _send_data_command(self, cmd, data_format, as_stream=True, keep_files=False):
        """ Send a waveform request command and process the results.
        """

        data_format = data_format.lower()
        if data_format not in VALID_FORMATS:
            raise ValueError('Invalid data format: {}'.format(data_format))
        if self.verbose:
            print("data_format={} cmd={}".format(data_format, cmd))

        file_lst = []
        dir_lst = []
        self.socket.sendall('{}\n'.format(data_format).encode('utf-8'))
        self._receive_data(dir_lst, file_lst)
        self.socket.sendall(cmd.encode('utf-8'))
        self._receive_data(dir_lst, file_lst)

        waveform_stream = None
        if as_stream:
            waveform_stream = Stream()
            ntraces = 0
            for f in file_lst:
                try:
                    if self.verbose:
                        print('Reading {}'.format(f))
                    # obspy.core.read returns a Stream, possibly holding
                    # several traces per file
                    traces = obspy.core.read(f)
                    waveform_stream += traces
                    ntraces += len(traces)
                except TypeError:
                    if self.verbose:
                        print('{} is in unknown format. Skipping.'.format(f))

                if not keep_files:
                    if self.verbose:
                        print("Removing {} after reading".format(f))
                    if os.path.isfile(f):
                        os.remove(f)
            print('Processed {} waveform traces'.format(ntraces))

        return waveform_stream
def test_pad_timeseries():
    """TimeseriesUtility_test.test_pad_timeseries()
    """
    trace1 = _create_trace([1, 1, 1, 1, 1], 'H', UTCDateTime("2018-01-01"))
    trace2 = _create_trace([2, 2], 'E', UTCDateTime("2018-01-01"))
    timeseries = Stream(traces=[trace1, trace2])
    TimeseriesUtility.pad_timeseries(timeseries=timeseries,
                                     starttime=trace1.stats.starttime,
                                     endtime=trace1.stats.endtime)
    assert_equals(len(trace1.data), len(trace2.data))
    assert_equals(trace1.stats.starttime, trace2.stats.starttime)
    assert_equals(trace1.stats.endtime, trace2.stats.endtime)
    # change starttime by less than 1 delta
    starttime = trace1.stats.starttime
    endtime = trace1.stats.endtime
    TimeseriesUtility.pad_timeseries(timeseries, starttime - 30, endtime + 30)
    assert_equals(trace1.stats.starttime, starttime)
    # Change starttime by more than 1 delta
    TimeseriesUtility.pad_timeseries(timeseries, starttime - 90, endtime + 90)
    assert_equals(trace1.stats.starttime, starttime - 60)
    assert_equals(numpy.isnan(trace1.data[0]), True)
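
The _create_trace helper is not shown in this snippet; a hypothetical version consistent with the assertions above (the 60 s starttime shift for a pad of more than one delta implies one-minute data) might look like:

import numpy
from obspy.core import Trace

def _create_trace(data, channel, starttime, delta=60.0):
    # hypothetical helper: wrap a list of samples into a one-minute Trace
    stats = {'channel': channel, 'starttime': starttime, 'delta': delta}
    return Trace(data=numpy.array(data, dtype=numpy.float64), header=stats)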
Example #50
 def setUp(self):
     # directory where the test files are located
     self.path = os.path.join(os.path.dirname(__file__), 'data')
     self.filename = os.path.join(self.path, 'test.wfdisc')
     # set up stream for validation
     header = {}
     header['station'] = 'TEST'
     header['starttime'] = UTCDateTime(1296474900.0)
     header['sampling_rate'] = 80.0
     header['calib'] = 1.0
     header['calper'] = 1.0
     header['_format'] = 'CSS'
     filename = os.path.join(self.path, '201101311155.10.ascii.gz')
     data = np.loadtxt(filename, dtype='int')
     # traces in the test files are sorted ZEN
     st = Stream()
     for x, cha in zip(data.reshape((3, 4800)), ('HHZ', 'HHE', 'HHN')):
         tr = Trace(x, header.copy())
         tr.stats.channel = cha
         st += tr
     self.st_result = st
def time_difference(isource, j):
    """Compute the time difference between data and synthetics.

    Input:
    isource = index of the source
    j = scale at which we run the inversion process

    Relies on module-level parameters (nrec, rstart, rend, nt_s, nt_ref,
    dt_s, dt_ref, tstart, sstart, f) defined elsewhere in the script.
    """

    namedir1 = 'Source_' + str(isource + 1)
    os.chdir(namedir1)

    filename_d = 'OUTPUT_FILES/data_process.su'
    filename_s = 'OUTPUT_FILES/synthetics_process.su'
    filename_i = 'OUTPUT_FILES/Up_file_single.su'
    stream_d = read(filename_d, format='SU', byteorder='<')
    stream_s = read(filename_s, format='SU', byteorder='<')
    stream_i = read(filename_i, format='SU')

    misfit = 0.0
    stream_adj = Stream()
    for irec in range(0, nrec):
        adj = numpy.zeros(nt_s)
        trace_i = stream_i[irec].copy()
        if irec >= rstart - 1 and irec <= rend - 1:
            trace_d = stream_d[irec].copy()
            trace_s = stream_s[irec].copy()
            if trace_d.data.size != trace_s.data.size:
                raise ValueError(
                    "Data and synthetic signals should have the same length")
            nstep = trace_s.data.size
            adj_temp = numpy.zeros(nt_ref)
            starttime = tstart[j - 1] + irec * 25.0 * sstart[j - 1]
            istart = int(starttime / dt_ref)
            for it in range(0, nstep):
                misfit += 0.5 * numpy.power(
                    f * trace_s.data[it] - trace_d.data[it], 2.0)
                adj_temp[istart + it] = f * trace_s.data[it] - trace_d.data[it]
            trace_adj = Trace(data=adj_temp, header=trace_s.stats)
            trace_adj.interpolate(sampling_rate=1.0 / dt_s,
                                  starttime=trace_adj.stats.starttime,
                                  npts=nt_s)
        else:
            trace_adj = Trace(data=adj, header=trace_i.stats)
        trace_adj.data = numpy.require(trace_adj.data, dtype=numpy.float32)
        stream_adj.append(trace_adj)
    stream_adj.write('SEM/Up_file_single.su.adj', format='SU')
    os.chdir('..')

    return misfit
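
Stripped of the windowing and resampling bookkeeping, the inner loop above is a plain least-squares waveform misfit; a minimal sketch on bare numpy arrays, ignoring the scale factor f:

import numpy

def l2_misfit_and_adjoint(syn, dat):
    # misfit = 0.5 * sum((s - d)**2); the adjoint source is the residual s - d
    residual = syn - dat
    return 0.5 * numpy.sum(residual ** 2), residual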
Example #52
    def _createStream(self, starttime, endtime, sampling_rate):
        """
        Helper method to create a Stream object that can be used for testing
        waveform plotting.

        Takes the time frame of the Stream to be created and a sampling rate.
        Any other header information will have to be adjusted on a case by case
        basis. Please remember to use the same sampling rate for one Trace as
        merging and plotting will not work otherwise.

        This method will create a single sine curve to a first approximation
        with superimposed 10 smaller sine curves on it.

        :return: Stream object
        """
        time_delta = endtime - starttime
        # np.empty below needs an integer sample count
        number_of_samples = int(time_delta * sampling_rate) + 1
        # Calculate first sine wave.
        curve = np.linspace(0, 2 * np.pi, number_of_samples // 2)
        # Superimpose it with a smaller but shorter wavelength sine wave.
        curve = np.sin(curve) + 0.2 * np.sin(10 * curve)
        # To get a thick curve alternate between two curves.
        data = np.empty(number_of_samples)
        # Check if even number and adjust if necessary.
        if number_of_samples % 2 == 0:
            data[0::2] = curve
            data[1::2] = curve + 0.2
        else:
            data[-1] = 0.0
            data[0:-1][0::2] = curve
            data[0:-1][1::2] = curve + 0.2
        tr = Trace()
        tr.stats.starttime = starttime
        tr.stats.sampling_rate = float(sampling_rate)
        # Fill dummy header.
        tr.stats.network = 'BW'
        tr.stats.station = 'OBSPY'
        tr.stats.channel = 'TEST'
        tr.data = data
        return Stream(traces=[tr])
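
The even-length branch of the interleaving above is easiest to see on a tiny example: two vertically offset copies of the same curve alternate sample by sample, so the plotted line looks thicker.

import numpy as np

curve = np.sin(np.linspace(0, 2 * np.pi, 5))
data = np.empty(10)
data[0::2] = curve        # even indices take the original curve
data[1::2] = curve + 0.2  # odd indices take a vertically offset copy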
Example #53
def pick(config_file):
    """
    :param config_file: user supplied config file for picking
    :return: tba
    """
    log.info('Reading config file...')
    cf = config.Config(config_file)

    log.info('Preparing time series')
    st = Stream()

    if cf.seeds:
        for f in cf.miniseeds:
            st += obspy_read(f)
        log.info('Miniseeds accumulated')
    else:
        raise NotImplementedError
    log.info('Applying picking algorithm')
    picker = pickermaps[cf.picker['algorithm']](**cf.picker['params'])

    event = picker.event(st, config=cf)
    event.write(filename='test.xml', format='SC3ML')
Example #54
    def process(self, stream):
        """Run algorithm for a stream.

        Processes all traces in the stream.

        Parameters
        ----------
        stream : obspy.core.Stream
            stream of data to process

        Returns
        -------
        out : obspy.core.Stream
            stream containing 1 trace per original trace. (h, e, z) -> (X, Y, Z)
        """

        inchannels = self.get_input_channels()
        outchannels = self.get_output_channels()
        raws = np.vstack(
            [
                stream.select(channel=channel)[0].data
                for channel in inchannels
                if channel != "F"
            ]
            + [np.ones_like(stream[0].data)]
        )
        adjusted = np.matmul(self.matrix, raws)
        out = Stream(
            [
                self.create_trace(
                    outchannels[i],
                    stream.select(channel=inchannels[i])[0].stats,
                    adjusted[i],
                )
                for i in range(len(adjusted) - 1)
            ]
        )
        if "F" in inchannels and "F" in outchannels:
            f = stream.select(channel="F")[0]
            out += self.create_trace("F", f.stats, f.data + self.pier_correction)
        return out
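
Stacking a row of ones under the input channels is what lets a single matrix apply both a linear map and a constant offset (an affine transform). A sketch with placeholder values, not a real adjusted-data matrix:

import numpy as np

# placeholder 4x4 affine matrix: the last column carries constant offsets,
# the last row keeps the appended ones-row equal to one
matrix = np.array([
    [1.0, 0.0, 0.0, 0.1],
    [0.0, 1.0, 0.0, -0.2],
    [0.0, 0.0, 1.0, 0.3],
    [0.0, 0.0, 0.0, 1.0],
])
h = np.array([20000.0, 20010.0])
e = np.array([100.0, 101.0])
z = np.array([43000.0, 43005.0])
raws = np.vstack([h, e, z, np.ones_like(h)])
adjusted = np.matmul(matrix, raws)  # rows 0..2 are X, Y, Z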
Example #55
 def test_bugWriteReadFloat32SEEDWin32(self):
     """
     Test case for issue #64.
     """
     # create stream object
     data = np.array([
         395.07809448, 395.0782, 1060.28112793, -1157.37487793,
         -1236.56237793, 355.07028198, -1181.42175293
     ],
                     dtype=np.float32)
     st = Stream([Trace(data=data)])
     tempfile = NamedTemporaryFile().name
     writeMSEED(st, tempfile, format="MSEED")
     # read temp file directly without libmseed
     with open(tempfile, "rb") as fp:
         bin_data = np.array(unpack(">7f", fp.read()[56:84]))
     np.testing.assert_array_equal(data, bin_data)
     # read via ObsPy
     st2 = readMSEED(tempfile)
     os.remove(tempfile)
     # test results
     np.testing.assert_array_equal(data, st2[0].data)
Example #56
 def loadGaps(self, frames, network, station, location, channel):
     """
     Returns a stream object that will contain all time spans from the
     provided list.
     """
     streams = []
     for frame in frames:
         temp = self.win.seishub.getPreview(network, station, location,
                                            channel, UTCDateTime(frame[0]),
                                            UTCDateTime(frame[1]))
         # Convert to float32
         if len(temp):
             temp[0].data = np.require(temp[0].data, 'float32')
             streams.append(temp)
     if len(streams):
         stream = streams[0]
         if len(streams) > 1:
             for _i in streams[1:]:
                 stream += _i
     else:
         stream = Stream()
     return stream
Example #57
def test__put_timeseries():
    """edge_test.MiniSeedFactory_test.test__put_timeseries()"""
    trace1 = __create_trace([0, 1, 2, 3, numpy.nan, 5, 6, 7, 8, 9],
                            channel="H")
    client = MockMiniSeedInputClient()
    factory = MiniSeedFactory()
    factory.write_client = client
    factory.put_timeseries(Stream(trace1), channels=("H",))
    # put timeseries should call close when done
    assert_equal(client.close_called, True)
    # trace should be split in 2 blocks at gap
    sent = client.last_sent
    assert_equal(len(sent), 2)
    # first trace includes samples [0...3]
    assert_equal(sent[0].stats.channel, "LFH")
    assert_equal(len(sent[0]), 4)
    assert_equal(sent[0].stats.endtime, trace1.stats.starttime + 3)
    # second trace includes [5...9]
    assert_equal(sent[1].stats.channel, "LFH")
    assert_equal(len(sent[1]), 5)
    assert_equal(sent[1].stats.starttime, trace1.stats.starttime + 5)
    assert_equal(sent[1].stats.endtime, trace1.stats.endtime)
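
The two-block split asserted above is NaN-gap detection; a sketch of how those blocks fall out of the test data with plain numpy:

import numpy

data = numpy.array([0, 1, 2, 3, numpy.nan, 5, 6, 7, 8, 9], dtype=float)
finite = numpy.isfinite(data)
edges = numpy.flatnonzero(numpy.diff(finite.astype(int))) + 1  # run boundaries
blocks = [b for b in numpy.split(data, edges) if numpy.isfinite(b).all()]
# blocks -> [array([0., 1., 2., 3.]), array([5., 6., 7., 8., 9.])]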
Example #58
    def to_obspy_stream(self):
        """
        convert time series to an :class:`obspy.core.Stream` which is like a
        list of :class:`obspy.core.Trace` objects.

        :return: An Obspy Stream object from the time series data
        :rtype: :class:`obspy.core.Stream`

        """

        trace_list = []
        for channel in self.channels:
            if channel[0] in ["e"]:
                ch_type = "electric"
            elif channel[0] in ["h", "b"]:
                ch_type = "magnetic"
            else:
                ch_type = "auxiliary"
            ts_obj = ChannelTS(ch_type, self.dataset[channel])
            trace_list.append(ts_obj.to_obspy_trace())

        return Stream(traces=trace_list)
Example #59
def spectrum(st, win=None, nfft=None, plot=False, powerspec=False, scaling='spectrum', normalize=True,
             KOsmooth=False, bandwidth=40, KOnormalize=False):
    """
    Make amplitude spectrum of traces in stream and plot using rfft (for real inputs, no negative frequencies)

    Args
        st = obspy Stream or trace object
        win = tuple of time window in seconds (e.g. win=(3., 20.)) over which to compute amplitude spectrum
        nfft = number of points to use in nfft, default None uses the next power of 2 of length of trace
        plot = True, plot spectrum, False, don't
        powerspec = False for fourier amplitude spectrum, True for power spectrum
        scaling = if powerspec is True, 'density' or 'spectrum' for power spectral density (V**2/Hz)
            or power spectrum (V**2)
        normalize (bool): if True, will normalize by signal length by dividing by NFFT (1/NFFT = deltat/time length),
            if False, will scale by deltat (1/samprate) to approximate continuous transform
        KOsmooth (bool): if True, will smooth spectrum using Konno Ohmachi smoothing
        bandwidth (int): bandwidth parameter for KOsmooth (typically 40)
        KOnormalize (bool): for KOsmooth only, if False will normalize on a log scale (default), if True, will
            normalize on a linear scale

    Returns
        freqs = frequency vector, only positive values
        amps = amplitude vector
    """
    st = Stream(st)  # turn into a stream object in case st is a trace

    amps = []
    freqs = []
    for trace in st:
        tvec = maketvec(trace)  # Time vector
        dat = trace.data
        freq, amp = spectrum_manual(dat, tvec, win, nfft, plot, powerspec, scaling, KOsmooth=KOsmooth,
                                    bandwidth=bandwidth, normalize=normalize)
        amps.append(amp)
        freqs.append(freq)
    if len(st) == 1:
        freqs = freqs[0]
        amps = amps[0]
    return freqs, amps
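
A standalone sketch of the normalization choice described in the docstring, using numpy's rfft (spectrum_manual itself is not shown here; this only illustrates the 1/NFFT-versus-deltat scaling):

import numpy as np

def amplitude_spectrum_sketch(data, dt, normalize=True):
    nfft = int(2 ** np.ceil(np.log2(len(data))))  # next power of 2
    amps = np.abs(np.fft.rfft(data, n=nfft))
    # dividing by NFFT normalizes by signal length; multiplying by dt
    # approximates the continuous transform
    amps *= (1.0 / nfft) if normalize else dt
    freqs = np.fft.rfftfreq(nfft, d=dt)
    return freqs, amps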
Example #60
def _make_traces(stream=None,
                 stats=None,
                 header=None,
                 channels=None,
                 data_x=None,
                 data_y=None,
                 data_z=None,
                 data_sz=None,
                 abs_times=None,
                 frame_count_ncs=None):
    '''
    Make the traces from the lists imported from the csv file.
    '''

    if stream is None:
        stream = Stream()

    if len(abs_times) > 1:
        if len(header) == 8:
            stats.channel = channels[0]
            _append_stream(stream, data_x, stats)
            stats_y = stats.copy()
            stats_y.channel = channels[1]
            _append_stream(stream, data_y, stats_y)
            stats_z = stats.copy()
            stats_z.channel = channels[2]
            _append_stream(stream, data_z, stats_z)
        else:
            stats.channel = channels[0]
            _append_stream(stream, data_sz, stats)

        stats_times = stats.copy()
        stats_times.channel = '_TT'
        _append_stream(stream, abs_times, stats_times)

        stats_frames = stats.copy()
        stats_frames.channel = '_FR'
        _append_stream(stream, frame_count_ncs, stats_frames)