Example #1
 def gapped_high_sample_stream(self):
     """
     Create a stream which has two overlapping traces with high sampling
     rates.
     """
     # first trace
     stats1 = {
         "sampling_rate": 6000.0,
         "starttime": UTCDateTime(2017, 9, 23, 18, 50, 29, 715100),
         "endtime": UTCDateTime(2017, 9, 23, 18, 50, 31, 818933),
         "network": "XI",
         "station": "00037",
         "location": "00",
         "channel": "FL1",
     }
     data1 = np.random.rand(12624)
     tr1 = obspy.Trace(data=data1, header=stats1)
     # second trace
     stat2 = {
         "sampling_rate": 6000.0,
         "delta": 0.00016666666666666666,
         "starttime": UTCDateTime(2017, 9, 23, 18, 50, 31, 819100),
         "endtime": UTCDateTime(2017, 9, 23, 18, 50, 31, 973933),
         "npts": 930,
         "calib": 1.0,
         "network": "XI",
         "station": "00037",
         "location": "00",
         "channel": "FL1",
     }
     data2 = np.random.rand(930)
     tr2 = obspy.Trace(data=data2, header=stat2)
     return obspy.Stream(traces=[tr1, tr2])
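The fixture above returns the two traces unmerged. As a minimal standalone sketch (assuming only numpy and obspy; not part of the original fixture), two such high-sample-rate traces can be built the same way and collapsed into a single record with Stream.merge:

import numpy as np
import obspy
from obspy import UTCDateTime

header = {
    "sampling_rate": 6000.0,
    "starttime": UTCDateTime(2017, 9, 23, 18, 50, 29, 715100),
    "network": "XI", "station": "00037",
    "location": "00", "channel": "FL1",
}
tr1 = obspy.Trace(data=np.random.rand(12624), header=header)
# start the second trace one sample after the first one ends
header2 = dict(header, starttime=tr1.stats.endtime + tr1.stats.delta)
tr2 = obspy.Trace(data=np.random.rand(930), header=header2)
st = obspy.Stream(traces=[tr1, tr2])
st.merge(method=1)  # contiguous traces with the same id collapse into one
assert len(st) == 1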
Example #2
def clean_up(corr, sampling_rate, freqmin, freqmax):
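    """
    Detrend, taper and band-pass filter correlation traces.

    corr may be a 1-D array (a single correlation) or a 2-D array with one
    correlation per row; a matching 1-D or 2-D array is returned.
    """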
    data = []
    st = obspy.Stream()
    if corr.ndim == 2:
        for ii in range(len(corr)):
            tr = obspy.Trace(data=corr[ii, :])
            tr.stats.sampling_rate = sampling_rate
            st += tr
            del tr
    else:
        tr = obspy.Trace(data=corr)
        tr.stats.sampling_rate = sampling_rate
        st += tr
        del tr

    st.detrend('constant')
    st.detrend('simple')
    percent = sampling_rate * 20 / st[0].stats.npts
    st.taper(max_percentage=percent, max_length=20.)
    st.filter('bandpass', freqmin=freqmin, freqmax=freqmax, zerophase=True)
    for tr in st:
        data.append(tr.data)
    data = np.array(data)
    if data.shape[0] == 1:
        data = data.flatten()
    return data
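A hedged usage sketch for clean_up as defined above; the array here is synthetic noise standing in for a set of cross-correlation functions (one per row):

import numpy as np

corr = np.random.randn(3, 2001)        # 3 synthetic correlation functions
out = clean_up(corr, sampling_rate=20.0, freqmin=0.1, freqmax=1.0)
print(out.shape)                       # (3, 2001): one filtered row per input

single = clean_up(corr[0], sampling_rate=20.0, freqmin=0.1, freqmax=1.0)
print(single.shape)                    # (2001,): 1-D input comes back flattened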
Example #3
def test_calc_timeshift():

    data_test = [0, 0, 1, 2, 1, 0, 0]
    tr_test = obspy.Trace(np.array(data_test),
                          header=dict(station='AAA', location='00', delta=0.1))
    st_ref = Stream(tr_test)
    tr_test = obspy.Trace(np.array(data_test),
                          header=dict(station='BBB', location='00', delta=0.1))
    st_ref.append(tr_test)

    # Shift AAA backwards by 0.2 s and BBB forwards by 0.1 s
    dt = dict()
    dt['AAA.00'] = 0.2
    dt['BBB.00'] = -0.1
    st_shift = st_ref.copy()
    st_shift.shift_waveform(dt)
    dt_res, CC = st_shift.calc_timeshift(st_ref)

    npt.assert_allclose(dt_res['AAA.00'], dt['AAA.00'], rtol=1e-5)
    npt.assert_allclose(dt_res['BBB.00'], dt['BBB.00'], rtol=1e-5)

    # Same shifts as before, but with the order of traces in the stream reversed
    dt = dict()
    dt['AAA.00'] = 0.2
    dt['BBB.00'] = -0.1
    st_shift = st_ref.copy()
    st_shift.shift_waveform(dt)
    st_shift.sort(keys=['station'], reverse=True)
    dt_res, CC = st_shift.calc_timeshift(st_ref)

    npt.assert_allclose(dt_res['AAA.00'], dt['AAA.00'], rtol=1e-5)
    npt.assert_allclose(dt_res['BBB.00'], dt['BBB.00'], rtol=1e-5)
Example #4
def test_calc_amplitude_misfit():

    data_test = [0.0, 0.0, 1.0, 2.0, 1.0, 0.0, 0.0]
    tr_test = obspy.Trace(np.array(data_test),
                          header=dict(station='AAA', location='00', delta=0.1))
    st_ref = Stream(tr_test)
    tr_test = obspy.Trace(np.array(data_test),
                          header=dict(station='BBB', location='00', delta=0.1))
    st_ref.append(tr_test)

    st_mult = st_ref.copy()

    st_mult.select(station='AAA')[0].data *= 2
    st_mult.select(station='BBB')[0].data *= 0.5

    dA = st_mult.calc_amplitude_misfit(st_ref)

    npt.assert_almost_equal(dA['AAA.00'], 2.0, decimal=5)
    npt.assert_almost_equal(dA['BBB.00'], 0.5, decimal=5)

    # Check when order is mixed up
    st_mult = st_ref.copy()

    st_mult.select(station='AAA')[0].data *= 2
    st_mult.select(station='BBB')[0].data *= 0.5

    st_mult.sort(keys=['station'], reverse=True)

    dA = st_mult.calc_amplitude_misfit(st_ref)

    npt.assert_almost_equal(dA['AAA.00'], 2.0, decimal=5)
    npt.assert_almost_equal(dA['BBB.00'], 0.5, decimal=5)
Example #5
def test_shift_waveform():

    data_test = [0, 0, 1, 2, 1, 0, 0]
    tr_test = obspy.Trace(np.array(data_test),
                          header=dict(station='AAA', location='00', delta=0.1))
    st_ref = Stream(tr_test)
    tr_test = obspy.Trace(np.array(data_test),
                          header=dict(station='BBB', location='00', delta=0.1))
    st_ref.append(tr_test)

    dt = dict()
    dt['AAA.00'] = 0.2
    dt['BBB.00'] = -0.1
    st_shift = st_ref.copy()
    st_shift.shift_waveform(dt)

    # Shift backwards by 0.2 s
    data_ref = [0, 0, 0, 0, 1, 2, 1]
    npt.assert_allclose(st_shift.select(station='AAA')[0].data,
                        data_ref,
                        rtol=1e-5,
                        atol=1e-3,
                        err_msg='Shifted data not as expected')

    # Shift forwards by 0.1 s
    data_ref = [0, 1, 2, 1, 0, 0, 0]
    npt.assert_allclose(st_shift.select(station='BBB')[0].data,
                        data_ref,
                        rtol=1e-5,
                        atol=1e-3,
                        err_msg='Shifted data not as expected')
Example #6
    def test_raise_unmatching_ids(self):
        """
        Test error raised for multiple stream identifiers
        """
        with NamedTemporaryFile() as tf1, NamedTemporaryFile() as tf2:
            obspy.Trace(data=np.arange(10, dtype=np.int32),
                        header={
                            "starttime": obspy.UTCDateTime(0),
                            "network": "NL",
                            "station": "HGN",
                            "location": "02",
                            "channel": "BHZ"
                        }).write(tf1.name,
                                 format="mseed",
                                 encoding="STEIM1",
                                 reclen=256)
            obspy.Trace(data=np.arange(10, dtype=np.float32),
                        header={
                            "starttime": obspy.UTCDateTime(100),
                            "sampling_rate": 2.0,
                            "network": "BW",
                            "station": "ALTM",
                            "location": "00",
                            "channel": "EHE"
                        }).write(tf2.name,
                                 format="mseed",
                                 encoding="FLOAT32",
                                 reclen=1024)

            with self.assertRaises(ValueError) as e:
                MSEEDMetadata([tf1.name, tf2.name])

        self.assertEqual(e.exception.args[0],
                         "All traces must have the same SEED id and quality.")
Example #7
 def gappy_stream(self):
     """Create a very simple mseed with one gap, return it."""
     stats = dict(
         network="UU",
         station="ELU",
         location="01",
         channel="ELZ",
         sampling_rate=1,
         starttime=self.start,
     )
     len1 = int(self.gap_start - self.start)
     # create first trace
     ar1 = np.random.rand(len1)
     tr1 = obspy.Trace(data=ar1, header=stats)
     assert tr1.stats.endtime <= self.gap_start
     # create second trace
     len2 = int(self.end - self.gap_end)
     ar2 = np.random.rand(len2)
     stats2 = dict(stats)
     stats2.update({"starttime": self.gap_end})
     tr2 = obspy.Trace(data=ar2, header=stats2)
     # assemble traces make sure gap is there
     assert tr2.stats.starttime >= self.gap_end
     st = obspy.Stream(traces=[tr1, tr2])
     gaps = st.get_gaps()
     assert len(gaps) == 1
     return st
Example #8
def test_filter_bad_waveforms():

    CC = dict()

    tr = obspy.Trace(header={'station': 'AAA', 'location': '00'})
    st = Stream(traces=tr)
    code = '%s.%s' % (tr.stats.station, tr.stats.location)
    CC[code] = 0.1

    tr = obspy.Trace(header={'station': 'BBB', 'location': '00'})
    st.append(tr)
    code = '%s.%s' % (tr.stats.station, tr.stats.location)
    CC[code] = 0.8

    tr = obspy.Trace(header={'station': 'CCC', 'location': '00'})
    st.append(tr)
    code = '%s.%s' % (tr.stats.station, tr.stats.location)
    CC[code] = -0.9

    tr = obspy.Trace(header={'station': 'DDD', 'location': '00'})
    st.append(tr)
    code = '%s.%s' % (tr.stats.station, tr.stats.location)
    CC[code] = 0.6

    st_filter = st.filter_bad_waveforms(CC=CC, CClim=0.6)

    npt.assert_equal(len(st_filter), 2)
    npt.assert_string_equal(str(st_filter[0].stats.station), 'BBB')
    npt.assert_string_equal(str(st_filter[1].stats.station), 'DDD')
Example #9
def stack(stream, length=None, move=None):
    """
    Stack traces in stream by correlation id

    :param stream: |Stream| object with correlations
    :param length: time span of one trace in the stack in seconds
        (alternatively a string consisting of a number and a unit
        -- ``'d'`` for days and ``'h'`` for hours -- can be specified,
        i.e. ``'3d'`` stacks together all traces inside a three days time
        window, default: None, which stacks together all traces)
    :param move: step of the moving stack, float or string,
        default: None -- no moving stack;
        if specified, move is usually smaller than length so that the
        stacked traces overlap
    :return: |Stream| object with stacked correlations
    """
    stream.sort()
    stream_stack = obspy.Stream()
    ids = {_corr_id(tr) for tr in stream}
    ids.discard(None)
    for id_ in ids:
        traces = [tr for tr in stream if _corr_id(tr) == id_]
        if length is None:
            data = np.mean([tr.data for tr in traces], dtype=float, axis=0)
            tr_stack = obspy.Trace(data, header=traces[0].stats)
            tr_stack.stats.key = tr_stack.stats.key + '_s'
            if 'num' in traces[0].stats:
                tr_stack.stats.num = sum(tr.stats.num for tr in traces)
            else:
                tr_stack.stats.num = len(traces)
            stream_stack.append(tr_stack)
        else:
            t1 = traces[0].stats.starttime
            lensec = _time2sec(length)
            movesec = _time2sec(move) if move else lensec
            if (lensec % (24 * 3600) == 0
                    or isinstance(length, str) and 'd' in length):
                t1 = UTC(t1.year, t1.month, t1.day)
            elif (lensec % 3600 == 0
                  or isinstance(length, str) and 'm' in length):
                t1 = UTC(t1.year, t1.month, t1.day, t1.hour)
            t2 = max(t1, traces[-1].stats.endtime - lensec)
            for t in IterTime(t1, t2, dt=movesec):
                sel = [
                    tr for tr in traces
                    if -0.1 <= tr.stats.starttime - t <= lensec + 0.1
                ]
                if len(sel) == 0:
                    continue
                data = np.mean([tr.data for tr in sel], dtype=float, axis=0)
                tr_stack = obspy.Trace(data, header=sel[0].stats)
                key_add = '_s%s' % length + (move is not None) * ('m%s' % move)
                tr_stack.stats.key = tr_stack.stats.key + key_add
                tr_stack.stats.starttime = t
                if 'num' in traces[0].stats:
                    tr_stack.stats.num = sum(tr.stats.num for tr in sel)
                else:
                    tr_stack.stats.num = len(sel)
                stream_stack.append(tr_stack)
    return stream_stack
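A hedged usage sketch for stack: the corr_stream variable below is a placeholder for a Stream of correlation traces carrying the key (and optionally num) headers used above. It stacks everything per correlation id, then builds a daily moving stack with a six-hour step:

stacked_all = stack(corr_stream)                      # one stacked trace per id
stacked_moving = stack(corr_stream, length='1d', move='6h')
for tr in stacked_moving:
    print(tr.id, tr.stats.starttime, tr.stats.num)    # num = traces per stack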
Example #10
 def test_extraction_of_basic_mseed_headers(self):
     """
     Tests extraction of basic features.
     """
     # Mixed files.
     with NamedTemporaryFile() as tf1, NamedTemporaryFile() as tf2:
         obspy.Trace(data=np.arange(10, dtype=np.int32),
                     header={"starttime": obspy.UTCDateTime(0),
                             "network": "BW", "station": "ALTM",
                             "location": "00", "channel": "EHE"}).write(
                 tf1.name, format="mseed", encoding="STEIM1", reclen=256)
         obspy.Trace(data=np.arange(10, dtype=np.float32),
                     header={"starttime": obspy.UTCDateTime(100),
                             "sampling_rate": 2.0, "network": "BW",
                             "station": "ALTM", "location": "00",
                             "channel": "EHE"}).write(
                 tf2.name, format="mseed", encoding="FLOAT32", reclen=1024)
         md = MSEEDMetadata([tf1.name, tf2.name], add_flags=True)
         self.assertEqual(md.meta["network"], "BW")
         self.assertEqual(md.meta["station"], "ALTM")
         self.assertEqual(md.meta["location"], "00")
         self.assertEqual(md.meta["channel"], "EHE")
         self.assertEqual(md.meta["quality"], "D")
         self.assertEqual(md.meta["start_time"], obspy.UTCDateTime(0))
         self.assertEqual(md.meta["end_time"],
                          obspy.UTCDateTime(105))
         self.assertEqual(md.meta["num_records"], 2)
         self.assertEqual(md.meta["num_samples"], 20)
         self.assertEqual(md.meta["sample_rate"], [1.0, 2.0])
         self.assertEqual(md.meta["record_length"], [256, 1024])
         self.assertEqual(md.meta["encoding"], ["FLOAT32", "STEIM1"])
Example #11
def make_stream(dataset):
    '''
    input: HDF5 waveform dataset (npts x 3 array with E, N, Z columns)
    output: obspy stream with three component traces
    '''
    data = np.array(dataset)
              
    tr_E = obspy.Trace(data=data[:, 0])
    tr_E.stats.starttime = UTCDateTime(dataset.attrs['trace_start_time'])
    tr_E.stats.delta = 0.01
    tr_E.stats.channel = dataset.attrs['receiver_type']+'E'
    tr_E.stats.station = dataset.attrs['receiver_code']
    tr_E.stats.network = dataset.attrs['network_code']
    
    tr_N = obspy.Trace(data=data[:, 1])
    tr_N.stats.starttime = UTCDateTime(dataset.attrs['trace_start_time'])
    tr_N.stats.delta = 0.01
    tr_N.stats.channel = dataset.attrs['receiver_type']+'N'
    tr_N.stats.station = dataset.attrs['receiver_code']
    tr_N.stats.network = dataset.attrs['network_code']
    
    tr_Z = obspy.Trace(data=data[:, 2])
    tr_Z.stats.starttime = UTCDateTime(dataset.attrs['trace_start_time'])
    tr_Z.stats.delta = 0.01
    tr_Z.stats.channel = dataset.attrs['receiver_type']+'Z'
    tr_Z.stats.station = dataset.attrs['receiver_code']
    tr_Z.stats.network = dataset.attrs['network_code']

    stream = obspy.Stream([tr_E, tr_N, tr_Z])
    
    return stream
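A hedged sketch of calling make_stream on an HDF5 dataset; the file name and dataset key are placeholders for an archive whose datasets carry the trace_start_time, receiver_type, receiver_code and network_code attributes read above:

import h5py

with h5py.File("waveforms.hdf5", "r") as f:           # placeholder file name
    dataset = f["data/some_trace_name"]               # placeholder dataset key
    st = make_stream(dataset)

st.detrend("demean")
st.filter("bandpass", freqmin=1.0, freqmax=20.0)
print(st)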
Example #12
 def fix_cut(self):
     """
     Checks to see if any trace is cut into multiple smaller traces based off their unique IDs
     
     If there are multiple traces with the same ID, they will be added together
     Any missing data between cut traces will be filled with the linear interpolation between the
     start and end points of each trace
     """
     toappend = []
     newwf = self.copy()
     i = 0
     for it1, trace1 in enumerate(self):
         item = []
         idoi = trace1.id
         for it2, trace2 in enumerate(newwf):
             if trace1.id == trace2.id:
                 item.append(it2)
         for tr in newwf.select(id=idoi):
             tr.id = 'NA.NA..NA'
         toappend.append(item)
 
     newwf = op.Stream()
     for item in toappend:
         newtrace = op.Trace()
         if len(item) > 1:
             newtrace = self[item[0]].__add__(self[item[1]],fill_value='interpolate')
             if len(item) > 2:
                 for i in range(2,len(item)):
                     newtrace += self[item[i]]
             newwf += newtrace
         elif len(item) > 0:
             newwf += self[item[0]]
 
     wf = newwf.copy()
     newwf = op.Stream()
     for t in wf:
         if t.stats.npts != (self.seconds*self.sr + 1):
             newst = t.stats.__deepcopy__()
             newst.npts = self.seconds*self.sr + 1
             newst.starttime = self.starttime
 
             st1 = t.stats.__deepcopy__()
             st1.starttime = self.starttime
             st1.npts = 1
 
             st2 = st1.__deepcopy__()
             st2.starttime = self.endtime
 
             t1 = op.Trace(data=np.ones(1,dtype=t.data.dtype),header=st1)
             t2 = op.Trace(data=np.ones(1,dtype=t.data.dtype),header=st2)
 
             newt = (t.__add__(t1,fill_value=0)).__add__(t2,fill_value=0)
 
             newwf += newt
         else:
             newwf += t   
             
     self.clear()
     self += newwf
Example #13
    def test_inplace(self):
        src = 'test_src'
        out = 'test_out'
        inplace = True

        data = {src: obspy.Trace(np.array([0, 0, 0]))}
        transformed = obspy.Trace(np.array([1, 1, 1]))

        tf = BaseTraceTransform(source=src, output=out, inplace=inplace)
        data = tf.update(data, transformed)
        assert len(data.keys()) == 1
        assert data[src] == transformed
Example #14
    def test_continuous_segments_combined(self):
        """
        Test continuous segments from traces in two files
        that are continuous. Also test a continuous segment
        that is continuous but has a different sampling rate
        """
        tr_1 = obspy.Trace(data=np.arange(10, dtype=np.int32),
                           header={"starttime": obspy.UTCDateTime(0)})
        tr_2 = obspy.Trace(data=np.arange(10, dtype=np.int32),
                           header={"starttime": obspy.UTCDateTime(10)})
        tr_3 = obspy.Trace(data=np.arange(10, dtype=np.int32),
                           header={
                               "starttime": obspy.UTCDateTime(20),
                               "sampling_rate": 0.5
                           })
        st = obspy.Stream(traces=[tr_1, tr_3])
        st2 = obspy.Stream(traces=[tr_2])
        with NamedTemporaryFile() as tf1, NamedTemporaryFile() as tf2:

            st.write(tf1.name, format="mseed")
            st2.write(tf2.name, format="mseed")
            md = MSEEDMetadata(files=[tf1.name, tf2.name])
            c_seg = md.meta["c_segments"]
            self.assertEqual(len(c_seg), 2)

            c = c_seg[0]
            self.assertEqual(c["start_time"], obspy.UTCDateTime(0))
            self.assertEqual(c["end_time"], obspy.UTCDateTime(20))
            self.assertEqual(c["segment_length"], 20)
            self.assertEqual(c["sample_min"], 0)
            self.assertEqual(c["sample_max"], 9)
            self.assertEqual(c["num_samples"], 20)
            self.assertEqual(c["sample_median"], 4.5)
            self.assertEqual(c["sample_lower_quartile"], 2.0)
            self.assertEqual(c["sample_upper_quartile"], 7.0)
            self.assertEqual(c["sample_rate"], 1.0)

            # Not continuous because of different sampling_rate (0.5)
            c = c_seg[1]
            self.assertEqual(c["start_time"], obspy.UTCDateTime(20))
            self.assertEqual(c["end_time"], obspy.UTCDateTime(40))
            self.assertEqual(c["segment_length"], 20)
            self.assertEqual(c["sample_min"], 0)
            self.assertEqual(c["sample_max"], 9)
            self.assertEqual(c["num_samples"], 10)
            self.assertEqual(c["sample_median"], 4.5)
            self.assertEqual(c["sample_lower_quartile"], 2.25)
            self.assertEqual(c["sample_upper_quartile"], 6.75)
            self.assertEqual(c["sample_rate"], 0.5)
Example #15
def test_taper_signal():
    tr_ref = obspy.Trace(data=np.ones(100), header={'delta': 0.1})
    result_ref = np.array([
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.00647486868104, 0.0257317790264, 0.0572719871734, 0.100278618298,
        0.153637823245, 0.215967626634, 0.285653719298, 0.360891268042,
        0.439731659872, 0.520132970055, 0.600012846888, 0.677302443521, 0.75,
        0.816222687798, 0.874255374086, 0.922595042772, 0.959989721829,
        0.985470908713, 0.998378654067, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.999291347838, 0.993634177361,
        0.982383934407, 0.965668088726, 0.943676037528, 0.916656959541,
        0.884916991715, 0.848815760567, 0.808762307473, 0.76521045406,
        0.718653660229, 0.669619433059, 0.618663349936, 0.566362763642,
        0.513310260719, 0.460106947223, 0.407355637956, 0.35565402633,
        0.305587912263, 0.257724564834, 0.212606294893, 0.170744310467,
        0.132612924568, 0.0986441810345, 0.0692229593031, 0.0446826135725,
        0.0253011957658, 0.0112983050911, 0.00283259989493, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
    ])
    taper_signal(tr_ref, t_begin=3, t_end=6)
    npt.assert_allclose(tr_ref.data,
                        result_ref,
                        atol=1e-10,
                        err_msg='Tapered signal does not match reference')

    tr_ref = obspy.Trace(data=np.ones(100), header={'delta': 0.1})
    result_ref = np.array([
        0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.00647486868104, 0.0257317790264, 0.0572719871734, 0.100278618298,
        0.153637823245, 0.215967626634, 0.285653719298, 0.360891268042,
        0.439731659872, 0.520132970055, 0.600012846888, 0.677302443521, 0.75,
        0.816222687798, 0.874255374086, 0.922595042772, 0.959989721829,
        0.985470908713, 0.998378654067, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
        1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.999825770934,
        0.998432666864, 0.995650341553, 0.991486549842, 0.985952896965,
        0.979064806205, 0.970841475907, 0.961305825967, 0.950484433951,
        0.938407461021
    ])
    taper_signal(tr_ref, t_begin=3, t_end=9)

    npt.assert_allclose(tr_ref.data,
                        result_ref,
                        atol=1e-10,
                        err_msg='Tapered signal does not match reference')
Example #16
 def create_stream(self,
                   starttime,
                   endtime,
                   seed_ids=None,
                   sampling_rate=None):
     """ create a waveforms from random data """
     t1 = obspy.UTCDateTime(starttime)
     t2 = obspy.UTCDateTime(endtime)
     sr = sampling_rate or self.sampling_rate
     ar_len = int((t2.timestamp - t1.timestamp) * sr)
     st = obspy.Stream()
     for seed in seed_ids or self.seed_ids:
         n, s, l, c = seed.split(".")
         meta = {
             "sampling_rate": sr,
             "starttime": t1,
             "network": n,
             "station": s,
             "location": l,
             "channel": c,
         }
         data = np.random.randn(ar_len)
         tr = obspy.Trace(data=data, header=meta)
         st.append(tr)
     return st
Example #17
    def __init__(self, file_name):

        if file_name.endswith('.ascii'):
            # Read in seismogram.
            temp = np.loadtxt(file_name)
            self.t, self.data = temp[:, 0], temp[:, 1]
            self.fname = file_name
            self.directory = os.path.dirname(file_name)

            # Initialize obspy
            self.tr = obspy.Trace(data=self.data)
            self.tr.stats.delta = (self.t[1] - self.t[0])
            self.tr.stats.sampling_rate = 1 / self.tr.stats.delta
            self.tr.stats.network, self.tr.stats.station, \
                self.tr.stats.channel = \
                os.path.basename(self.fname).split('.')[:3]
            self.tr.stats.channel = self.tr.stats.channel[2]

            # Reverse X component to agree with LASIF
            if self.tr.stats.channel == 'X':
                self.data = self.data * (-1)

        elif file_name.endswith('.mseed') or file_name.endswith('.sac'):
            self.tr = obspy.read(file_name)[0]
            self.fname = file_name
            self.t = np.array([
                x * self.tr.stats.delta for x in range(0, self.tr.stats.npts)
            ])
        else:
            raise SeismogramNotFoundError(
                utils.print_red("Seismogram not "
                                "found."))
Example #18
def correlate_traces(tr1, tr2, maxshift=3600, demean=True):
    """
    Return trace of cross-correlation of two input traces

    :param tr1,tr2: two |Trace| objects
    :param maxsift: maximal shift in correlation in seconds
    """
    n1, s1, l1, c1 = tr1.id.split('.')
    n2, s2, l2, c2 = tr2.id.split('.')
    sr = tr1.stats.sampling_rate
    xdata = obscorr(tr1.data,
                    tr2.data,
                    int(round(maxshift * sr)),
                    demean=demean)
    header = {
        'network': s1,
        'station': c1,
        'location': s2,
        'channel': c2,
        'network1': n1,
        'station1': s1,
        'location1': l1,
        'channel1': c1,
        'network2': n2,
        'station2': s2,
        'location2': l2,
        'channel2': c2,
        'starttime': tr1.stats.starttime,
        'sampling_rate': sr,
    }
    return obspy.Trace(data=xdata, header=header)
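A hedged sketch of driving correlate_traces: trim two vertical-component day files to a common window and correlate them with up to one hour of lag. File names are placeholders, and obscorr above is assumed to be ObsPy's cross-correlation routine imported under that name:

import obspy

tr1 = obspy.read("NET1.STA1..HHZ.mseed")[0]           # placeholder paths
tr2 = obspy.read("NET2.STA2..HHZ.mseed")[0]
start = max(tr1.stats.starttime, tr2.stats.starttime)
end = min(tr1.stats.endtime, tr2.stats.endtime)
tr1.trim(start, end)
tr2.trim(start, end)

cc = correlate_traces(tr1, tr2, maxshift=3600)
print(cc.id, cc.stats.npts)   # id is built from the station/channel codes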
Example #19
def test_create_Toeplitz_mult():
    from stfinv.utils.inversion import _create_Toeplitz_mult
    tr = obspy.Trace(data=np.array([0., 0., 0., 0., 1., 2., 1., 0., 0.]))
    st = obspy.Stream(tr)
    tr = obspy.Trace(data=np.array([0., 0., 1., 3., 2., 1., 0., 0., 0.]))
    st.append(tr)

    d = np.array([0., 0., 1., 1., 2., 1., 1., 0., 0.])
    G = _create_Toeplitz_mult(st)

    ref = [
        np.convolve(st[0].data, d, 'same'),
        np.convolve(st[1].data, d, 'same')
    ]
    res = np.matmul(G, d).reshape(2, 9)
    npt.assert_allclose(ref, res, atol=1e-7, rtol=1e-7)
Example #20
    def get_waveforms_for_event(self, event_id):
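        """
        Assemble an obspy Stream for one event by reading the big-endian
        float32 waveform files referenced in the wfdisc table and attaching
        station, channel, calibration and timing metadata.
        """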
        wf_ids = self._events[event_id]["waveform_ids"]
        _t = self._dataframes["wfdisc"]

        st = obspy.Stream()

        for wf in wf_ids:
            wf = _t[_t.id == wf].iloc[0]

            with io.open(wf.filename, "rb") as fh:
                data = fh.read(4 * wf.npts)

            data = from_buffer(data, dtype=np.float32)
            # Data is big-endian - we just want to work with little endian.
            data.byteswap(True)

            tr = obspy.Trace(data=data)
            tr.stats.network = "LL"
            tr.stats.station = wf.station
            tr.stats.sampling_rate = wf.sampling_rate
            tr.stats.starttime = wf.starttime
            tr.stats.channel = wf.channel.upper()
            tr.stats.calib = wf.calib

            st.append(tr)

        return st
Example #21
def write_su_adjoint(conf, data, dt, events):
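    """
    Write adjoint sources to SU files, one file per event and component.

    data maps "net.name.comp.event" keys to adjoint-source arrays and dt is
    the sample interval in seconds; traces are ordered by the station list
    of each event.
    """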
    sta_list = {}
    traces = {}
    for event in events:
        sta_list[event] = conf.get_station_list(event)
        for comp in conf.simulation.comps:
            traces[(event, comp)] = [None for s in sta_list[event]]

    for sta in data.keys():
        net, name, comp, event = sta.split(".")
        tr = obspy.Trace(data[sta].astype(np.float32), {"delta": dt})
        found = False
        for i, s in enumerate(sta_list[event]):
            if net == s["net"] and name == s["name"]:
                found = True
                break
        if not found:
            raise Exception("Stations could not be found: {}".format(sta))  # NOQA

        traces[(event, comp)][i] = tr

    for event in events:
        for comp in conf.simulation.comps:
            st = obspy.Stream(traces[(event, comp)])
            st.write(conf.get_adjoint_su_filename(event, comp),
                     format="SU", byteorder="<")
Example #22
def test_detrend():
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    detrend(ts, object_history=True, alg_id="0")
    detrend(seis, object_history=True, alg_id="0")
    detrend(tse, object_history=True, alg_id="0")
    detrend(seis_e, object_history=True, alg_id="0")
    detrend(ts, type="linear", object_history=True, alg_id="0")
    detrend(ts, type="constant", object_history=True, alg_id="0")
    detrend(ts, type="polynomial", order=2, object_history=True, alg_id="0")
    detrend(ts,
            type="spline",
            order=2,
            dspline=1000,
            object_history=True,
            alg_id="0")

    # functionality verification testing
    ts = get_live_timeseries()
    tr = obspy.Trace()
    tr.data = np.array(ts.data)
    copy = np.array(ts.data)
    tr.stats.sampling_rate = 20
    tr.detrend(type="simple")
    detrend(ts, "simple", object_history=True, alg_id="0")
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, tr.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, copy))
Example #23
    def calc_synthetic_from_grf6(self, st_data, tensor):
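        """
        Compute synthetic seismograms by weighting the six Green's-function
        channels (MTT, MPP, MRR, MTP, MRT, MRP) with the corresponding
        moment-tensor components and summing them, one trace per trace
        in st_data.
        """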
        st_synth = Stream()

        for tr in st_data:
            stat = tr.stats.station
            loc = tr.stats.location
            data = (self.select(station=stat, location=loc,
                                channel='MTT')[0].data * tensor.m_tt +
                    self.select(station=stat, location=loc,
                                channel='MPP')[0].data * tensor.m_pp +
                    self.select(station=stat, location=loc,
                                channel='MRR')[0].data * tensor.m_rr +
                    self.select(station=stat, location=loc,
                                channel='MTP')[0].data * tensor.m_tp +
                    self.select(station=stat, location=loc,
                                channel='MRT')[0].data * tensor.m_rt +
                    self.select(station=stat, location=loc,
                                channel='MRP')[0].data * tensor.m_rp)
            tr_synth = obspy.Trace(data=data, header=tr.stats)

            # # Convolve with STF
            # tr_synth.data = np.convolve(tr_synth.data, stf,
            #                             mode='same')[0:tr.stats.npts]
            st_synth += tr_synth

        return st_synth
Example #24
 def create_stream(
     self,
     starttime: utc_able_type,
     endtime: utc_able_type,
     seed_ids: Optional[List[str]] = None,
     sampling_rate: Optional[Union[float, int]] = None,
 ) -> obspy.Stream:
     """ create a waveforms from random data """
     t1 = to_utc(starttime)
     t2 = to_utc(endtime)
     sr = sampling_rate or self.sampling_rate
     ar_len = int((t2.timestamp - t1.timestamp) * sr)
     st = obspy.Stream()
     for seed in seed_ids or self.seed_ids:
         n, s, l, c = seed.split(".")
         meta = {
             "sampling_rate": sr,
             "starttime": t1,
             "network": n,
             "station": s,
             "location": l,
             "channel": c,
         }
         data = np.random.randn(ar_len)
         tr = obspy.Trace(data=data, header=meta)
         st.append(tr)
     return st
Example #25
def _create_test_data():
    """
    Test data used for some polarization tests.
    :return:
    """
    x = np.arange(0, 2048 / 20.0, 1.0 / 20.0)
    x *= 2. * np.pi
    y = np.cos(x)
    trZ = obspy.Trace(data=y)
    trZ.stats.sampling_rate = 20.
    trZ.stats.starttime = obspy.UTCDateTime('2014-03-01T00:00')
    trZ.stats.station = 'POLT'
    trZ.stats.channel = 'HHZ'
    trZ.stats.network = 'XX'

    trN = trZ.copy()
    trN.data *= 2.
    trN.stats.channel = 'HHN'
    trE = trZ.copy()
    trE.stats.channel = 'HHE'

    sz = obspy.Stream()
    sz.append(trZ)
    sz.append(trN)
    sz.append(trE)
    sz.sort(reverse=True)

    return sz
Example #26
def read_specfem_ascii_waveform_file(filename, network, station, channel):
    """
    Reads SPECFEM ASCII files to a :class:`~obspy.core.stream.Stream` object.

    :param filename: The filename.
    :type filename: str
    :param network: The network id of the data.
    :type network: str
    :param station: The station id of the data.
    :type station: str
    :param channel: The channel id of the data.
    :type channel: str
    """
    time_array, data = np.loadtxt(filename).T
    # Try to get a reasonably accurate sample spacing.
    dt = np.diff(time_array).mean()

    tr = obspy.Trace(data=data)
    tr.stats.network = network
    tr.stats.station = station
    tr.stats.channel = channel
    tr.stats.delta = dt
    tr.stats.starttime += time_array[0]

    return obspy.Stream(traces=[tr])
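A hedged usage sketch: read one SPECFEM ASCII seismogram (two columns, time and amplitude; the file name is a placeholder following the usual SPECFEM naming scheme) and write it out as MiniSEED:

st = read_specfem_ascii_waveform_file("XX.STA01.BXZ.semd",
                                      network="XX", station="STA01",
                                      channel="BXZ")
print(st)
st.write("XX.STA01.BXZ.mseed", format="MSEED")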
Example #27
    def test_amplitude(self):
        np.random.seed(0)

        prob = 1.0
        src = 'src'
        out = 'out'

        tf = Augment(augmentation_types=[AugmentationType.AMPLITUDE],
                     probability=prob,
                     source=src,
                     output=out)

        trace = obspy.Trace(np.random.normal(0, 1, 1000000))
        orig_mean = trace.data.mean()
        orig_std = trace.data.std()

        data = {src: trace}
        tf(data)

        aug_mean = data[out].data.mean()
        aug_std = data[out].data.std()

        assert not np.allclose(data[src],
                               data[out]), 'output should differ from input'
        assert np.allclose(orig_mean, aug_mean,
                           atol=1e-3), 'output and input mean should be close'
        assert np.allclose(orig_std, aug_std,
                           atol=1e-1), 'output and input std should be close'
Example #28
def _mergeChannels(st):
    """
    function to find longest continuous data chunck and discard the rest
    """
    st1 = st.copy()
    st1.merge(fill_value=0.0)
    start = max([x.stats.starttime for x in st1])
    end = min([x.stats.endtime for x in st1])
    try:
        st1.trim(starttime=start, endtime=end)
    except ValueError:  # if the stream is too fractured, start can end up after end
        return obspy.Stream()
    ar_len = min([len(x.data) for x in st1])

    ar = np.ones(ar_len)
    for tr in st1:
        ar *= tr.data
    trace = obspy.Trace(data=np.ma.masked_where(ar == 0.0, ar))
    trace.stats.starttime = start
    trace.stats.sampling_rate = st1[0].stats.sampling_rate
    if (ar == 0.0).any():

        try:
            st2 = trace.split()
        except Exception:
            return obspy.Stream()
        times = np.array([[x.stats.starttime, x.stats.endtime] for x in st2])
        df = pd.DataFrame(times, columns=['start', 'stop'])
        df['duration'] = df['stop'] - df['start']
        max_dur = df[df.duration == df['duration'].max()].iloc[0]
        st.trim(starttime=max_dur.start, endtime=max_dur.stop)
    else:
        st = st1
    return st
Example #29
    def to_file(self, fname, ftype="su"):
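        """
        Write the sensor amplitudes to a Seismic Unix (SU) file; source and
        receiver coordinates, the stacking count and the recording delay are
        stored in the SU trace headers.
        """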
        if ftype != "su":
            raise ValueError(f"ftype = {ftype} not recognized.")

        stream = obspy.Stream()

        rint = lambda x: int(round(x))

        for sensor in self.sensors:
            trace = obspy.Trace(np.array(sensor.amplitude, dtype=np.float32))
            trace.stats.delta = sensor.dt
            trace.stats.starttime = obspy.UTCDateTime(2020, 12, 18, 10, 0, 0)

            if not hasattr(trace.stats, 'su'):
                trace.stats.su = {}
            trace.stats.su.trace_header = obspy.io.segy.segy.SEGYTraceHeader()
            trace.stats.su.trace_header.scalar_to_be_applied_to_all_coordinates = -1000
            trace.stats.su.trace_header.source_coordinate_x = rint(
                self.source._x * 1000)
            trace.stats.su.trace_header.source_coordinate_y = rint(
                self.source._y * 1000)
            trace.stats.su.trace_header.number_of_horizontally_stacked_traces_yielding_this_trace = rint(
                sensor.nstacks - 1)
            trace.stats.su.trace_header.delay_recording_time = rint(
                sensor.delay * 1000)
            trace.stats.su.trace_header.group_coordinate_x = rint(sensor.x *
                                                                  1000)
            trace.stats.su.trace_header.group_coordinate_y = rint(sensor.y *
                                                                  1000)
            trace.stats.su.trace_header.coordinate_units = 1

            stream.append(trace)

        stream.write(filename=fname, format="SU")
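As a hedged round-trip check (file name is a placeholder), an SU file written this way can be read back with ObsPy and the geometry recovered from the unpacked trace headers:

import obspy

stream = obspy.read("shotgather.su", format="SU", unpack_trace_headers=True)
hdr = stream[0].stats.su.trace_header
print(hdr.source_coordinate_x, hdr.group_coordinate_x,
      hdr.scalar_to_be_applied_to_all_coordinates)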
Example #30
def test_invert_STF_dampened():
    from stfinv.utils.inversion import invert_STF
    tr = obspy.Trace(data=np.array([0., 0., 0., 0., 1., 2., 1., 0., 0.]))
    st_synth = obspy.Stream(tr)
    tr = obspy.Trace(data=np.array([0., 0., 1., 3., 2., 1., 0., 0., 0.]))
    st_synth.append(tr)

    stf_ref = np.array([0., 0., 1., 1., 0., 1., 1., 0., 0.])

    tr = obspy.Trace(data=np.convolve(st_synth[0].data, stf_ref, 'same'))
    st_data = obspy.Stream(tr)
    tr = obspy.Trace(data=np.convolve(st_synth[1].data, stf_ref, 'same'))
    st_data.append(tr)

    stf = invert_STF(st_data, st_synth, method='dampened', eps=1e-4)

    npt.assert_allclose(stf, stf_ref, rtol=1e-2, atol=1e-10)