Example #1
 def test_append_sanity_checks(self):
     """
     Testing sanity checks of append method.
     """
     rtr = RtTrace()
     ftr = Trace(data=np.array([0, 1]))
     # sanity checks need something already appended
     rtr.append(ftr)
     # 1 - differing ID
     tr = Trace(header={'network': 'xyz'})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'station': 'xyz'})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'location': 'xy'})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'channel': 'xyz'})
     self.assertRaises(TypeError, rtr.append, tr)
     # 2 - sample rate
     tr = Trace(header={'sampling_rate': 100.0})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'delta': 0.25})
     self.assertRaises(TypeError, rtr.append, tr)
     # 3 - calibration factor
     tr = Trace(header={'calib': 100.0})
     self.assertRaises(TypeError, rtr.append, tr)
     # 4 - data type
     tr = Trace(data=np.array([0.0, 1.1]))
     self.assertRaises(TypeError, rtr.append, tr)
     # 5 - only Trace objects are allowed
     self.assertRaises(TypeError, rtr.append, 1)
     self.assertRaises(TypeError, rtr.append, "2323")
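The sanity checks above reject chunks whose ID, sampling rate, calibration factor or data type differ from what is already in the RtTrace, and anything that is not a Trace at all. For contrast, a minimal sketch of the happy path those checks guard (standard ObsPy imports assumed; this snippet is illustrative and not part of the test suite):

    import numpy as np
    from obspy import Trace
    from obspy.realtime import RtTrace

    rtr = RtTrace()
    tr1 = Trace(data=np.arange(10, dtype=np.int32))
    tr2 = Trace(data=np.arange(10, dtype=np.int32))
    # start the second chunk one sample after the first ends,
    # so there is neither a gap nor an overlap
    tr2.stats.starttime = tr1.stats.endtime + tr1.stats.delta
    rtr.append(tr1)
    rtr.append(tr2)
    assert rtr.stats.npts == 20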
Example #2
 def test_appendSanityChecks(self):
     """
     Testing sanity checks of append method.
     """
     rtr = RtTrace()
     ftr = Trace(data=np.array([0, 1]))
     # sanity checks need something already appended
     rtr.append(ftr)
     # 1 - differing ID
     tr = Trace(header={'network': 'xyz'})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'station': 'xyz'})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'location': 'xy'})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'channel': 'xyz'})
     self.assertRaises(TypeError, rtr.append, tr)
     # 2 - sample rate
     tr = Trace(header={'sampling_rate': 100.0})
     self.assertRaises(TypeError, rtr.append, tr)
     tr = Trace(header={'delta': 0.25})
     self.assertRaises(TypeError, rtr.append, tr)
     # 3 - calibration factor
     tr = Trace(header={'calib': 100.0})
     self.assertRaises(TypeError, rtr.append, tr)
     # 4 - data type
     tr = Trace(data=np.array([0.0, 1.1]))
     self.assertRaises(TypeError, rtr.append, tr)
     # 5 - only Trace objects are allowed
     self.assertRaises(TypeError, rtr.append, 1)
     self.assertRaises(TypeError, rtr.append, "2323")
Example #3
 def test_appendNotFloat32(self):
     """
     Test for not using float32.
     """
     tr = read()[0]
     tr.data = np.require(tr.data, dtype=native_str('>f4'))
     traces = tr / 3
     rtr = RtTrace()
     for trace in traces:
         rtr.append(trace)
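The expression tr / 3 above uses ObsPy's Trace division, which slices a Trace into that many contiguous pieces returned as a Stream; feeding the pieces to RtTrace.append one by one emulates packetised real-time input. A short sketch of that pattern (using ObsPy's bundled example trace; illustrative only):

    from obspy import read
    from obspy.realtime import RtTrace

    tr = read()[0]       # bundled example trace (3000 samples)
    chunks = tr / 3      # Stream of 3 contiguous sub-traces
    rtr = RtTrace()
    for chunk in chunks:
        rtr.append(chunk)
    # the reassembled real-time trace spans the same samples as the original
    assert rtr.stats.npts == tr.stats.npts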
Example #4
 def test_append_not_float32(self):
     """
     Test for not using float32.
     """
     tr = read()[0]
     tr.data = np.require(tr.data, dtype=native_str('>f4'))
     traces = tr / 3
     rtr = RtTrace()
     for trace in traces:
         rtr.append(trace)
Example #5
 def test_ne(self):
     """
     Testing __ne__ method.
     """
     tr = Trace()
     tr2 = RtTrace()
     tr3 = RtTrace()
     # RtTrace should never be equal to Trace objects
     self.assertTrue(tr2 != tr)
     self.assertTrue(tr2.__ne__(tr))
     self.assertFalse(tr2 != tr3)
     self.assertFalse(tr2.__ne__(tr3))
Example #6
 def test_ne(self):
     """
     Testing __ne__ method.
     """
     tr = Trace()
     tr2 = RtTrace()
     tr3 = RtTrace()
     # RtTrace should never be equal to Trace objects
     self.assertNotEqual(tr2, tr)
     self.assertTrue(tr2.__ne__(tr))
     self.assertFalse(tr2 != tr3)
     self.assertFalse(tr2.__ne__(tr3))
Example #7
 def test_append_overlap(self):
     """
     Appending overlapping traces should raise a UserWarning/TypeError
     """
     rtr = RtTrace()
     tr = Trace(data=np.array([0, 1]))
     rtr.append(tr)
     # this raises UserWarning
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('error', UserWarning)
         self.assertRaises(UserWarning, rtr.append, tr)
     # append with gap_overlap_check=True will raise a TypeError
     self.assertRaises(TypeError, rtr.append, tr, gap_overlap_check=True)
Example #8
 def test_appendOverlap(self):
     """
     Appending overlapping traces should raise a UserWarning/TypeError
     """
     rtr = RtTrace()
     tr = Trace(data=np.array([0, 1]))
     rtr.append(tr)
     # this raises UserWarning
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('error', UserWarning)
         self.assertRaises(UserWarning, rtr.append, tr)
     # append with gap_overlap_check=True will raise a TypeError
     self.assertRaises(TypeError, rtr.append, tr, gap_overlap_check=True)
Example #9
    def test_rt_gaussian_filter(self):
        from am_signal import gaussian_filter

        data_trace = self.data_trace.copy()
        gauss5, tshift = gaussian_filter(1.0, 5.0, 0.01)

        rt_trace = RtTrace()
        rt_single = RtTrace()
        for rtt in [rt_trace, rt_single]:
            rtt.registerRtProcess('convolve', conv_signal=gauss5)

        rt_single.append(data_trace, gap_overlap_check=True)

        for tr in self.traces:
            # pre-apply the inverse time-shift before appending data
            tr.stats.starttime -= tshift
            rt_trace.append(tr, gap_overlap_check=True)

        # test the waveforms are the same
        diff = self.data_trace.copy()
        diff.data = rt_trace.data - rt_single.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
        # test the time-shifts
        starttime_diff = rt_single.stats.starttime - self.data_trace.stats.starttime
        self.assertAlmostEquals(starttime_diff, 0.0)
Example #10
    def test_rt_kurt_grad(self):
        win = 3.0
        data_trace = self.data_trace.copy()

        sigma = float(np.std(data_trace.data))
        fact = 1 / sigma

        rt_trace = RtTrace()
        rt_trace_single = RtTrace()

        for rtt in [rt_trace, rt_trace_single]:
            rtt.registerRtProcess('scale', factor=fact)
            rtt.registerRtProcess('kurtosis', win=win)
            rtt.registerRtProcess('boxcar', width=50)
            rtt.registerRtProcess('differentiate')
            rtt.registerRtProcess('neg_to_zero')

        rt_trace_single.append(data_trace)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace_single.data - rt_trace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0, 5)
Example #11
    def test_rt_offset(self):

        offset = 500

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('offset', offset=offset)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - self.data_trace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), offset)
Example #12
 def test_appendGap(self):
     """
     Appending a trace with a time gap should raise a UserWarning/TypeError
     """
     rtr = RtTrace()
     tr = Trace(data=np.array([0, 1]))
     tr2 = Trace(data=np.array([5, 6]))
     tr2.stats.starttime = tr.stats.starttime + 10
     rtr.append(tr)
     # this raises UserWarning
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('error', UserWarning)
         self.assertRaises(UserWarning, rtr.append, tr2)
     # append with gap_overlap_check=True will raise a TypeError
     self.assertRaises(TypeError, rtr.append, tr2, gap_overlap_check=True)
Example #13
 def test_append_gap(self):
     """
     Appending a trace with a time gap should raise a UserWarning/TypeError
     """
     rtr = RtTrace()
     tr = Trace(data=np.array([0, 1]))
     tr2 = Trace(data=np.array([5, 6]))
     tr2.stats.starttime = tr.stats.starttime + 10
     rtr.append(tr)
     # this raises UserWarning
     with warnings.catch_warnings(record=True):
         warnings.simplefilter('error', UserWarning)
         self.assertRaises(UserWarning, rtr.append, tr2)
     # append with gap_overlap_check=True will raise a TypeError
     self.assertRaises(TypeError, rtr.append, tr2, gap_overlap_check=True)
Example #14
    def test_rt_neg_to_zero(self):

        data_trace = self.data_trace.copy()
        max_val = np.max(data_trace.data)

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('neg_to_zero')

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        max_val_test = np.max(rt_trace.data)
        min_val_test = np.min(rt_trace.data)
        self.assertEqual(max_val, max_val_test)
        self.assertEqual(0.0, min_val_test)
Example #15
    def test_rt_scale(self):

        data_trace = self.data_trace.copy()

        fact = 1 / np.std(data_trace.data)

        data_trace.data *= fact

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('scale', factor=fact)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - data_trace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
Example #16
    def test_rt_variance(self):

        win = 10

        data_trace = self.data_trace.copy()

        rt_single = RtTrace()
        rt_trace = RtTrace()
        rt_trace.registerRtProcess('variance', win=win)
        rt_single.registerRtProcess('variance', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)
        rt_single.append(data_trace, gap_overlap_check=True)

        assert_array_almost_equal(rt_single, rt_trace)
Example #17
    def _run_rt_process(self, process_list, max_length=None):
        """
        Helper function to create a RtTrace, register all given process
        functions and run the real time processing.
        """
        # assemble real time trace
        self.rt_trace = RtTrace(max_length=max_length)

        for (process, options) in process_list:
            self.rt_trace.register_rt_process(process, **options)

        # append packet data to RtTrace
        self.rt_appended_traces = []
        for trace in self.orig_trace_chunks:
            # process single trace
            result = self.rt_trace.append(trace, gap_overlap_check=True)
            # add to list of appended traces
            self.rt_appended_traces.append(result)
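The helper registers each (process, options) pair on a fresh RtTrace and then pushes the pre-split chunks through it. In the full test case shown later (Example #36) it is used to compare chunked real-time processing against offline processing of the whole trace; a condensed sketch of that pattern:

    # offline reference vs. chunked real-time processing of the same data
    trace = self.orig_trace.copy()
    self.filt_trace_data = signal.integrate(trace)
    self._run_rt_process([('integrate', {})])
    np.testing.assert_almost_equal(self.filt_trace_data, self.rt_trace.data)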
Example #18
    def test_sw_kurtosis(self):
        win = 3.0

        data_trace = self.data_trace.copy()

        rt_trace = RtTrace()
        rt_single = RtTrace()

        rt_trace.registerRtProcess('sw_kurtosis', win=win)
        rt_single.registerRtProcess('sw_kurtosis', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)
        rt_single.append(data_trace)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - rt_single.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
Example #19
    def test_rt_mean(self):

        win = 0.05

        data_trace = self.data_trace.copy()

        rt_single = RtTrace()
        rt_trace = RtTrace()
        rt_trace.registerRtProcess('mean', win=win)
        rt_single.registerRtProcess('mean', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)
        rt_single.append(data_trace, gap_overlap_check=True)

        newtr = self.data_trace.copy()
        newtr.data = newtr.data - rt_trace.data
        assert_array_almost_equal(rt_single, rt_trace)
        self.assertAlmostEqual(np.mean(newtr.data), 0.0, 0)
Example #20
    def test_rt_gaussian_filter(self):
        from am_signal import gaussian_filter

        data_trace = self.data_trace.copy()
        gauss5, tshift = gaussian_filter(1.0, 5.0, 0.01)

        rt_trace = RtTrace()
        rt_single = RtTrace()
        for rtt in [rt_trace, rt_single]:
            rtt.registerRtProcess('convolve', conv_signal=gauss5)

        rt_single.append(data_trace, gap_overlap_check=True)

        for tr in self.traces:
            # pre-apply the inverse time-shift before appending data
            tr.stats.starttime -= tshift
            rt_trace.append(tr, gap_overlap_check=True)

        # test the waveforms are the same
        diff = self.data_trace.copy()
        diff.data = rt_trace.data - rt_single.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
        # test the time-shifts
        starttime_diff = rt_single.stats.starttime - self.data_trace.stats.starttime
        self.assertAlmostEquals(starttime_diff, 0.0)
Example #21
    def test_rt_kurt_grad(self):
        win = 3.0
        data_trace = self.data_trace.copy()

        sigma = float(np.std(data_trace.data))
        fact = 1 / sigma

        rt_trace = RtTrace()
        rt_trace_single = RtTrace()

        for rtt in [rt_trace, rt_trace_single]:
            rtt.registerRtProcess('scale', factor=fact)
            rtt.registerRtProcess('kurtosis', win=win)
            rtt.registerRtProcess('boxcar', width=50)
            rtt.registerRtProcess('differentiate')
            rtt.registerRtProcess('neg_to_zero')

        rt_trace_single.append(data_trace)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace_single.data - rt_trace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0, 5)
Example #22
    def _runRtProcess(self, process_list, max_length=None):
        """
        Helper function to create a RtTrace, register all given process
        functions and run the real time processing.
        """
        # assemble real time trace
        self.rt_trace = RtTrace(max_length=max_length)

        for (process, options) in process_list:
            self.rt_trace.registerRtProcess(process, **options)

        # append packet data to RtTrace
        self.rt_appended_traces = []
        for trace in self.orig_trace_chunks:
            # process single trace
            result = self.rt_trace.append(trace, gap_overlap_check=True)
            # add to list of appended traces
            self.rt_appended_traces.append(result)
Example #23
    def test_rt_offset(self):

        offset = 500

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('offset', offset=offset)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - self.data_trace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), offset)
Example #24
    def test_rt_neg_to_zero(self):

        data_trace = self.data_trace.copy()
        max_val = np.max(data_trace.data)

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('neg_to_zero')

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        max_val_test = np.max(rt_trace.data)
        min_val_test = np.min(rt_trace.data)
        self.assertEqual(max_val, max_val_test)
        self.assertEqual(0.0, min_val_test)
Example #25
    def test_rt_kurtosis(self):
        win = 3.0
        data_trace = self.data_trace.copy()

        sigma = float(np.std(data_trace.data))
        fact = 1 / sigma

        dt = data_trace.stats.delta
        C1 = dt / float(win)

        x = data_trace.data
        ktrace = data_trace.copy()
        ktrace.data = rec_kurtosis(x * fact, C1)

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('scale', factor=fact)
        rt_trace.registerRtProcess('kurtosis', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - ktrace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
Example #26
    def test_rt_scale(self):

        data_trace = self.data_trace.copy()

        fact = 1 / np.std(data_trace.data)

        data_trace.data *= fact

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('scale', factor=fact)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - data_trace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
Example #27
    def test_rt_kurtosis(self):
        win = 3.0
        data_trace = self.data_trace.copy()

        sigma = float(np.std(data_trace.data))
        fact = 1 / sigma

        dt = data_trace.stats.delta
        C1 = dt / float(win)

        x = data_trace.data
        ktrace = data_trace.copy()
        ktrace.data = rec_kurtosis(x * fact, C1)

        rt_trace = RtTrace()
        rt_trace.registerRtProcess('scale', factor=fact)
        rt_trace.registerRtProcess('kurtosis', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - ktrace.data
        self.assertAlmostEquals(np.mean(np.abs(diff)), 0.0)
Example #28
class RtMigrator(object):
    """
    Class of objects for real-time migration.
    """

    # attributes
    x=np.array([])
    y=np.array([])
    z=np.array([])
    ttimes_matrix=np.empty((0,0), dtype=float)
    npts=0
    nsta=0
    sta_list=[]

    obs_rt_list=[]
    point_rt_list=[]
    stack_list=[]

    max_out=None
    x_out=None
    y_out=None
    z_out=None

    last_common_end_stack=[]
    last_common_end_max=None

    dt=1.0
    filter_shift=0.0


    def __init__(self,waveloc_options):
        """
        Initialize from a set of travel-times as hdf5 files
        """
        wo=waveloc_options
        # initialize the travel-times
        #############################
        ttimes_fnames=glob.glob(wo.ttimes_glob)
        # get basic lengths
        f=h5py.File(ttimes_fnames[0],'r')
        # copy the x, y, z data over
        self.x = np.array(f['x'][:])
        self.y = np.array(f['y'][:])
        self.z = np.array(f['z'][:])
        f.close()
        # read the files
        ttimes_list = []
        self.sta_list=[]
        for fname in ttimes_fnames:
            f=h5py.File(fname,'r')
            # update the list of ttimes
            ttimes_list.append(np.array(f['ttimes']))
            sta=f['ttimes'].attrs['station']
            f.close()
            # update the dictionary of station names
            self.sta_list.append(sta)
        # stack the ttimes into a numpy array
        self.ttimes_matrix=np.vstack(ttimes_list)
        (self.nsta,self.npts) = self.ttimes_matrix.shape

        # initialize the RtTrace(s)
        ##########################
        max_length = wo.opdict['max_length']
        self.safety_margin = wo.opdict['safety_margin']
        self.dt = wo.opdict['dt']

        # need a RtTrace per station 
        self.obs_rt_list=[RtTrace() for sta in self.sta_list]

        # register pre-processing
        self._register_preprocessing(wo)

        # need nsta streams for each point we test (nsta x npts)
        # for shifted waveforms
        self.point_rt_list=[[RtTrace(max_length=max_length) \
                for ista in xrange(self.nsta)] for ip in xrange(self.npts)]

        # register processing of point-streams here
        for sta_list in self.point_rt_list:
            for rtt in sta_list:
                # This is where we would scale for distance (given pre-calculated
                # distances from each point to every station)
                rtt.registerRtProcess('scale', factor=1.0)

        # need npts streams to store the point-stacks
        self.stack_list=[RtTrace(max_length=max_length) for ip in xrange(self.npts)]
        
        # register stack processing here
        for rtt in self.stack_list:
            # This is where we would add or lower weights if we wanted to
            rtt.registerRtProcess('scale', factor=1.0)

        # need 4 output streams (max, x, y, z)
        self.max_out = RtTrace()
        self.x_out = RtTrace()
        self.y_out = RtTrace()
        self.z_out = RtTrace()

        if not wo.is_syn:
            self.max_out.registerRtProcess('boxcar', width=50)

        # need a list of common start-times
        self.last_common_end_stack = [UTCDateTime(1970,1,1) for i in xrange(self.npts)]
        self.last_common_end_max = UTCDateTime(1970,1,1) 

    def _register_preprocessing(self, waveloc_options):
        wo=waveloc_options
        
        # if this is a synthetic
        if wo.is_syn:
            # do dummy processing only
            for rtt in self.obs_rt_list:
                rtt.registerRtProcess('scale', factor=1.0)

        else:
            # get gaussian filtering parameters
            f0, sigma, dt = wo.gauss_filter
            gauss, self.filter_shift = gaussian_filter(f0, sigma, dt)
            # get kwin
            # for now just use one window
            kwin = wo.opdict['kwin']
            # register pre-processing of data here
            for rtt in self.obs_rt_list:
                rtt.registerRtProcess('convolve', conv_signal=gauss)
                rtt.registerRtProcess('sw_kurtosis', win=kwin)
                rtt.registerRtProcess('boxcar', width=50)
                rtt.registerRtProcess('differentiate')
                rtt.registerRtProcess('neg_to_zero')

    def updateData(self, tr_list):
        """
        Adds a list of traces (one per station) to the system
        """
        t_copy=0.0
        t_append=0.0
        t_append_proc=0.0
        t0_update=time.time()
        for tr in tr_list:
            if (self.dt!=tr.stats.delta):
                msg = 'Value of dt from options file %.2f does not match dt from data %.2f'%(self.dt, tr.stats.delta)
                raise ValueError(msg)
            # pre-correct for filter_shift
            #tr.stats.starttime -= np.round(self.filter_shift/self.dt) * self.dt
            tr.stats.starttime -= self.filter_shift
            sta=tr.stats.station
            ista=self.sta_list.index(sta)
            # make dtype of data float if it is not already
            tr.data=tr.data.astype(np.float32)
            t0=time.time()
            pp_data = self.obs_rt_list[ista].append(tr, gap_overlap_check = True)
            t_append_proc += time.time() - t0

            # loop over points
            for ip in xrange(self.npts):
                # do time shift and append
                t0=time.time()
                pp_data_tmp = pp_data.copy()
                t_copy += time.time() - t0
                pp_data_tmp.stats.starttime -= np.round(self.ttimes_matrix[ista,ip]/self.dt) * self.dt
                t0=time.time()
                self.point_rt_list[ip][ista].append(pp_data_tmp, gap_overlap_check = True)
                t_append += time.time() - t0

        print "In updateData : %.2f s in process and %.2f s in data copy and %.2f s in append and a total of %.2f s" % (t_append_proc, t_copy, t_append, time.time()-t0_update)

    def updateStacks(self):

        npts=self.npts
        
        for ip in xrange(npts):
            self._updateStack(ip)

    def _updateStack(self,ip):
        UTCDateTime.DEFAULT_PRECISION=2
        nsta=self.nsta
        # get common start-time for this point
        common_start=max([self.point_rt_list[ip][ista].stats.starttime \
                 for ista in xrange(nsta)])
        common_start=max(common_start,self.last_common_end_stack[ip])
        # get list of stations for which the end-time is compatible
        # with the common_start time and the safety buffer
        ista_ok=[ista for ista in xrange(nsta) if (self.point_rt_list[ip][ista].stats.endtime - common_start) > self.safety_margin]
        # get common end-time
        common_end=min([ self.point_rt_list[ip][ista].stats.endtime for ista in ista_ok])
        self.last_common_end_stack[ip]=common_end+self.dt
        # stack
        c_list=[]
        for ista in ista_ok:
            tr=self.point_rt_list[ip][ista].copy()
            tr.trim(common_start, common_end)
            c_list.append(np.array(tr.data[:]))
        tr_common=np.vstack(c_list)
        # prepare trace for passing up
        stack_data = np.sum(tr_common, axis=0)
        stats={'station':'STACK', 'npts':len(stack_data), 'delta':self.dt, \
                'starttime':common_start}
        tr=Trace(data=stack_data,header=stats)
        #import pdb; pdb.set_trace()
        # append to appropriate stack_list
        self.stack_list[ip].append(tr, gap_overlap_check = True)

    def updateMax(self):

        npts=self.npts
        nsta=self.nsta

        # now extract maximum etc from stacks
        # get common start-time for this point
        common_start=max([self.stack_list[ip].stats.starttime \
                    for ip in xrange(npts)])
        common_start=max(common_start,self.last_common_end_max)
        # get list of points for which the end-time is compatible
        # with the common_start time and the safety buffer
        ip_ok=[ip for ip in xrange(npts) if (self.stack_list[ip].stats.endtime - common_start) > self.safety_margin]
        common_end=min([self.stack_list[ip].stats.endtime for ip in ip_ok ])
        self.last_common_end_max=common_end+self.dt
        # stack
        c_list=[]
        for ip in ip_ok:
            tr=self.stack_list[ip].copy()
            tr.trim(common_start, common_end)
            c_list.append(tr.data)
        tr_common=np.vstack(c_list)
        # get maximum and the corresponding point
        max_data = np.max(tr_common, axis=0)
        argmax_data = np.argmax(tr_common, axis=0)
        # prepare traces for passing up
        # max
        stats={'station':'Max', 'npts':len(max_data), 'delta':self.dt, \
                'starttime':common_start}
        tr_max=Trace(data=max_data,header=stats)
        self.max_out.append(tr_max, gap_overlap_check = True)
        # x coordinate
        stats['station'] = 'xMax'
        tr_x=Trace(data=self.x[argmax_data],header=stats)
        self.x_out.append(tr_x, gap_overlap_check = True)
        # y coordinate
        stats['station'] = 'yMax'
        tr_y=Trace(data=self.y[argmax_data],header=stats)
        self.y_out.append(tr_y, gap_overlap_check = True)
        # z coordinate
        stats['station'] = 'zMax'
        tr_z=Trace(data=self.z[argmax_data],header=stats)
        self.z_out.append(tr_z, gap_overlap_check = True)
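The class above is driven by three update methods that are meant to be called once per incoming packet of traces: updateData pre-processes each station trace and time-shifts it onto every grid point, updateStacks sums the shifted traces per point, and updateMax extracts the maximum of the stacks and its x, y, z location into the four output RtTraces. A hedged sketch of such a driver loop (waveloc_options and the packet source are placeholders, not part of the code above):

    # hypothetical driver loop; wo and next_packets() are placeholders
    migrator = RtMigrator(wo)
    for tr_list in next_packets():      # one Trace per station and packet
        migrator.updateData(tr_list)
        migrator.updateStacks()
        migrator.updateMax()
    # migrator.max_out, .x_out, .y_out, .z_out now hold the migration output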
Example #29
    def test_kwin_bank(self):
        win_list = [1.0, 3.0, 9.0]
        n_win = len(win_list)

        data_trace = self.data_trace.copy()

        sigma = float(np.std(data_trace.data))
        fact = 1 / sigma

        # One RtTrace for processing before the kurtosis
        rt_trace = RtTrace()
        rt_trace.registerRtProcess('scale', factor=fact)

        # One RtTrace per kurtosis window
        kurt_traces = []
        for i in xrange(n_win):
            rtt = RtTrace()
            rtt.registerRtProcess('kurtosis', win=win_list[i])
            kurt_traces.append(rtt)

        # One RtTrace for post-processing the max kurtosis window
        max_kurt = RtTrace()
        max_kurt.registerRtProcess('differentiate')
        max_kurt.registerRtProcess('neg_to_zero')

        for tr in self.traces:
            # prepare memory for kurtosis
            kurt_tr = tr.copy()
            # do initial processing
            proc_trace = rt_trace.append(tr, gap_overlap_check=True)
            kurt_output = []
            for i in xrange(n_win):
                # pass output of initial processing to the kwin bank
                ko = kurt_traces[i].append(proc_trace, gap_overlap_check=True)
                # append the output to the kurt_output list
                kurt_output.append(ko.data)
            # stack the output of the kwin bank and find maximum
            kurt_stack = np.vstack(tuple(kurt_output))
            kurt_tr.data = np.max(kurt_stack, axis=0)
            # append to the max_kurt RtTrace for post-processing
            max_kurt.append(kurt_tr)
Example #30
 def test_copy(self):
     """
     Testing copy of RtTrace object.
     """
     rtr = RtTrace()
     rtr.copy()
     # register predefined function
     rtr.register_rt_process('integrate', test=1, muh='maeh')
     rtr.copy()
     # register ObsPy function call
     rtr.register_rt_process(signal.filter.bandpass, freqmin=0, freqmax=1,
                             df=0.1)
     rtr.copy()
     # register NumPy function call
     rtr.register_rt_process(np.square)
     rtr.copy()
Example #31
class RtMigrator(object):
    """
    Class of objects for real-time migration.
    """

    # attributes
    x = np.array([])
    y = np.array([])
    z = np.array([])
    ttimes_matrix = np.empty((0, 0), dtype=float)
    npts = 0
    nsta = 0
    sta_list = []

    obs_rt_list = []
    point_rt_list = []
    stack_list = []

    max_out = None
    x_out = None
    y_out = None
    z_out = None

    last_common_end_stack = []
    last_common_end_max = None

    dt = 1.0
    filter_shift = 0.0

    def __init__(self, waveloc_options):
        """
        Initialize from a set of travel-times as hdf5 files
        """
        wo = waveloc_options
        # initialize the travel-times
        #############################
        ttimes_fnames = glob.glob(wo.ttimes_glob)
        # get basic lengths
        f = h5py.File(ttimes_fnames[0], 'r')
        # copy the x, y, z data over
        self.x = np.array(f['x'][:])
        self.y = np.array(f['y'][:])
        self.z = np.array(f['z'][:])
        f.close()
        # read the files
        ttimes_list = []
        self.sta_list = []
        for fname in ttimes_fnames:
            f = h5py.File(fname, 'r')
            # update the list of ttimes
            ttimes_list.append(np.array(f['ttimes']))
            sta = f['ttimes'].attrs['station']
            f.close()
            # update the dictionary of station names
            self.sta_list.append(sta)
        # stack the ttimes into a numpy array
        self.ttimes_matrix = np.vstack(ttimes_list)
        (self.nsta, self.npts) = self.ttimes_matrix.shape

        # initialize the RtTrace(s)
        ##########################
        max_length = wo.opdict['max_length']
        self.safety_margin = wo.opdict['safety_margin']
        self.dt = wo.opdict['dt']

        # need a RtTrace per station
        self.obs_rt_list = [RtTrace() for sta in self.sta_list]

        # register pre-processing
        self._register_preprocessing(wo)

        # need nsta streams for each point we test (nsta x npts)
        # for shifted waveforms
        self.point_rt_list=[[RtTrace(max_length=max_length) \
                for ista in xrange(self.nsta)] for ip in xrange(self.npts)]

        # register processing of point-streams here
        for sta_list in self.point_rt_list:
            for rtt in sta_list:
                # This is where we would scale for distance (given pre-calculated
                # distances from each point to every station)
                rtt.registerRtProcess('scale', factor=1.0)

        # need npts streams to store the point-stacks
        self.stack_list = [
            RtTrace(max_length=max_length) for ip in xrange(self.npts)
        ]

        # register stack processing here
        for rtt in self.stack_list:
            # This is where we would add or lower weights if we wanted to
            rtt.registerRtProcess('scale', factor=1.0)

        # need 4 output streams (max, x, y, z)
        self.max_out = RtTrace()
        self.x_out = RtTrace()
        self.y_out = RtTrace()
        self.z_out = RtTrace()

        if not wo.is_syn:
            self.max_out.registerRtProcess('boxcar', width=50)

        # need a list of common start-times
        self.last_common_end_stack = [
            UTCDateTime(1970, 1, 1) for i in xrange(self.npts)
        ]
        self.last_common_end_max = UTCDateTime(1970, 1, 1)

    def _register_preprocessing(self, waveloc_options):
        wo = waveloc_options

        # if this is a synthetic
        if wo.is_syn:
            # do dummy processing only
            for rtt in self.obs_rt_list:
                rtt.registerRtProcess('scale', factor=1.0)

        else:
            # get gaussian filtering parameters
            f0, sigma, dt = wo.gauss_filter
            gauss, self.filter_shift = gaussian_filter(f0, sigma, dt)
            # get kwin
            # for now just use one window
            kwin = wo.opdict['kwin']
            # register pre-processing of data here
            for rtt in self.obs_rt_list:
                rtt.registerRtProcess('convolve', conv_signal=gauss)
                rtt.registerRtProcess('sw_kurtosis', win=kwin)
                rtt.registerRtProcess('boxcar', width=50)
                rtt.registerRtProcess('differentiate')
                rtt.registerRtProcess('neg_to_zero')

    def updateData(self, tr_list):
        """
        Adds a list of traces (one per station) to the system
        """
        t_copy = 0.0
        t_append = 0.0
        t_append_proc = 0.0
        t0_update = time.time()
        for tr in tr_list:
            if (self.dt != tr.stats.delta):
                msg = 'Value of dt from options file %.2f does not match dt from data %.2f' % (
                    self.dt, tr.stats.delta)
                raise ValueError(msg)
            # pre-correct for filter_shift
            #tr.stats.starttime -= np.round(self.filter_shift/self.dt) * self.dt
            tr.stats.starttime -= self.filter_shift
            sta = tr.stats.station
            ista = self.sta_list.index(sta)
            # make dtype of data float if it is not already
            tr.data = tr.data.astype(np.float32)
            t0 = time.time()
            pp_data = self.obs_rt_list[ista].append(tr, gap_overlap_check=True)
            t_append_proc += time.time() - t0

            # loop over points
            for ip in xrange(self.npts):
                # do time shift and append
                t0 = time.time()
                pp_data_tmp = pp_data.copy()
                t_copy += time.time() - t0
                pp_data_tmp.stats.starttime -= np.round(
                    self.ttimes_matrix[ista, ip] / self.dt) * self.dt
                t0 = time.time()
                self.point_rt_list[ip][ista].append(pp_data_tmp,
                                                    gap_overlap_check=True)
                t_append += time.time() - t0

        print "In updateData : %.2f s in process and %.2f s in data copy and %.2f s in append and a total of %.2f s" % (
            t_append_proc, t_copy, t_append, time.time() - t0_update)

    def updateStacks(self):

        npts = self.npts

        for ip in xrange(npts):
            self._updateStack(ip)

    def _updateStack(self, ip):
        UTCDateTime.DEFAULT_PRECISION = 2
        nsta = self.nsta
        # get common start-time for this point
        common_start=max([self.point_rt_list[ip][ista].stats.starttime \
                 for ista in xrange(nsta)])
        common_start = max(common_start, self.last_common_end_stack[ip])
        # get list of stations for which the end-time is compatible
        # with the common_start time and the safety buffer
        ista_ok = [
            ista for ista in xrange(nsta)
            if (self.point_rt_list[ip][ista].stats.endtime -
                common_start) > self.safety_margin
        ]
        # get common end-time
        common_end = min(
            [self.point_rt_list[ip][ista].stats.endtime for ista in ista_ok])
        self.last_common_end_stack[ip] = common_end + self.dt
        # stack
        c_list = []
        for ista in ista_ok:
            tr = self.point_rt_list[ip][ista].copy()
            tr.trim(common_start, common_end)
            c_list.append(np.array(tr.data[:]))
        tr_common = np.vstack(c_list)
        # prepare trace for passing up
        stack_data = np.sum(tr_common, axis=0)
        stats={'station':'STACK', 'npts':len(stack_data), 'delta':self.dt, \
                'starttime':common_start}
        tr = Trace(data=stack_data, header=stats)
        #import pdb; pdb.set_trace()
        # append to appropriate stack_list
        self.stack_list[ip].append(tr, gap_overlap_check=True)

    def updateMax(self):

        npts = self.npts
        nsta = self.nsta

        # now extract maximum etc from stacks
        # get common start-time for this point
        common_start=max([self.stack_list[ip].stats.starttime \
                    for ip in xrange(npts)])
        common_start = max(common_start, self.last_common_end_max)
        # get list of points for which the end-time is compatible
        # with the common_start time and the safety buffer
        ip_ok = [
            ip for ip in xrange(npts) if (self.stack_list[ip].stats.endtime -
                                          common_start) > self.safety_margin
        ]
        common_end = min([self.stack_list[ip].stats.endtime for ip in ip_ok])
        self.last_common_end_max = common_end + self.dt
        # stack
        c_list = []
        for ip in ip_ok:
            tr = self.stack_list[ip].copy()
            tr.trim(common_start, common_end)
            c_list.append(tr.data)
        tr_common = np.vstack(c_list)
        # get maximum and the corresponding point
        max_data = np.max(tr_common, axis=0)
        argmax_data = np.argmax(tr_common, axis=0)
        # prepare traces for passing up
        # max
        stats={'station':'Max', 'npts':len(max_data), 'delta':self.dt, \
                'starttime':common_start}
        tr_max = Trace(data=max_data, header=stats)
        self.max_out.append(tr_max, gap_overlap_check=True)
        # x coordinate
        stats['station'] = 'xMax'
        tr_x = Trace(data=self.x[argmax_data], header=stats)
        self.x_out.append(tr_x, gap_overlap_check=True)
        # y coordinate
        stats['station'] = 'yMax'
        tr_y = Trace(data=self.y[argmax_data], header=stats)
        self.y_out.append(tr_y, gap_overlap_check=True)
        # z coordinate
        stats['station'] = 'zMax'
        tr_z = Trace(data=self.z[argmax_data], header=stats)
        self.z_out.append(tr_z, gap_overlap_check=True)
Example #32
    def test_rt_kurtosis_dec(self):

        win = 5.0

        data_trace = self.data_trace_filt.copy()
        data_trace_dec = self.data_trace_filt.copy()
        # no need to filter as we're using a pre-filtered trace
        data_trace_dec.decimate(5, no_filter=True)

        rt_trace = RtTrace()
        rt_dec = RtTrace()
        rt_trace.registerRtProcess('kurtosis', win=win)
        rt_dec.registerRtProcess('kurtosis', win=win)

        rt_trace.append(data_trace, gap_overlap_check=True)
        rt_dec.append(data_trace_dec, gap_overlap_check=True)

        newtr = rt_trace.copy()
        newtr.decimate(5, no_filter=True)

        #assert_array_almost_equal(rt_dec.data, newtr.data, 0)
        diff = (np.max(rt_dec.data) - np.max(newtr.data)) / np.max(rt_dec.data)
        self.assertAlmostEquals(np.abs(diff), 0.0, 2)
Example #33
    def test_kwin_bank(self):
        win_list = [1.0, 3.0, 9.0]
        n_win = len(win_list)

        data_trace = self.data_trace.copy()

        sigma = float(np.std(data_trace.data))
        fact = 1 / sigma

        # One RtTrace for processing before the kurtosis
        rt_trace = RtTrace()
        rt_trace.registerRtProcess('scale', factor=fact)

        # One RtTrace per kurtosis window
        kurt_traces = []
        for i in xrange(n_win):
            rtt = RtTrace()
            rtt.registerRtProcess('kurtosis', win=win_list[i])
            kurt_traces.append(rtt)

        # One RtTrace for post-processing the max kurtosis window
        max_kurt = RtTrace()
        max_kurt.registerRtProcess('differentiate')
        max_kurt.registerRtProcess('neg_to_zero')

        for tr in self.traces:
            # prepare memory for kurtosis
            kurt_tr = tr.copy()
            # do initial processing
            proc_trace = rt_trace.append(tr, gap_overlap_check=True)
            kurt_output = []
            for i in xrange(n_win):
                # pass output of initial processing to the kwin bank
                ko = kurt_traces[i].append(proc_trace, gap_overlap_check=True)
                # append the output to the kurt_output list
                kurt_output.append(ko.data)
            # stack the output of the kwin bank and find maximum
            kurt_stack = np.vstack(tuple(kurt_output))
            kurt_tr.data = np.max(kurt_stack, axis=0)
            # append to the max_kurt RtTrace for post-processing
            max_kurt.append(kurt_tr)
Example #34
    def __init__(self, waveloc_options):
        """
        Initialize from a set of travel-times as hdf5 files
        """
        wo = waveloc_options
        # initialize the travel-times
        #############################
        ttimes_fnames = glob.glob(wo.ttimes_glob)
        # get basic lengths
        f = h5py.File(ttimes_fnames[0], 'r')
        # copy the x, y, z data over
        self.x = np.array(f['x'][:])
        self.y = np.array(f['y'][:])
        self.z = np.array(f['z'][:])
        f.close()
        # read the files
        ttimes_list = []
        self.sta_list = []
        for fname in ttimes_fnames:
            f = h5py.File(fname, 'r')
            # update the list of ttimes
            ttimes_list.append(np.array(f['ttimes']))
            sta = f['ttimes'].attrs['station']
            f.close()
            # update the dictionary of station names
            self.sta_list.append(sta)
        # stack the ttimes into a numpy array
        self.ttimes_matrix = np.vstack(ttimes_list)
        (self.nsta, self.npts) = self.ttimes_matrix.shape

        # initialize the RtTrace(s)
        ##########################
        max_length = wo.opdict['max_length']
        self.safety_margin = wo.opdict['safety_margin']
        self.dt = wo.opdict['dt']

        # need a RtTrace per station
        self.obs_rt_list = [RtTrace() for sta in self.sta_list]

        # register pre-processing
        self._register_preprocessing(wo)

        # need nsta streams for each point we test (nsta x npts)
        # for shifted waveforms
        self.point_rt_list=[[RtTrace(max_length=max_length) \
                for ista in xrange(self.nsta)] for ip in xrange(self.npts)]

        # register processing of point-streams here
        for sta_list in self.point_rt_list:
            for rtt in sta_list:
                # This is where we would scale for distance (given pre-calculated
                # distances from each point to every station)
                rtt.registerRtProcess('scale', factor=1.0)

        # need npts streams to store the point-stacks
        self.stack_list = [
            RtTrace(max_length=max_length) for ip in xrange(self.npts)
        ]

        # register stack processing here
        for rtt in self.stack_list:
            # This is where we would add or lower weights if we wanted to
            rtt.registerRtProcess('scale', factor=1.0)

        # need 4 output streams (max, x, y, z)
        self.max_out = RtTrace()
        self.x_out = RtTrace()
        self.y_out = RtTrace()
        self.z_out = RtTrace()

        if not wo.is_syn:
            self.max_out.registerRtProcess('boxcar', width=50)

        # need a list of common start-times
        self.last_common_end_stack = [
            UTCDateTime(1970, 1, 1) for i in xrange(self.npts)
        ]
        self.last_common_end_max = UTCDateTime(1970, 1, 1)
Example #35
    def test_rt_mean(self):

        win = 0.05

        data_trace = self.data_trace.copy()

        rt_single = RtTrace()
        rt_trace = RtTrace()
        rt_trace.registerRtProcess('mean', win=win)
        rt_single.registerRtProcess('mean', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)
        rt_single.append(data_trace, gap_overlap_check=True)

        newtr = self.data_trace.copy()
        newtr.data = newtr.data - rt_trace.data
        assert_array_almost_equal(rt_single, rt_trace)
        self.assertAlmostEqual(np.mean(newtr.data), 0.0, 0)
Example #36
class RealTimeSignalTestCase(unittest.TestCase):
    """
    The obspy.realtime.signal test suite.
    """
    @classmethod
    def setUpClass(cls):
        # read test data as float64
        cls.orig_trace = read(os.path.join(os.path.dirname(__file__), 'data',
                                           'II.TLY.BHZ.SAC'),
                              dtype=np.float64)[0]
        # make really sure test data is float64
        cls.orig_trace.data = np.require(cls.orig_trace.data, np.float64)
        cls.orig_trace_chunks = cls.orig_trace / NUM_PACKETS

    def setUp(self):
        # clear results
        self.filt_trace_data = None
        self.rt_trace = None
        self.rt_appended_traces = []

    def tearDown(self):
        # use results for debug plots if enabled
        if PLOT_TRACES and self.filt_trace_data is not None and \
           self.rt_trace is not None and self.rt_appended_traces:
            self._plot_results()

    def test_square(self):
        """
        Testing np.square function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = np.square(trace)
        # filtering real time
        process_list = [(np.square, {})]
        self._run_rt_process(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_integrate(self):
        """
        Testing integrate function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = signal.integrate(trace)
        # filtering real time
        process_list = [('integrate', {})]
        self._run_rt_process(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_differentiate(self):
        """
        Testing differentiate function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = signal.differentiate(trace)
        # filtering real time
        process_list = [('differentiate', {})]
        self._run_rt_process(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_boxcar(self):
        """
        Testing boxcar function.
        """
        trace = self.orig_trace.copy()
        options = {'width': 500}
        # filtering manual
        self.filt_trace_data = signal.boxcar(trace, **options)
        # filtering real time
        process_list = [('boxcar', options)]
        self._run_rt_process(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertAlmostEqual(peak, 566974.214, 3)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_scale(self):
        """
        Testing scale function.
        """
        trace = self.orig_trace.copy()
        options = {'factor': 1000}
        # filtering manual
        self.filt_trace_data = signal.scale(trace, **options)
        # filtering real time
        process_list = [('scale', options)]
        self._run_rt_process(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertEqual(peak, 1045237000.0)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_offset(self):
        """
        Testing offset function.
        """
        trace = self.orig_trace.copy()
        options = {'offset': 500}
        # filtering manual
        self.filt_trace_data = signal.offset(trace, **options)
        # filtering real time
        process_list = [('offset', options)]
        self._run_rt_process(process_list)
        # check results
        diff = self.rt_trace.data - self.orig_trace.data
        self.assertEqual(np.mean(diff), 500)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_kurtosis(self):
        """
        Testing kurtosis function.
        """
        trace = self.orig_trace.copy()
        options = {'win': 5}
        # filtering manual
        self.filt_trace_data = signal.kurtosis(trace, **options)
        # filtering real time
        process_list = [('kurtosis', options)]
        self._run_rt_process(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_abs(self):
        """
        Testing np.abs function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = np.abs(trace)
        # filtering real time
        process_list = [(np.abs, {})]
        self._run_rt_process(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertEqual(peak, 1045237)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_tauc(self):
        """
        Testing tauc function.
        """
        trace = self.orig_trace.copy()
        options = {'width': 60}
        # filtering manual
        self.filt_trace_data = signal.tauc(trace, **options)
        # filtering real time
        process_list = [('tauc', options)]
        self._run_rt_process(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertAlmostEqual(peak, 114.302, 3)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_mwp_integral(self):
        """
        Testing mwpintegral functions.
        """
        trace = self.orig_trace.copy()
        options = {
            'mem_time': 240,
            'ref_time': trace.stats.starttime + 301.506,
            'max_time': 120,
            'gain': 1.610210e+09
        }
        # filtering manual
        self.filt_trace_data = signal.mwpintegral(self.orig_trace.copy(),
                                                  **options)
        # filtering real time
        process_list = [('mwpintegral', options)]
        self._run_rt_process(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_mwp(self):
        """
        Testing Mwp calculation using two processing functions.
        """
        trace = self.orig_trace.copy()
        epicentral_distance = 30.0855
        options = {
            'mem_time': 240,
            'ref_time': trace.stats.starttime + 301.506,
            'max_time': 120,
            'gain': 1.610210e+09
        }
        # filtering manual
        trace.data = signal.integrate(trace)
        self.filt_trace_data = signal.mwpintegral(trace, **options)
        # filtering real time
        process_list = [('integrate', {}), ('mwpintegral', options)]
        self._run_rt_process(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        mwp = signal.calculate_mwp_mag(peak, epicentral_distance)
        self.assertAlmostEqual(mwp, 8.78902911791, 5)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_combined(self):
        """
        Testing combining integrate and differentiate functions.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        trace.data = signal.integrate(trace)
        self.filt_trace_data = signal.differentiate(trace)
        # filtering real time
        process_list = [('int', {}), ('diff', {})]
        self._run_rt_process(process_list)
        # check results
        trace = self.orig_trace.copy()
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)
        np.testing.assert_almost_equal(trace.data[1:], self.rt_trace.data[1:])
        np.testing.assert_almost_equal(trace.data[1:],
                                       self.filt_trace_data[1:])

    def _run_rt_process(self, process_list, max_length=None):
        """
        Helper function to create a RtTrace, register all given process
        functions and run the real time processing.
        """
        # assemble real time trace
        self.rt_trace = RtTrace(max_length=max_length)

        for (process, options) in process_list:
            self.rt_trace.register_rt_process(process, **options)

        # append packet data to RtTrace
        self.rt_appended_traces = []
        for trace in self.orig_trace_chunks:
            # process single trace
            result = self.rt_trace.append(trace, gap_overlap_check=True)
            # add to list of appended traces
            self.rt_appended_traces.append(result)

    def _plot_results(self):
        """
        Plots original, filtered original and real time processed traces into
        a single plot.
        """
        # plot only if test is started manually
        if __name__ != '__main__':
            return
        # create empty stream
        st = Stream()
        st.label = self._testMethodName
        # original trace
        self.orig_trace.label = "Original Trace"
        st += self.orig_trace
        # use header information of original trace with filtered trace data
        tr = self.orig_trace.copy()
        tr.data = self.filt_trace_data
        tr.label = "Filtered original Trace"
        st += tr
        # real processed chunks
        for i, tr in enumerate(self.rt_appended_traces):
            tr.label = "RT Chunk %02d" % (i + 1)
            st += tr
        # real time processed trace
        self.rt_trace.label = "RT Trace"
        st += self.rt_trace
        st.plot(automerge=False, color='blue', equal_scale=False)
Example #37
 def test_registerRtProcess(self):
     """
     Testing register_rt_process method.
     """
     tr = RtTrace()
     # 1 - function call
     tr.register_rt_process(np.abs)
     self.assertEqual(tr.processing, [(np.abs, {}, None)])
     # 2 - predefined RT processing algorithm
     tr.register_rt_process('integrate', test=1, muh='maeh')
     self.assertEqual(tr.processing[1][0], 'integrate')
     self.assertEqual(tr.processing[1][1], {'test': 1, 'muh': 'maeh'})
     self.assertTrue(isinstance(tr.processing[1][2][0], RtMemory))
     # 3 - contained name of predefined RT processing algorithm
     tr.register_rt_process('in')
     self.assertEqual(tr.processing[2][0], 'integrate')
     tr.register_rt_process('integ')
     self.assertEqual(tr.processing[3][0], 'integrate')
     tr.register_rt_process('integr')
     self.assertEqual(tr.processing[4][0], 'integrate')
     # 4 - unknown functions
     self.assertRaises(NotImplementedError,
                       tr.register_rt_process, 'integrate2')
     self.assertRaises(NotImplementedError, tr.register_rt_process, 'xyz')
     # 5 - module instead of function
     self.assertRaises(NotImplementedError, tr.register_rt_process, np)
     # check number of all processing steps within RtTrace
     self.assertEqual(len(tr.processing), 5)
     # check tr.stats.processing
     self.assertEqual(len(tr.stats.processing), 5)
     self.assertTrue(tr.stats.processing[0].startswith("realtime_process"))
     self.assertIn('absolute', tr.stats.processing[0])
     for i in range(1, 5):
         self.assertIn('integrate', tr.stats.processing[i])
     # check kwargs
     self.assertIn("maeh", tr.stats.processing[1])
Example #38
 def test_register_rt_process(self):
     """
     Testing register_rt_process method.
     """
     tr = RtTrace()
     # 1 - function call
     tr.register_rt_process(np.abs)
     self.assertEqual(tr.processing, [(np.abs, {}, None)])
     # 2 - predefined RT processing algorithm
     tr.register_rt_process('integrate', test=1, muh='maeh')
     self.assertEqual(tr.processing[1][0], 'integrate')
     self.assertEqual(tr.processing[1][1], {'test': 1, 'muh': 'maeh'})
     self.assertTrue(isinstance(tr.processing[1][2][0], RtMemory))
     # 3 - contained name of predefined RT processing algorithm
     tr.register_rt_process('in')
     self.assertEqual(tr.processing[2][0], 'integrate')
     tr.register_rt_process('integ')
     self.assertEqual(tr.processing[3][0], 'integrate')
     tr.register_rt_process('integr')
     self.assertEqual(tr.processing[4][0], 'integrate')
     # 4 - unknown functions
     self.assertRaises(NotImplementedError, tr.register_rt_process,
                       'integrate2')
     self.assertRaises(NotImplementedError, tr.register_rt_process, 'xyz')
     # 5 - module instead of function
     self.assertRaises(NotImplementedError, tr.register_rt_process, np)
     # check number of all processing steps within RtTrace
     self.assertEqual(len(tr.processing), 5)
     # check tr.stats.processing
     self.assertEqual(len(tr.stats.processing), 5)
     self.assertTrue(tr.stats.processing[0].startswith("realtime_process"))
     self.assertIn('absolute', tr.stats.processing[0])
     for i in range(1, 5):
         self.assertIn('integrate', tr.stats.processing[i])
     # check kwargs
     self.assertIn("maeh", tr.stats.processing[1])
Example #39
 def test_missing_or_wrong_argument_in_rt_process(self):
     """
     Tests handling of missing/wrong arguments.
     """
     trace = Trace(np.arange(100))
     # 1- function scale needs no additional arguments
     rt_trace = RtTrace()
     rt_trace.register_rt_process('scale')
     rt_trace.append(trace)
     # adding arbitrary arguments should fail
     rt_trace = RtTrace()
     rt_trace.register_rt_process('scale', muh='maeh')
     self.assertRaises(TypeError, rt_trace.append, trace)
     # 2- function tauc has one required argument
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc', width=10)
     rt_trace.append(trace)
     # wrong argument should fail
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc', xyz='xyz')
     self.assertRaises(TypeError, rt_trace.append, trace)
     # missing argument width should raise an exception
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc')
     self.assertRaises(TypeError, rt_trace.append, trace)
     # adding arbitrary arguments should fail
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc', width=20, notexistingoption=True)
     self.assertRaises(TypeError, rt_trace.append, trace)
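
For orientation, here is a minimal end-to-end sketch of the pattern these tests exercise: register processing steps on an RtTrace, then append packet-sized chunks as they arrive. This is an illustration, not part of the test suite; it assumes the snake_case obspy.realtime API used above, and the synthetic data and chunk count are arbitrary.

import numpy as np
from obspy import Trace
from obspy.realtime import RtTrace

# synthetic trace, split into equal-length chunks (Trace division, as in "tr / 3")
tr = Trace(data=np.arange(1000, dtype=np.float64))
chunks = tr / 4

rt_trace = RtTrace()
rt_trace.register_rt_process(np.abs)        # plain NumPy callable
rt_trace.register_rt_process('integrate')   # predefined RT processing algorithm

for chunk in chunks:
    # each append returns the processed chunk; rt_trace.data holds the full result
    processed = rt_trace.append(chunk, gap_overlap_check=True)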
Example #40
    def test_rt_kurtosis_dec(self):

        win = 5.0

        data_trace = self.data_trace_filt.copy()
        data_trace_dec = self.data_trace_filt.copy()
        # no need to filter as we're using a pre-filtered trace
        data_trace_dec.decimate(5, no_filter=True)

        rt_trace = RtTrace()
        rt_dec = RtTrace()
        rt_trace.registerRtProcess('kurtosis', win=win)
        rt_dec.registerRtProcess('kurtosis', win=win)

        rt_trace.append(data_trace, gap_overlap_check=True)
        rt_dec.append(data_trace_dec, gap_overlap_check=True)

        newtr = rt_trace.copy()
        newtr.decimate(5, no_filter=True)

        # assert_array_almost_equal(rt_dec.data, newtr.data, 0)
        diff = ((np.max(rt_dec.data) - np.max(newtr.data)) /
                np.max(rt_dec.data))
        self.assertAlmostEqual(np.abs(diff), 0.0, 2)
Example #41
 def test_copy(self):
     """
     Testing copy of RtTrace object.
     """
     rtr = RtTrace()
     rtr.copy()
     # register predefined function
     rtr.register_rt_process('integrate', test=1, muh='maeh')
     rtr.copy()
     # register ObsPy function call
     rtr.register_rt_process(obspy.signal.filter.bandpass,
                             freqmin=0,
                             freqmax=1,
                             df=0.1)
     rtr.copy()
     # register NumPy function call
     rtr.register_rt_process(np.square)
     rtr.copy()
Example #42
    def test_sw_kurtosis(self):
        win = 3.0

        data_trace = self.data_trace.copy()

        rt_trace = RtTrace()
        rt_single = RtTrace()

        rt_trace.registerRtProcess('sw_kurtosis', win=win)
        rt_single.registerRtProcess('sw_kurtosis', win=win)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)
        rt_single.append(data_trace)

        diff = self.data_trace.copy()
        diff.data = rt_trace.data - rt_single.data
        self.assertAlmostEqual(np.mean(np.abs(diff)), 0.0)
Example #43
 def test_missingOrWrongArgumentInRtProcess(self):
     """
     Tests handling of missing/wrong arguments.
     """
     trace = Trace(np.arange(100))
     # 1- function scale needs no additional arguments
     rt_trace = RtTrace()
     rt_trace.register_rt_process('scale')
     rt_trace.append(trace)
     # adding arbitrary arguments should fail
     rt_trace = RtTrace()
     rt_trace.register_rt_process('scale', muh='maeh')
     self.assertRaises(TypeError, rt_trace.append, trace)
     # 2- function tauc has one required argument
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc', width=10)
     rt_trace.append(trace)
     # wrong argument should fail
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc', xyz='xyz')
     self.assertRaises(TypeError, rt_trace.append, trace)
     # missing argument width should raise an exception
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc')
     self.assertRaises(TypeError, rt_trace.append, trace)
     # adding arbitrary arguments should fail
     rt_trace = RtTrace()
     rt_trace.register_rt_process('tauc', width=20, notexistingoption=True)
     self.assertRaises(TypeError, rt_trace.append, trace)
Example #44
class RealTimeSignalTestCase(unittest.TestCase):
    """
    The obspy.realtime.signal test suite.
    """
    def __init__(self, *args, **kwargs):
        super(RealTimeSignalTestCase, self).__init__(*args, **kwargs)
        # read test data as float64
        self.orig_trace = read(os.path.join(os.path.dirname(__file__), 'data',
                                            'II.TLY.BHZ.SAC'), dtype='f8')[0]
        # make really sure test data is float64
        self.orig_trace.data = np.require(self.orig_trace.data, 'f8')
        self.orig_trace_chunks = self.orig_trace / NUM_PACKETS

    def setUp(self):
        # clear results
        self.filt_trace_data = None
        self.rt_trace = None
        self.rt_appended_traces = []

    def tearDown(self):
        # use results for debug plots if enabled
        if PLOT_TRACES and self.filt_trace_data is not None and \
           self.rt_trace is not None and self.rt_appended_traces:
            self._plotResults()

    def test_square(self):
        """
        Testing np.square function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = np.square(trace)
        # filtering real time
        process_list = [(np.square, {})]
        self._runRtProcess(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_integrate(self):
        """
        Testing integrate function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = signal.integrate(trace)
        # filtering real time
        process_list = [('integrate', {})]
        self._runRtProcess(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_differentiate(self):
        """
        Testing differentiate function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = signal.differentiate(trace)
        # filtering real time
        process_list = [('differentiate', {})]
        self._runRtProcess(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_boxcar(self):
        """
        Testing boxcar function.
        """
        trace = self.orig_trace.copy()
        options = {'width': 500}
        # filtering manual
        self.filt_trace_data = signal.boxcar(trace, **options)
        # filtering real time
        process_list = [('boxcar', options)]
        self._runRtProcess(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertAlmostEqual(peak, 566974.214, 3)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_scale(self):
        """
        Testing scale function.
        """
        trace = self.orig_trace.copy()
        options = {'factor': 1000}
        # filtering manual
        self.filt_trace_data = signal.scale(trace, **options)
        # filtering real time
        process_list = [('scale', options)]
        self._runRtProcess(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertEqual(peak, 1045237000.0)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_offset(self):
        """
        Testing offset function.
        """
        trace = self.orig_trace.copy()
        options = {'offset': 500}
        # filtering manual
        self.filt_trace_data = signal.offset(trace, **options)
        # filtering real time
        process_list = [('offset', options)]
        self._runRtProcess(process_list)
        # check results
        diff = self.rt_trace.data - self.orig_trace.data
        self.assertEqual(np.mean(diff), 500)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_kurtosis(self):
        """
        Testing kurtosis function.
        """
        trace = self.orig_trace.copy()
        options = {'win': 5}
        # filtering manual
        self.filt_trace_data = signal.kurtosis(trace, **options)
        # filtering real time
        process_list = [('kurtosis', options)]
        self._runRtProcess(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_abs(self):
        """
        Testing np.abs function.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        self.filt_trace_data = np.abs(trace)
        # filtering real time
        process_list = [(np.abs, {})]
        self._runRtProcess(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertEqual(peak, 1045237)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_tauc(self):
        """
        Testing tauc function.
        """
        trace = self.orig_trace.copy()
        options = {'width': 60}
        # filtering manual
        self.filt_trace_data = signal.tauc(trace, **options)
        # filtering real time
        process_list = [('tauc', options)]
        self._runRtProcess(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        self.assertAlmostEqual(peak, 114.302, 3)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_mwpIntegral(self):
        """
        Testing mwpIntegral function.
        """
        trace = self.orig_trace.copy()
        options = {'mem_time': 240,
                   'ref_time': trace.stats.starttime + 301.506,
                   'max_time': 120,
                   'gain': 1.610210e+09}
        # filtering manual
        self.filt_trace_data = signal.mwpIntegral(self.orig_trace.copy(),
                                                  **options)
        # filtering real time
        process_list = [('mwpIntegral', options)]
        self._runRtProcess(process_list)
        # check results
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_mwp(self):
        """
        Testing Mwp calculation using two processing functions.
        """
        trace = self.orig_trace.copy()
        epicentral_distance = 30.0855
        options = {'mem_time': 240,
                   'ref_time': trace.stats.starttime + 301.506,
                   'max_time': 120,
                   'gain': 1.610210e+09}
        # filtering manual
        trace.data = signal.integrate(trace)
        self.filt_trace_data = signal.mwpIntegral(trace, **options)
        # filtering real time
        process_list = [('integrate', {}), ('mwpIntegral', options)]
        self._runRtProcess(process_list)
        # check results
        peak = np.amax(np.abs(self.rt_trace.data))
        mwp = signal.calculateMwpMag(peak, epicentral_distance)
        self.assertAlmostEqual(mwp, 8.78902911791, 5)
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)

    def test_combined(self):
        """
        Testing combining integrate and differentiate functions.
        """
        trace = self.orig_trace.copy()
        # filtering manual
        trace.data = signal.integrate(trace)
        self.filt_trace_data = signal.differentiate(trace)
        # filtering real time
        process_list = [('int', {}), ('diff', {})]
        self._runRtProcess(process_list)
        # check results
        trace = self.orig_trace.copy()
        np.testing.assert_almost_equal(self.filt_trace_data,
                                       self.rt_trace.data)
        np.testing.assert_almost_equal(trace.data[1:], self.rt_trace.data[1:])
        np.testing.assert_almost_equal(trace.data[1:],
                                       self.filt_trace_data[1:])

    def _runRtProcess(self, process_list, max_length=None):
        """
        Helper function to create a RtTrace, register all given process
        functions and run the real time processing.
        """
        # assemble real time trace
        self.rt_trace = RtTrace(max_length=max_length)

        for (process, options) in process_list:
            self.rt_trace.registerRtProcess(process, **options)

        # append packet data to RtTrace
        self.rt_appended_traces = []
        for trace in self.orig_trace_chunks:
            # process single trace
            result = self.rt_trace.append(trace, gap_overlap_check=True)
            # add to list of appended traces
            self.rt_appended_traces.append(result)

    def _plotResults(self):
        """
        Plots original, filtered original and real time processed traces into
        a single plot.
        """
        # plot only if test is started manually
        if __name__ != '__main__':
            return
        # create empty stream
        st = Stream()
        st.label = self._testMethodName
        # original trace
        self.orig_trace.label = "Original Trace"
        st += self.orig_trace
        # use header information of original trace with filtered trace data
        tr = self.orig_trace.copy()
        tr.data = self.filt_trace_data
        tr.label = "Filtered original Trace"
        st += tr
        # real processed chunks
        for i, tr in enumerate(self.rt_appended_traces):
            tr.label = "RT Chunk %02d" % (i + 1)
            st += tr
        # real time processed trace
        self.rt_trace.label = "RT Trace"
        st += self.rt_trace
        st.plot(automerge=False, color='blue', equal_scale=False)
Example #45
    def test_rt_dx2(self):

        win = 10

        data_trace = self.data_trace.copy()

        rt_single = RtTrace()
        rt_trace = RtTrace()
        rt_trace.registerRtProcess('dx2', win=win)
        rt_trace.registerRtProcess('boxcar', width=50)
        rt_single.registerRtProcess('dx2', win=win)
        rt_single.registerRtProcess('boxcar', width=50)

        for tr in self.traces:
            rt_trace.append(tr, gap_overlap_check=True)
        rt_single.append(data_trace, gap_overlap_check=True)

        assert_array_almost_equal(rt_single, rt_trace)
Example #46
    def __init__(self, waveloc_options):
        """
        Initialize from a set of travel-times stored as HDF5 files.
        """
        wo = waveloc_options
        # initialize the travel-times
        #############################
        ttimes_fnames = glob.glob(wo.ttimes_glob)
        # get basic lengths
        f = h5py.File(ttimes_fnames[0], 'r')
        # copy the x, y, z data over
        self.x = np.array(f['x'][:])
        self.y = np.array(f['y'][:])
        self.z = np.array(f['z'][:])
        f.close()
        # read the files
        ttimes_list = []
        self.sta_list = []
        for fname in ttimes_fnames:
            f = h5py.File(fname, 'r')
            # update the list of ttimes
            ttimes_list.append(np.array(f['ttimes']))
            sta = f['ttimes'].attrs['station']
            f.close()
            # update the list of station names
            self.sta_list.append(sta)
        # stack the ttimes into a numpy array
        self.ttimes_matrix = np.vstack(ttimes_list)
        self.nsta, self.npts = self.ttimes_matrix.shape

        # initialize the RtTrace(s)
        ##########################
        max_length = wo.opdict['max_length']
        self.safety_margin = wo.opdict['safety_margin']
        self.dt = wo.opdict['dt']

        # need an RtTrace per station
        self.obs_rt_list = [RtTrace() for sta in self.sta_list]

        # register pre-processing
        self._register_preprocessing(wo)

        # need nsta streams for each point we test (nsta x npts)
        # for shifted waveforms
        self.point_rt_list = [[RtTrace(max_length=max_length)
                               for ista in xrange(self.nsta)]
                              for ip in xrange(self.npts)]

        # register processing of point-streams here
        for sta_list in self.point_rt_list:
            for rtt in sta_list:
                # This is where we would scale for distance (given
                # pre-calculated distances from each point to every station)
                rtt.registerRtProcess('scale', factor=1.0)

        # need npts streams to store the point-stacks
        self.stack_list = [RtTrace(max_length=max_length)
                           for ip in xrange(self.npts)]

        # register stack processing here
        for rtt in self.stack_list:
            # This is where we would add or lower weights if we wanted to
            rtt.registerRtProcess('scale', factor=1.0)

        # need 4 output streams (max, x, y, z)
        self.max_out = RtTrace()
        self.x_out = RtTrace()
        self.y_out = RtTrace()
        self.z_out = RtTrace()

        if not wo.is_syn:
            self.max_out.registerRtProcess('boxcar', width=50)

        # need a list of common start-times
        self.last_common_end_stack = [UTCDateTime(1970, 1, 1)
                                      for i in xrange(self.npts)]
        self.last_common_end_max = UTCDateTime(1970, 1, 1)
Example #47
    def save_wave(self):

        # Fetch a wave from Ring 0
        wave = self.ring2buff.get_wave(0)

        # if wave is empty return
        if wave == {}:
            return

        # Let's try to buffer with Python dictionaries and ObsPy
        name = (wave["station"] + '.' + wave["channel"] + '.' +
                wave["network"] + '.' + wave["location"])

        if name in self.wave_buffer:

            # Determine max samples for buffer
            max_samp = wave["samprate"] * 60 * self.minutes

            # Create a header:
            wavestats = Stats()
            wavestats.station = wave["station"]
            wavestats.network = wave["network"]
            wavestats.channel = wave["channel"]
            wavestats.location = wave["location"]
            wavestats.sampling_rate = wave["samprate"]
            wavestats.starttime = UTCDateTime(wave['startt'])

            # Create a trace
            wavetrace = Trace(header=wavestats)
            wavetrace.data = wave["data"]

            # Try to append data to the buffer; shut down on a gap/overlap.
            try:
                self.wave_buffer[name].append(wavetrace,
                                              gap_overlap_check=True)
            except TypeError as err:
                logger.warning(err)
                self.runs = False
            except:
                # stop the run before re-raising unexpected errors
                self.runs = False
                raise

            # Debug data
            if self.debug:
                logger.info("Station Channel combo is in buffer:")
                logger.info(name)
                logger.info("Size:")
                logger.info(self.wave_buffer[name].count())
                logger.debug("Data:")
                logger.debug(self.wave_buffer[name])

        else:
            # First instance of data in buffer, create a header:
            wavestats = Stats()
            wavestats.station = wave["station"]
            wavestats.network = wave["network"]
            wavestats.channel = wave["channel"]
            wavestats.location = wave["location"]
            wavestats.sampling_rate = wave["samprate"]
            wavestats.starttime = UTCDateTime(wave['startt'])

            # Create a trace
            wavetrace = Trace(header=wavestats)
            wavetrace.data = wave["data"]

            # Create a RTTrace
            rttrace = RtTrace(int(self.minutes * 60))
            self.wave_buffer[name] = rttrace

            # Append data
            self.wave_buffer[name].append(wavetrace, gap_overlap_check=True)

            # Debug data
            if self.debug:
                logger.info("First instance of station/channel:")
                logger.info(name)
                logger.info("Size:")
                logger.info(self.wave_buffer[name].count())
                logger.debug("Data:")
                logger.debug(self.wave_buffer[name])
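
A condensed sketch of the buffering idiom in the example above, with imports spelled out. The names buffer_packet and BUFFER_SECONDS are hypothetical, and the packet dict layout is assumed to match the one used by save_wave; RtTrace's max_length keeps at most that many seconds of data per channel, so each entry behaves like a rolling buffer.

from obspy import Trace, UTCDateTime
from obspy.core.trace import Stats
from obspy.realtime import RtTrace

BUFFER_SECONDS = 5 * 60   # keep at most five minutes per channel (assumed)
wave_buffer = {}          # "STA.CHA.NET.LOC" -> RtTrace

def buffer_packet(wave):
    """Append one packet dict (station, channel, network, location,
    samprate, startt, data) to its per-channel rolling buffer."""
    name = '.'.join([wave["station"], wave["channel"],
                     wave["network"], wave["location"]])
    stats = Stats()
    stats.station = wave["station"]
    stats.network = wave["network"]
    stats.channel = wave["channel"]
    stats.location = wave["location"]
    stats.sampling_rate = wave["samprate"]
    stats.starttime = UTCDateTime(wave["startt"])
    trace = Trace(data=wave["data"], header=stats)
    if name not in wave_buffer:
        wave_buffer[name] = RtTrace(max_length=BUFFER_SECONDS)
    # raises TypeError on gaps/overlaps when gap_overlap_check is True
    wave_buffer[name].append(trace, gap_overlap_check=True)
    return wave_buffer[name]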