Example #1
    def __init__(self, year, doy):
        """
        Set which day's midnight (00:00 hours) is used as a day break in the
        testing (to split the test data into two files).
        """
        self.time = UTCDateTime("%d-%03dT00:00:00" % (year, doy))
        sampling_rate = 0.1
        delta = 1 / sampling_rate
        networks = ("AB", "CD")
        stations = ("XYZ", "ZZZ3")
        locations = ("", "00")
        channels = ("HHZ", "HHN", "HHE", "BHZ", "BHN", "BHE")

        self.stream = Stream()
        for net in networks:
            for sta in stations:
                for loc in locations:
                    for cha in channels:
                        tr = Trace(data=np.arange(100, dtype=np.int32),
                                   header=dict(network=net,
                                               station=sta,
                                               location=loc,
                                               channel=cha,
                                               sampling_rate=sampling_rate,
                                               starttime=self.time -
                                               30 * delta))

                        # cut into two seamless traces
                        tr1 = tr.slice(endtime=self.time + 5 * delta)
                        tr2 = tr.slice(starttime=self.time + 6 * delta)
                        self.stream.append(tr1)
                        self.stream.append(tr2)
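For context, here is a minimal standalone sketch of the split-and-rejoin pattern this fixture relies on (assuming ObsPy and NumPy are installed; the break time below is an arbitrary placeholder): a 0.1 Hz trace is cut into two seamless pieces at a chosen break, and adding the pieces back together reproduces the original samples.

import numpy as np
from obspy import Trace, UTCDateTime

# Hypothetical break time; any UTCDateTime on a sample boundary works.
break_time = UTCDateTime("2023-001T00:00:00")
sampling_rate = 0.1
delta = 1 / sampling_rate

tr = Trace(data=np.arange(100, dtype=np.int32),
           header=dict(sampling_rate=sampling_rate,
                       starttime=break_time - 30 * delta))

# Cut into two seamless traces: slice() endpoints are inclusive, so the
# second piece starts one sample after the first one ends.
tr1 = tr.slice(endtime=break_time + 5 * delta)
tr2 = tr.slice(starttime=break_time + 6 * delta)

# Re-joining the pieces reproduces the original trace.
merged = tr1 + tr2
assert merged.stats.npts == tr.stats.npts
np.testing.assert_array_equal(merged.data, tr.data)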
Example #2
    def __init__(self, year, doy, time=None):
        """
        Set which day's midnight (00:00 hours) is used as a day break in the
        testing (to split the test data into two files).

        If `time` is specified it overrides `year` and `doy`.
        """
        if time:
            self.time = time
        else:
            self.time = UTCDateTime("%d-%03dT00:00:00" % (year, doy))
        delta = 1.0 / self.sampling_rate

        self.stream = Stream()
        for net in self.networks:
            for sta in self.stations:
                for loc in self.locations:
                    for cha in self.channels:
                        tr = Trace(
                            data=np.arange(100, dtype=np.int32),
                            header=dict(
                                network=net, station=sta, location=loc,
                                channel=cha, sampling_rate=self.sampling_rate,
                                starttime=self.time - 30 * delta))

                        # cut into two seamless traces
                        tr1 = tr.slice(endtime=self.time + 5 * delta)
                        tr2 = tr.slice(starttime=self.time + 6 * delta)
                        self.stream.append(tr1)
                        self.stream.append(tr2)
Example #3
    def __init__(self, year, doy):
        """
        Set which day's midnight (00:00 hours) is used as a day break in the
        testing (to split the test data into two files).
        """
        self.time = UTCDateTime("%d-%03dT00:00:00" % (year, doy))
        sampling_rate = 0.1
        delta = 1 / sampling_rate
        networks = ("AB", "CD")
        stations = ("XYZ", "ZZZ3")
        locations = ("", "00")
        channels = ("HHZ", "HHN", "HHE", "BHZ", "BHN", "BHE")

        self.stream = Stream()
        for net in networks:
            for sta in stations:
                for loc in locations:
                    for cha in channels:
                        tr = Trace(
                            data=np.arange(100, dtype=np.int32),
                            header=dict(
                                network=net, station=sta, location=loc,
                                channel=cha, sampling_rate=sampling_rate,
                                starttime=self.time - 30 * delta))

                        # cut into two seamless traces
                        tr1 = tr.slice(endtime=self.time + 5 * delta)
                        tr2 = tr.slice(starttime=self.time + 6 * delta)
                        self.stream.append(tr1)
                        self.stream.append(tr2)
Example #4
 def test_slice(self):
     """
     Tests the slicing of trace objects.
     """
     tr = Trace(data=np.arange(10, dtype='int32'))
     mempos = tr.data.ctypes.data
     t = tr.stats.starttime
     tr1 = tr.slice(t + 2, t + 8)
     tr1.data[0] = 10
     self.assertEqual(tr.data[2], 10)
     self.assertEqual(tr.data.ctypes.data, mempos)
     self.assertEqual(tr.data[2:9].ctypes.data, tr1.data.ctypes.data)
     self.assertEqual(tr1.data.ctypes.data - 8, mempos)
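The pointer checks above (an 8-byte offset equals two int32 samples) verify that slice() returns a view onto the parent trace's memory rather than a copy. A minimal sketch of the same property using np.shares_memory, assuming only ObsPy and NumPy:

import numpy as np
from obspy import Trace

tr = Trace(data=np.arange(10, dtype=np.int32))
t = tr.stats.starttime

# slice() hands back a Trace whose data is a view into the parent array ...
tr1 = tr.slice(t + 2, t + 8)
assert np.shares_memory(tr.data, tr1.data)

# ... so writing through the slice is visible in the parent,
tr1.data[0] = 10
assert tr.data[2] == 10

# while an explicit copy() decouples the two.
tr2 = tr.slice(t + 2, t + 8).copy()
tr2.data[0] = 99
assert tr.data[2] == 10

Because the data are shared, call copy() on the slice before modifying it if the parent trace must stay untouched.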
Example #5
 def test_issue317(self):
     """
     Tests times after breaking a stream into parts and merging it again.
     """
     # create a sample trace
     org_trace = Trace(data=np.arange(22487))
     org_trace.stats.starttime = UTCDateTime()
     org_trace.stats.sampling_rate = 0.999998927116
     num_pakets = 10
     # break org_trace into set of contiguous packet data
     traces = []
     packet_length = int(np.size(org_trace.data) / num_pakets)
     delta_time = org_trace.stats.delta
     tstart = org_trace.stats.starttime
     tend = tstart + delta_time * float(packet_length - 1)
     for i in range(num_pakets):
         tr = Trace(org_trace.data, org_trace.stats)
         tr = tr.slice(tstart, tend)
         traces.append(tr)
         tstart = tr.stats.endtime + delta_time
         tend = tstart + delta_time * float(packet_length - 1)
     # reconstruct original trace by adding together packet traces
     sum_trace = traces[0].copy()
     npts = traces[0].stats.npts
     for i in range(1, len(traces)):
         sum_trace = sum_trace.__add__(traces[i].copy(),
                                       method=0,
                                       interpolation_samples=0,
                                       fill_value='latest',
                                       sanity_checks=True)
         # check npts
         self.assertEqual(traces[i].stats.npts, npts)
         self.assertEqual(sum_trace.stats.npts, (i + 1) * npts)
         # check data
         np.testing.assert_array_equal(traces[i].data,
                                       np.arange(i * npts, (i + 1) * npts))
         np.testing.assert_array_equal(sum_trace.data,
                                       np.arange(0, (i + 1) * npts))
         # check delta
         self.assertEqual(traces[i].stats.delta, org_trace.stats.delta)
         self.assertEqual(sum_trace.stats.delta, org_trace.stats.delta)
         # check sampling rates
         self.assertAlmostEqual(traces[i].stats.sampling_rate,
                                org_trace.stats.sampling_rate)
         self.assertAlmostEqual(sum_trace.stats.sampling_rate,
                                org_trace.stats.sampling_rate)
         # check endtimes
         self.assertEqual(traces[i].stats.endtime, sum_trace.stats.endtime)
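A compact sketch of the round trip this test exercises, under the same assumptions (ObsPy and NumPy): a trace with a non-integer sampling rate is cut into contiguous packets with slice() and stitched back together with Trace.__add__ using the same keyword arguments as above, after which the sample count and data match the original.

import numpy as np
from obspy import Trace, UTCDateTime

org = Trace(data=np.arange(100))
org.stats.starttime = UTCDateTime(2020, 1, 1)
org.stats.sampling_rate = 0.999998927116  # non-integer rate, as in the test

# Cut into 10 contiguous packets of 10 samples each.
packets = []
tstart = org.stats.starttime
for _ in range(10):
    tend = tstart + org.stats.delta * 9
    packet = org.slice(tstart, tend)
    packets.append(packet)
    tstart = packet.stats.endtime + org.stats.delta

# Stitch the packets back together.
total = packets[0].copy()
for packet in packets[1:]:
    total = total.__add__(packet, method=0, interpolation_samples=0,
                          fill_value='latest', sanity_checks=True)

assert total.stats.npts == org.stats.npts
np.testing.assert_array_equal(total.data, org.data)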
Example #6
 def test_issue317(self):
     """
     Tests times after breaking a stream into parts and merging it again.
     """
     # create a sample trace
     org_trace = Trace(data=np.arange(22487))
     org_trace.stats.starttime = UTCDateTime()
     org_trace.stats.sampling_rate = 0.999998927116
     num_pakets = 10
     # break org_trace into set of contiguous packet data
     traces = []
     packet_length = int(np.size(org_trace.data) / num_pakets)
     delta_time = org_trace.stats.delta
     tstart = org_trace.stats.starttime
     tend = tstart + delta_time * float(packet_length - 1)
     for i in range(num_pakets):
         tr = Trace(org_trace.data, org_trace.stats)
         tr = tr.slice(tstart, tend)
         traces.append(tr)
         tstart = tr.stats.endtime + delta_time
         tend = tstart + delta_time * float(packet_length - 1)
     # reconstruct original trace by adding together packet traces
     sum_trace = traces[0].copy()
     npts = traces[0].stats.npts
     for i in range(1, len(traces)):
         sum_trace = sum_trace.__add__(traces[i].copy(), method=0,
                                       interpolation_samples=0,
                                       fill_value='latest',
                                       sanity_checks=True)
         # check npts
         self.assertEqual(traces[i].stats.npts, npts)
         self.assertEqual(sum_trace.stats.npts, (i + 1) * npts)
         # check data
         np.testing.assert_array_equal(traces[i].data,
                                       np.arange(i * npts, (i + 1) * npts))
         np.testing.assert_array_equal(sum_trace.data,
                                       np.arange(0, (i + 1) * npts))
         # check delta
         self.assertEqual(traces[i].stats.delta, org_trace.stats.delta)
         self.assertEqual(sum_trace.stats.delta, org_trace.stats.delta)
         # check sampling rates
         self.assertAlmostEqual(traces[i].stats.sampling_rate,
                                org_trace.stats.sampling_rate)
         self.assertAlmostEqual(sum_trace.stats.sampling_rate,
                                org_trace.stats.sampling_rate)
         # check endtimes
         self.assertEqual(traces[i].stats.endtime, sum_trace.stats.endtime)
Example #7
    def add_trace(self, trace: Trace) -> None:
        """
        Add data from an incoming Trace (or TraceBuffer) to this buffer.

        If overlaps occur, new data (from the incoming trace) are kept and
        old data (from self) are replaced.

        Parameters
        ----------
        trace
            New trace to add to the buffer - will be added in place.

        Examples
        --------
        >>> from obspy import Trace, UTCDateTime
        >>> import numpy as np
        >>> trace_buffer = TraceBuffer(
        ...     data=np.arange(10), header=dict(
        ...         station="bob", endtime=UTCDateTime(2018, 1, 1, 0, 0, 1),
        ...         delta=1.),
        ...     maxlen=15)
        >>> print(trace_buffer.data) # doctest: +NORMALIZE_WHITESPACE
        NumpyDeque(data=[-- -- -- -- -- 0 1 2 3 4 5 6 7 8 9], maxlen=15)
        >>> trace = Trace(
        ...     np.arange(7)[::-1], header=dict(
        ...         station="bob", starttime=UTCDateTime(2018, 1, 1), delta=1.))
        >>> trace_buffer.add_trace(trace)
        >>> print(trace_buffer.stats.endtime)
        2018-01-01T00:00:06.000000Z
        >>> print(trace_buffer.data) # doctest: +NORMALIZE_WHITESPACE
        NumpyDeque(data=[0 1 2 3 4 5 6 7 6 5 4 3 2 1 0], maxlen=15)

        Try adding a trace that is longer than the maxlen

        >>> trace = Trace(
        ...     np.arange(20), header=dict(
        ...         station="bob", starttime=trace_buffer.stats.endtime,
        ...         delta=1.))
        >>> print(trace.stats.endtime)
        2018-01-01T00:00:25.000000Z
        >>> trace_buffer.add_trace(trace)
        >>> print(trace_buffer.stats.endtime)
        2018-01-01T00:00:25.000000Z
        >>> print(trace_buffer.data) # doctest: +NORMALIZE_WHITESPACE
        NumpyDeque(data=[5 6 7 8 9 10 11 12 13 14 15 16 17 18 19], maxlen=15)

        Add a trace that starts after the current tracebuffer ends

        >>> trace = Trace(
        ...     np.arange(5), header=dict(
        ...         station="bob", starttime=trace_buffer.stats.endtime + 5,
        ...         delta=1.))
        >>> print(trace.stats.endtime)
        2018-01-01T00:00:34.000000Z
        >>> trace_buffer.add_trace(trace)
        >>> print(trace_buffer.stats.endtime)
        2018-01-01T00:00:34.000000Z
        >>> print(trace_buffer.data) # doctest: +NORMALIZE_WHITESPACE
        NumpyDeque(data=[15 16 17 18 19 -- -- -- -- -- 0 1 2 3 4], maxlen=15)

        Add a trace that starts one sample after the current trace ends

        >>> trace = Trace(
        ...     np.arange(5), header=dict(
        ...         station="bob",
        ...         starttime=trace_buffer.stats.endtime + trace_buffer.stats.delta,
        ...         delta=1.))
        >>> print(trace_buffer.stats.endtime)
        2018-01-01T00:00:34.000000Z
        >>> print(trace.stats.starttime)
        2018-01-01T00:00:35.000000Z
        >>> trace_buffer.add_trace(trace)
        >>> print(trace_buffer.data) # doctest: +NORMALIZE_WHITESPACE
        NumpyDeque(data=[-- -- -- -- -- 0 1 2 3 4 0 1 2 3 4], maxlen=15)
        """
        if isinstance(trace, TraceBuffer):
            trace = trace.trace
        # Check that stats match
        assert self.id == trace.id, "IDs {0} and {1} differ".format(
            self.id, trace.id)
        assert self.stats.sampling_rate == trace.stats.sampling_rate, (
            "Sampling rates {0} and {1} differ".format(
                self.stats.sampling_rate, trace.stats.sampling_rate))
        assert self.stats.calib == trace.stats.calib, (
            "Calibration factors {0} and {1} differ".format(
                self.stats.calib, trace.stats.calib))
        if len(trace) == 0:
            # Nothing to do with an empty trace.
            return
        # Drop data older than our minimum starttime - faster to do this first.
        if trace.stats.starttime < self.stats.starttime:
            trace = trace.slice(starttime=self.stats.starttime)
            if len(trace) == 0:
                return
        # If data are newer in trace than in self.
        if trace.stats.endtime > self.stats.endtime:
            # If there is overlap
            if trace.stats.starttime <= self.stats.endtime:
                old_data = trace.slice(endtime=self.stats.endtime).data
                insert_start = -len(old_data)
                self.data.insert(old_data, insert_start)
                new_data = trace.slice(starttime=self.stats.endtime +
                                       self.stats.delta).data
            # If there is a gap - defined as more than 1.5 samples. Coping with
            # rounding errors in UTCDateTime.
            elif trace.stats.starttime >= self.stats.endtime + (
                    1.5 * self.stats.delta):
                new_data = np.empty(
                    trace.stats.npts +
                    int(self.stats.sampling_rate *
                        (trace.stats.starttime - self.stats.endtime)),
                    dtype=trace.data.dtype)
                mask = np.ones_like(new_data)
                new_data[-trace.stats.npts:] = trace.data
                mask[-trace.stats.npts:] = 0
                new_data = np.ma.masked_array(new_data, mask=mask)
            # Otherwise just extend with the new data.
            else:
                new_data = trace.data
            self.data.extend(new_data)
            self.stats.endtime = trace.stats.endtime
        else:
            # No new times covered - insert old data into array.
            insert_start = (trace.stats.starttime -
                            self.stats.starttime) * self.stats.sampling_rate
            # Cope with small shifts due to sampling time-stamp rounding
            assert abs(insert_start - round(insert_start)) < .1, \
                "Traces are not sampled at the same base time-stamp, {0} != {1}".format(
                    round(insert_start), insert_start)
            self.data.insert(trace.data, int(round(insert_start)))
        self.stats.npts = len(self.data.data)
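The gap branch above pads the incoming samples with a masked lead-in so the buffer stays time-continuous. A self-contained NumPy sketch of that padding, mirroring the arithmetic in the method but with illustrative plain-number times (seconds at 1 Hz) in place of UTCDateTime objects:

import numpy as np

# Illustrative stand-ins for the quantities used above.
sampling_rate = 1.0
buffer_endtime = 10.0    # time of the last sample already in the buffer
trace_starttime = 15.0   # the incoming trace starts well after that
trace_data = np.arange(4)

# Length of the padded block: the masked gap plus the new samples.
n_pad = int(sampling_rate * (trace_starttime - buffer_endtime))
new_data = np.empty(len(trace_data) + n_pad, dtype=trace_data.dtype)
mask = np.ones_like(new_data)

# Real samples go at the end; everything before them stays masked.
new_data[-len(trace_data):] = trace_data
mask[-len(trace_data):] = 0
new_data = np.ma.masked_array(new_data, mask=mask)
# new_data -> [-- -- -- -- -- 0 1 2 3]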
Example #8
def extract_s(taupy_model,
              pickerlist,
              event,
              station_longitude,
              station_latitude,
              stn,
              ste,
              ba,
              win_start=-50,
              win_end=50,
              resample_hz=20,
              bp_freqmins=[0.01, 0.01, 0.5],
              bp_freqmaxs=[1, 2., 5.],
              margin=None,
              max_amplitude=1e8,
              plot_output_folder=None):

    po = event.preferred_origin
    if (not po): return None

    atimes = []
    try:
        atimes = taupy_model.get_travel_times_geo(po.depthkm,
                                                  po.lat,
                                                  po.lon,
                                                  station_latitude,
                                                  station_longitude,
                                                  phase_list=('S', ))
    except Exception:
        return None
    # end try

    if (len(atimes) == 0): return None
    tat = atimes[0].time  # theoretical arrival time

    buffer_start = -10
    buffer_end = 10
    snrtr = None
    try:
        stn = stn.slice(po.utctime + tat + win_start + buffer_start,
                        po.utctime + tat + win_end + buffer_end)
        stn = stn.copy()
        stn.resample(resample_hz)
        stn.detrend('linear')

        if (ste):
            ste = ste.slice(po.utctime + tat + win_start + buffer_start,
                            po.utctime + tat + win_end + buffer_end)
            ste = ste.copy()
            ste.resample(resample_hz)
            ste.detrend('linear')
        # end if

        if (ste):
            if (type(stn[0].data) == np.ndarray
                    and type(ste[0].data) == np.ndarray):
                rc, tc = rotate_ne_rt(stn[0].data, ste[0].data, ba)
                snrtr = Trace(data=tc, header=stn[0].stats)
                snrtr.detrend('linear')
            # end if
        else:
            if (type(stn[0].data) == np.ndarray):
                snrtr = stn[0]
            # end if
        # end if
    except Exception as e:
        return None
    # end try

    if (type(snrtr.data) == np.ndarray):
        if (np.max(snrtr.data) > max_amplitude): return None

        pickslist = []
        snrlist = []
        residuallist = []
        bandindex = -1
        pickerindex = -1
        taper_percentage = float(buffer_end) / float(win_end)

        foundpicks = False
        for i in range(len(bp_freqmins)):
            trc = snrtr.copy()
            trc.taper(max_percentage=taper_percentage, type='hann')
            trc.filter('bandpass',
                       freqmin=bp_freqmins[i],
                       freqmax=bp_freqmaxs[i],
                       corners=4,
                       zerophase=True)
            trc = trc.slice(po.utctime + tat + win_start,
                            po.utctime + tat + win_end)

            for ipicker, picker in enumerate(pickerlist):
                try:
                    scnl, picks, polarity, snr, uncert = picker.picks(trc)

                    for ipick, pick in enumerate(picks):
                        actualArrival = pick - po.utctime
                        residual = actualArrival - tat

                        if ((margin and np.fabs(residual) < margin)
                                or (margin is None)):
                            pickslist.append(pick)

                            plotinfo = None
                            if (plot_output_folder):
                                plotinfo = {
                                    'eventid': event.public_id,
                                    'origintime': po.utctime,
                                    'mag':
                                    event.preferred_magnitude.magnitude_value,
                                    'net': trc.stats.network,
                                    'sta': trc.stats.station,
                                    'phase': 's',
                                    'ppsnr': snr[ipick],
                                    'pickid': ipick,
                                    'outputfolder': plot_output_folder
                                }
                            # end if

                            wab = snrtr.slice(pick - 3, pick + 3)
                            wab_filtered = trc.slice(pick - 3, pick + 3)
                            scales = np.logspace(0.5, 4, 30)
                            cwtsnr, dom_freq, slope_ratio = compute_quality_measures(
                                wab, wab_filtered, scales, plotinfo)
                            snrlist.append(
                                [snr[ipick], cwtsnr, dom_freq, slope_ratio])

                            residuallist.append(residual)
                            bandindex = i
                            pickerindex = ipicker

                            foundpicks = True
                        # end if
                    # end for
                except Exception:
                    continue
                # end try
                if (foundpicks): break
            # end for
            if (foundpicks): break
        # end for

        if (len(pickslist)):
            return pickslist, residuallist, \
                   np.array(snrlist), bandindex, pickerindex
        # end if
    # end if

    return None
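At its core the function predicts a theoretical S arrival with TauPy and then slices the waveform to a window around that arrival, plus a buffer that is later tapered and filtered away. A minimal sketch of that windowing step, assuming ObsPy is installed; the event and station coordinates are placeholders, and st is ObsPy's bundled example stream, so with these times the slice simply comes back empty:

from obspy import read, UTCDateTime
from obspy.taup import TauPyModel

model = TauPyModel(model="iasp91")

# Placeholder event and station parameters.
origin_time = UTCDateTime(2021, 1, 1, 12, 0, 0)
event_depth_km, event_lat, event_lon = 50.0, -20.0, 150.0
station_lat, station_lon = -35.0, 149.0

arrivals = model.get_travel_times_geo(
    event_depth_km, event_lat, event_lon,
    station_lat, station_lon, phase_list=("S",))
if arrivals:
    tat = arrivals[0].time  # theoretical S travel time in seconds

    win_start, win_end = -50, 50        # analysis window around the arrival
    buffer_start, buffer_end = -10, 10  # extra data for tapering/filtering

    st = read()  # stand-in; use the station's waveforms in practice
    windowed = st.slice(origin_time + tat + win_start + buffer_start,
                        origin_time + tat + win_end + buffer_end)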