Ejemplo n.º 1
0
 def test_empty_query_returns_empty(self):
     """ An empty query ([] or None) should return an empty Stream """
     stream = obspy.read()
     for query in ([], None):
         result = stream_bulk_split(stream, query)
         assert len(result) == 0
Ejemplo n.º 2
0
 def test_input_from_df(self, bing_pick_bulk, bingham_stream,
                        bingham_dataset):
     """ Ensure bulk can be formed from a dataframe. """
     client = bingham_dataset.waveform_client
     split_streams = stream_bulk_split(bingham_stream, bing_pick_bulk)
     rows = (row for _, row in bing_pick_bulk.iterrows())
     # each split stream should match what the client returns for that row
     for split_st, row in zip(split_streams, rows):
         expected = client.get_waveforms(*row.to_list())
         assert_streams_almost_equal(split_st, expected, allow_off_by_one=True)
Ejemplo n.º 3
0
 def test_empty_stream_returns_empty(self):
     """ An empty stream should also return an empty stream """
     tr = obspy.read()[0]
     start, stop = tr.stats.starttime + 1, tr.stats.endtime - 1
     bulk = [tuple(tr.id.split(".") + [start, stop])]
     # splitting an empty stream yields nothing regardless of bulk args
     out = stream_bulk_split(obspy.Stream(), bulk)
     assert len(out) == 0
Ejemplo n.º 4
0
 def test_fill_value(self):
     """ test for filling values. """
     st = obspy.read()
     bulk = self.get_bulk_from_stream(st, [0], [[-10, -20]])
     filled = stream_bulk_split(st, bulk, fill_value=0)[0]
     assert len(filled) == 1
     # without fill value this would only be 10 sec long
     tr_stats = filled[0].stats
     duration = abs(tr_stats.endtime - tr_stats.starttime)
     assert abs(duration - 20) < 0.1
Ejemplo n.º 5
0
 def test_two_intervals_same_stream(self):
     """ Tests for returning two intervals in the same stream. """
     st = obspy.read()
     # request two 15 s windows of trace 0: the first 15 s and the last 15 s
     bulk = self.get_bulk_from_stream(st, [0, 0], [[0, -15], [15, 0]])
     out = stream_bulk_split(st, bulk)
     # one output stream per bulk request
     assert len(out) == 2
     for st_out in out:
         assert len(st_out) == 1
         stats = st_out[0].stats
         out_duration = stats.endtime - stats.starttime
         # NOTE(review): tolerance of `sampling_rate * 2` seconds looks odd —
         # was two sample periods (2 * stats.delta) intended? Confirm before
         # tightening; as written the tolerance is very loose.
         assert abs(out_duration - 15) <= stats.sampling_rate * 2
Ejemplo n.º 6
0
 def test_no_bulk_matches(self):
     """ Test when multiple bulk parameters don't match any traces. """
     st = obspy.read()
     # a date far from the default stream's window -> no matches possible
     utc = obspy.UTCDateTime("2017-09-18")
     bulk = [tuple(tr.id.split(".") + [utc, utc]) for tr in st]
     out = stream_bulk_split(st, bulk)
     # still one (empty) stream per bulk entry
     assert len(out) == len(bulk)
     assert all(isinstance(item, obspy.Stream) for item in out)
Ejemplo n.º 7
0
 def test_two_inter(self):
     """ Tests for getting data completely contained in available range.  """
     # setup stream and bulk args
     st = obspy.read()
     full_duration = st[0].stats.endtime - st[0].stats.starttime
     bulk = self.get_bulk_from_stream(st, [0, 1], [[5, -5], [5, -5]])
     # request data, check durations
     for result in stream_bulk_split(st, bulk):
         assert len(result) == 1
         meta = result[0].stats
         trimmed_duration = meta.endtime - meta.starttime
         # 5 s trimmed from each end -> 10 s shorter than the full trace
         assert np.isclose(full_duration - trimmed_duration, 10)
Ejemplo n.º 8
0
 def test_stream_bulk_split(self):
     """ Ensure the basic stream to trace works. """
     # get bulk params
     st = obspy.read()
     tr = st[0]
     t1 = tr.stats.starttime + 1
     t2 = tr.stats.endtime - 1
     bulk = [tuple(tr.id.split(".") + [t1, t2])]
     # create traces, check len
     streams = stream_bulk_split(st, bulk)
     assert len(streams) == 1
     # assert trace after trimming is equal to before
     expected = obspy.Stream([tr.trim(starttime=t1, endtime=t2)])
     assert expected == streams[0]
Ejemplo n.º 9
0
 def test_two_overlap(self):
     """
     Tests for when there is an overlap of available data and
     requested data but some data are not available.
     """
     # setup stream and bulk args
     st = obspy.read()
     full_duration = st[0].stats.endtime - st[0].stats.starttime
     bulk = self.get_bulk_from_stream(st, [0, 1], [[-5, -5], [-5, -5]])
     # request data, check durations
     for result in stream_bulk_split(st, bulk):
         assert len(result) == 1
         meta = result[0].stats
         overlap_duration = meta.endtime - meta.starttime
         # only 5 s of the request falls outside the available data
         assert np.isclose(full_duration - overlap_duration, 5)
Ejemplo n.º 10
0
 def _get_st_array(self, waveforms: waveform_clientable_type,
                   preprocess: Callable) -> np.ndarray:
     """
     Return an object array of streams, one for each row in chan info.

     Parameters
     ----------
     waveforms
         Either an obspy Stream or a waveform client exposing
         ``get_waveforms_bulk``.
     preprocess
         Optional callable applied to the stream before splitting
         (skipped when None).

     Raises
     ------
     ValueError
         If any row of ``self.stats`` lacks a start or end time.
     """
     stats = self.stats
     # time windows are required to build the bulk request below
     if (stats["starttime"].isnull() | stats["endtime"].isnull()).any():
         msg = ("Time windows must be assigned to the StatsGroup prior to "
                "TraceGroup creation")
         raise ValueError(msg)
     # get bulk argument and streams
     bulk = self._get_bulk(stats)
     # ensure waveforms is a stream, then get a list of streams
     if not isinstance(waveforms, obspy.Stream):
         waveforms = waveforms.get_waveforms_bulk(bulk)
     # apply preprocessor if provided
     if preprocess is not None:
         waveforms = preprocess(waveforms)
     # one stream per bulk row; allocate an object array directly instead of
     # creating a float array and casting (np.zeros(...).astype(object))
     st_list = stream_bulk_split(waveforms, bulk)
     ar = np.empty(len(st_list), dtype=object)
     ar[:] = st_list
     return ar
Ejemplo n.º 11
0
    def _get_data_and_stats(self, bulk):
        """
        Using a waveform client return an array of data and stats.

        Parameters
        ----------
        bulk
            A sequence of bulk-request tuples matching
            ``BULK_WAVEFORM_COLUMNS`` (seed id parts plus start/end times).

        Returns
        -------
        tuple
            ``(arrays, stats)`` — one data array and one stats dict per
            bulk argument; missing data yields a scalar NaN array and
            stats built from the bulk entry itself.
        """
        waveforms = self.client
        # Get a stream of waveforms.
        if not isinstance(waveforms, obspy.Stream):
            waveforms = self._get_waveforms_bulk(waveforms, bulk)
        # There isn't guaranteed to be a trace for each bulk arg, so use
        # stream_bulk_split to make it so. Use np.nan (np.NaN was removed
        # in NumPy 2.0) as the fill value.
        st_list = stream_bulk_split(waveforms, bulk, fill_value=np.nan)
        # make sure the data are merged together with a sensible fill value
        arrays, stats = [], []
        for st, b in zip(st_list, bulk):
            assert len(st) in {0, 1}, "st should either be empty or len 1"
            if not len(st):  # empty data still needs an entry and stats from bulk
                arrays.append(np.array(np.nan))
                stats.append(dict(zip(BULK_WAVEFORM_COLUMNS, b)))
                continue
            arrays.append(st[0].data)
            stats.append(dict(st[0].stats))

        return arrays, stats