def test_mergePreviews(self):
    """
    Tests the merging of Previews.
    """
    # Merging non-preview traces in one Stream object should raise.
    stream = Stream(traces=[Trace(data=np.empty(2)),
                            Trace(data=np.empty(2))])
    self.assertRaises(Exception, merge_previews, stream)
    # Merging an empty Stream should return a new, still empty Stream.
    stream = Stream()
    original_id = id(stream)
    merged = merge_previews(stream)
    self.assertNotEqual(original_id, id(merged))
    self.assertEqual(len(stream.traces), 0)
    # Different sampling rates in one Stream object cause problems.
    first = Trace(data=np.empty(10))
    first.stats.preview = True
    first.stats.sampling_rate = 100
    second = Trace(data=np.empty(10))
    second.stats.preview = True
    stream = Stream(traces=[first, second])
    self.assertRaises(Exception, merge_previews, stream)
    # Different data types should raise.
    first = Trace(data=np.empty(10, dtype=np.int32))
    first.stats.preview = True
    second = Trace(data=np.empty(10, dtype=np.float64))
    second.stats.preview = True
    stream = Stream(traces=[first, second])
    self.assertRaises(Exception, merge_previews, stream)
    # Now some real tests.
    # 1 - fully overlapping previews: expected output is the
    # element-wise maximum of both data arrays.
    first = Trace(data=np.array([1, 2] * 100))
    first.stats.preview = True
    first.stats.starttime = UTCDateTime(500)
    second = Trace(data=np.array([3, 1] * 100))
    second.stats.preview = True
    second.stats.starttime = UTCDateTime(500)
    merged = merge_previews(Stream(traces=[first, second]))
    self.assertEqual(len(merged.traces), 1)
    self.assertEqual(merged[0].stats.starttime, UTCDateTime(500))
    np.testing.assert_array_equal(merged[0].data,
                                  np.array([3, 2] * 100))
    # 2 - previews separated by a gap: the gap is filled with -1.
    first = Trace(data=np.array([1] * 10))
    first.stats.preview = True
    second = Trace(data=np.array([2] * 9))
    second.stats.starttime = second.stats.starttime + 20
    second.stats.preview = True
    merged = merge_previews(Stream(traces=[first, second]))
    self.assertEqual(len(merged.traces), 1)
    self.assertEqual(merged[0].stats.starttime, first.stats.starttime)
    np.testing.assert_array_equal(
        merged[0].data, np.array([1] * 10 + [-1] * 10 + [2] * 9))
def test_merge_previews_2(self):
    """
    Test case for issue #84.
    """
    # Use np.ones rather than np.empty so NumPy does not emit warnings
    # from the max function operating on uninitialized memory.
    first = Trace(data=np.ones(2880))
    first.stats.starttime = UTCDateTime("2010-01-01T00:00:00.670000Z")
    first.stats.delta = 30.0
    first.stats.preview = True
    first.verify()
    second = Trace(data=np.ones(2881))
    second.stats.starttime = UTCDateTime("2010-01-01T23:59:30.670000Z")
    second.stats.delta = 30.0
    second.stats.preview = True
    second.verify()
    combined = Stream([first, second])
    combined.verify()
    # Merge the two adjoining previews.
    merged = merge_previews(combined)
    merged.verify()
    # The result must stay a preview, span both inputs and contain
    # exactly 5760 samples.
    assert merged[0].stats.preview
    assert merged[0].stats.starttime == first.stats.starttime
    assert merged[0].stats.endtime == second.stats.endtime
    assert merged[0].stats.npts == 5760
    assert len(merged[0]) == 5760
def test_mergePreviews2(self):
    """
    Test case for issue #84.
    """
    # Use np.ones rather than np.empty so NumPy does not emit warnings
    # from the max function operating on uninitialized memory.
    first = Trace(data=np.ones(2880))
    first.stats.starttime = UTCDateTime("2010-01-01T00:00:00.670000Z")
    first.stats.delta = 30.0
    first.stats.preview = True
    first.verify()
    second = Trace(data=np.ones(2881))
    second.stats.starttime = UTCDateTime("2010-01-01T23:59:30.670000Z")
    second.stats.delta = 30.0
    second.stats.preview = True
    second.verify()
    combined = Stream([first, second])
    combined.verify()
    # Merge the two adjoining previews.
    merged = merge_previews(combined)
    merged.verify()
    # The result must stay a preview, span both inputs and contain
    # exactly 5760 samples.
    self.assertTrue(merged[0].stats.preview)
    self.assertEqual(merged[0].stats.starttime, first.stats.starttime)
    self.assertEqual(merged[0].stats.endtime, second.stats.endtime)
    self.assertEqual(merged[0].stats.npts, 5760)
    self.assertEqual(len(merged[0]), 5760)
def get_preview(self, trace_ids=None, starttime=None, endtime=None,
                network=None, station=None, location=None, channel=None,
                pad=False):
    """
    Returns the preview trace.

    :param trace_ids: Optional list of trace IDs in
        ``network.station.location.channel`` form; entries that do not
        split into exactly four dot-separated parts are skipped.
        Fixed: default was a mutable ``[]``; ``None`` behaves
        identically because the body only truth-tests the value.
    :param starttime: Window start; anything :class:`UTCDateTime` cannot
        parse falls back to 20 minutes before now.
    :param endtime: Window end; falls back to now on parse failure.
    :param network: SEED network code; ``*``/``?`` wildcards allowed.
    :param station: SEED station code; wildcards allowed.
    :param location: SEED location code; wildcards allowed.
    :param channel: SEED channel code; wildcards allowed.
    :param pad: Passed through to ``Stream.trim``.
    :return: Merged and trimmed Stream of preview traces.
    """
    # build up query
    session = self.session()
    query = session.query(WaveformChannel)
    # Start and end time: fall back to a 20 minute window ending now
    # when the supplied values cannot be parsed. The time filters are
    # applied in ``finally`` so they cover both parse outcomes.
    try:
        starttime = UTCDateTime(starttime)
    except Exception:
        starttime = UTCDateTime() - 60 * 20
    finally:
        query = query.filter(WaveformChannel.endtime > starttime.datetime)
    try:
        endtime = UTCDateTime(endtime)
    except Exception:
        endtime = UTCDateTime()
    finally:
        query = query.filter(WaveformChannel.starttime < endtime.datetime)
    # process arguments
    if trace_ids:
        # filter over trace id list
        trace_filter = or_()
        for trace_id in trace_ids:
            temp = trace_id.split('.')
            if len(temp) != 4:
                # malformed id - skip silently
                continue
            trace_filter.append(
                and_(WaveformChannel.network == temp[0],
                     WaveformChannel.station == temp[1],
                     WaveformChannel.location == temp[2],
                     WaveformChannel.channel == temp[3]))
        if trace_filter.clauses:
            query = query.filter(trace_filter)
    else:
        # filter over network/station/location/channel id
        kwargs = {
            'network': network,
            'station': station,
            'location': location,
            'channel': channel
        }
        for key, value in kwargs.items():
            if value is None:
                continue
            col = getattr(WaveformChannel, key)
            if '*' in value or '?' in value:
                # translate SEED wildcards into SQL LIKE wildcards
                value = value.replace('?', '_')
                value = value.replace('*', '%')
                query = query.filter(col.like(value))
            else:
                query = query.filter(col == value)
    # execute query
    results = query.all()
    session.close()
    # create Stream from the individual previews
    st = Stream()
    for result in results:
        preview = result.get_preview()
        st.append(preview)
    # merge and trim to the requested window
    st = merge_previews(st)
    st.trim(starttime, endtime, pad=pad)
    return st
def get_preview(self, trace_ids=None, starttime=None, endtime=None,
                network=None, station=None, location=None, channel=None,
                pad=False):
    """
    Returns the preview trace.

    :param trace_ids: Optional list of trace IDs in
        ``network.station.location.channel`` form; entries that do not
        split into exactly four dot-separated parts are skipped.
        Fixed: default was a mutable ``[]``; ``None`` behaves
        identically because the body only truth-tests the value.
    :param starttime: Window start; anything :class:`UTCDateTime` cannot
        parse falls back to 20 minutes before now.
    :param endtime: Window end; falls back to now on parse failure.
    :param network: SEED network code; ``*``/``?`` wildcards allowed.
    :param station: SEED station code; wildcards allowed.
    :param location: SEED location code; wildcards allowed.
    :param channel: SEED channel code; wildcards allowed.
    :param pad: Passed through to ``Stream.trim``.
    :return: Merged and trimmed Stream of preview traces.
    """
    # build up query
    session = self.session()
    query = session.query(WaveformChannel)
    # Start and end time: fall back to a 20 minute window ending now
    # when the supplied values cannot be parsed. Fixed: the bare
    # ``except:`` clauses also swallowed SystemExit/KeyboardInterrupt;
    # narrowed to ``except Exception``. The time filters are applied in
    # ``finally`` so they cover both parse outcomes.
    try:
        starttime = UTCDateTime(starttime)
    except Exception:
        starttime = UTCDateTime() - 60 * 20
    finally:
        query = query.filter(WaveformChannel.endtime > starttime.datetime)
    try:
        endtime = UTCDateTime(endtime)
    except Exception:
        endtime = UTCDateTime()
    finally:
        query = query.filter(WaveformChannel.starttime < endtime.datetime)
    # process arguments
    if trace_ids:
        # filter over trace id list
        trace_filter = or_()
        for trace_id in trace_ids:
            temp = trace_id.split('.')
            if len(temp) != 4:
                # malformed id - skip silently
                continue
            trace_filter.append(and_(
                WaveformChannel.network == temp[0],
                WaveformChannel.station == temp[1],
                WaveformChannel.location == temp[2],
                WaveformChannel.channel == temp[3]))
        if trace_filter.clauses:
            query = query.filter(trace_filter)
    else:
        # filter over network/station/location/channel id
        kwargs = {'network': network, 'station': station,
                  'location': location, 'channel': channel}
        for key, value in kwargs.items():
            if value is None:
                continue
            col = getattr(WaveformChannel, key)
            if '*' in value or '?' in value:
                # translate SEED wildcards into SQL LIKE wildcards
                value = value.replace('?', '_')
                value = value.replace('*', '%')
                query = query.filter(col.like(value))
            else:
                query = query.filter(col == value)
    # execute query
    results = query.all()
    session.close()
    # create Stream from the individual previews
    st = Stream()
    for result in results:
        preview = result.get_preview()
        st.append(preview)
    # merge and trim to the requested window
    st = merge_previews(st)
    st.trim(starttime, endtime, pad=pad)
    return st