def test_reduce_stack():
    """stack() should add the sample data of its second argument into the first."""
    # Seismogram: compare against an element-wise sum of saved-original + other
    seis_a = get_live_seismogram()
    seis_b = get_live_seismogram()
    original = np.array(seis_a.data)
    stack(seis_a, seis_b)
    expected = np.add(np.array(original), np.array(seis_b.data))
    for comp in range(3):
        assert np.isclose(seis_a.data[comp], expected[comp]).all()  # fixme

    # TimeSeries
    ts_a = get_live_timeseries()
    ts_b = get_live_timeseries()
    saved = np.array(ts_a.data)
    stack(ts_a, ts_b)
    assert np.isclose(ts_a.data, (np.array(saved) + np.array(ts_b.data))).all()

    # TimeSeriesEnsemble: each member is stacked independently
    tsens_a = get_live_timeseries_ensemble(2)
    tsens_b = get_live_timeseries_ensemble(2)
    tsens_copy = TimeSeriesEnsemble(tsens_a)
    stack(tsens_a, tsens_b)
    for m in range(2):
        summed = np.add(
            np.array(tsens_copy.member[m].data), np.array(tsens_b.member[m].data)
        )
        assert np.isclose(tsens_a.member[m].data, summed).all()

    # SeismogramEnsemble: check every component of every member
    sens_a = get_live_seismogram_ensemble(2)
    sens_b = get_live_seismogram_ensemble(2)
    sens_copy = SeismogramEnsemble(sens_a)
    stack(sens_a, sens_b)
    for m in range(2):
        summed = np.add(
            np.array(sens_copy.member[m].data), np.array(sens_b.member[m].data)
        )
        for comp in range(3):
            assert np.isclose(sens_a.member[m].data[comp], summed[comp]).all()  # fixme
def test_templates_max_similarity():  # fixme seed id problem
    """Verify the MsPASS wrapper agrees with obspy's templates_max_similarity.

    Runs the wrapped function on a TimeSeriesEnsemble and the raw obspy
    function on the equivalent Stream and checks the two similarity values
    agree.
    """
    tse1 = get_live_timeseries_ensemble(3)
    tse2 = get_live_timeseries_ensemble(3)
    st1 = tse1.toStream()
    st2 = tse2.toStream()
    res = templates_max_similarity(tse1, 0, [tse2])
    res2 = obspy.signal.cross_correlation.templates_max_similarity(st1, 0, [st2])
    # BUG FIX: the two values are floats computed through (potentially)
    # different code paths; exact `==` comparison is fragile. Use a
    # tolerant floating-point comparison instead.
    assert np.isclose(res, res2)
def test_reduce_error():
    """logging_helper.reduce must reject mismatched member counts and types."""
    # Ensembles of different sizes -> IndexError
    big_ens = get_live_timeseries_ensemble(3)
    small_ens = get_live_timeseries_ensemble(2)
    with pytest.raises(IndexError) as err:
        logging_helper.reduce(big_ens, small_ens, 'dummy_func', '0')
    assert str(err.value) == (
        "logging_helper.reduce: data1 and data2 have different sizes of member"
    )

    # Mixing an atomic object with an ensemble -> TypeError
    other_ens = get_live_timeseries_ensemble(3)
    atomic = get_live_timeseries()
    with pytest.raises(TypeError) as ex:
        logging_helper.reduce(atomic, other_ens, 'dummy_func', '0')
    assert str(ex.value) == (
        "logging_helper.reduce: data2 has a different type as data1"
    )
def test_info_new_map():
    """logging_helper.info must append exactly one history stage per call."""
    # Atomic objects: Seismogram and TimeSeries
    seis = get_live_seismogram()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, 'dummy_func', '1')
    assert seis.number_of_stages() == 1

    ts = get_live_timeseries()
    assert ts.number_of_stages() == 0
    logging_helper.info(ts, 'dummy_func', '1')
    assert ts.number_of_stages() == 1

    # Ensemble without a member index: every member gets the new stage
    seis_e = get_live_seismogram_ensemble(3)
    logging_helper.info(seis_e, 'dummy_func', '0')
    for member in seis_e.member:
        assert member.number_of_stages() == 1

    # Ensemble with an explicit member index: only that member is updated
    seis_e = get_live_seismogram_ensemble(3)
    logging_helper.info(seis_e, 'dummy_func', '0', 0)
    assert seis_e.member[0].number_of_stages() == 1

    tse = get_live_timeseries_ensemble(3)
    logging_helper.info(tse, 'dummy_func', '0', 0)
    assert tse.member[0].number_of_stages() == 1
def test_TimeSeriesEnsemble_as_Stream():
    """Round-trip a TimeSeriesEnsemble through obspy Stream and back.

    Verifies sample data survive the conversion, dead members stay dead
    (with empty data), and ensemble-level Metadata are carried through.
    """
    ensemble = get_live_timeseries_ensemble(3)
    stream = ensemble.toStream()
    assert len(ensemble.member) == len(stream)
    roundtrip = stream.toTimeSeriesEnsemble()
    assert len(ensemble) == len(roundtrip)
    for idx in range(3):
        assert np.isclose(ensemble.member[idx].data, roundtrip.member[idx].data).all()

    # A killed member must remain dead after conversion
    ensemble.member[0].kill()
    # Attach Ensemble Metadata to verify it gets handled properly
    ensemble.put_string("foo", "bar")
    ensemble.put_double("fake_lat", 22.4)
    ensemble.put_long("fake_evid", 9999)
    stream = ensemble.toStream()
    roundtrip = stream.toTimeSeriesEnsemble()
    for idx in range(3):
        assert ensemble.member[idx].live == roundtrip.member[idx].live
        if ensemble.member[idx].live:
            # the magic 4 here comes from a weird combination of
            # deleting the temp key in the round-tripped copy (-1) + 5
            # attributes the Trace object converter puts back:
            # net, sta, chan, starttime, endtime
            # This test is fragile
            assert len(ensemble.member[idx]) + 4 == len(roundtrip.member[idx])

    # The dead member comes back as an empty object
    assert len(roundtrip.member[0].data) == 0

    # Ensemble metadata must be carried through the round trip
    assert roundtrip["foo"] == "bar"
    assert roundtrip["fake_lat"] == 22.4
    assert roundtrip["fake_evid"] == 9999
def test_timeseries_ensemble_as_stream():
    """Exercise the timeseries_ensemble_as_stream decorator dummies.

    Each dummy converts the ensemble to a Stream and back; the member count
    grows (2 -> 5) and the original sample data must be preserved in the
    first members.
    """
    tse = get_live_timeseries_ensemble(2)
    assert len(tse.member) == 2
    cp = TimeSeriesEnsemble(tse)
    dummy_func_timeseries_ensemble_as_stream(tse)
    assert len(tse.member) == 5
    # BUG FIX: these isclose checks were previously bare expressions —
    # computed and discarded with no `assert`, so they verified nothing.
    assert np.isclose(cp.member[0].data, tse.member[0].data).all()
    assert np.isclose(cp.member[0].data, tse.member[1].data).all()

    # Same check when the ensemble is passed as a keyword argument
    tse = get_live_timeseries_ensemble(2)
    assert len(tse.member) == 2
    cp = TimeSeriesEnsemble(tse)
    dummy_func_timeseries_ensemble_as_stream_2(data=tse)
    assert len(tse.member) == 5
    assert np.isclose(cp.member[0].data, tse.member[0].data).all()
    assert np.isclose(cp.member[0].data, tse.member[1].data).all()
def test_detrend():
    """Smoke-test detrend on every MsPASS type, then verify against obspy."""
    # API smoke tests: all object types and all supported detrend types
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    for target in (ts, seis, tse, seis_e):
        detrend(target, object_history=True, alg_id="0")
    detrend(ts, type="linear", object_history=True, alg_id="0")
    detrend(ts, type="constant", object_history=True, alg_id="0")
    detrend(ts, type="polynomial", order=2, object_history=True, alg_id="0")
    detrend(ts, type="spline", order=2, dspline=1000, object_history=True, alg_id="0")

    # Functionality verification: wrapped result must match raw obspy Trace
    ts = get_live_timeseries()
    trace = obspy.Trace()
    trace.data = np.array(ts.data)
    untouched = np.array(ts.data)
    trace.stats.sampling_rate = 20
    trace.detrend(type="simple")
    detrend(ts, "simple", object_history=True, alg_id="0")
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, trace.data))
    # ...and the data must actually have changed from the original
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, untouched))
def test_all_decorators():
    """End-to-end checks of the stacked decorators on dummy_func_2.

    Covers mspass_func_wrapper argument validation, the four object<->obspy
    converters, history bookkeeping, and the in-place return behavior.
    """
    # test mspass_func_wrapper: non-mspass input is rejected
    with pytest.raises(TypeError) as err:
        dummy_func_2(1)
    assert (
        str(err.value) == "mspass_func_wrapper only accepts mspass object as data input"
    )
    # object_history without alg_id is an error
    with pytest.raises(ValueError) as err:
        seis = get_live_seismogram()
        dummy_func_2(seis, object_history=True)
    assert (
        str(err.value) == "dummy_func_2: object_history was true but alg_id not defined"
    )
    # dryrun short-circuits without touching the data or history
    assert "OK" == dummy_func_2(seis, dryrun=True)
    assert seis.number_of_stages() == 0
    dummy_func_2(seis, object_history=True, alg_id="0")
    assert seis.number_of_stages() == 1

    # test timeseries_as_trace
    ts = get_live_timeseries()
    cp = np.array(ts.data)
    dummy_func_2(ts, object_history=True, alg_id="0")
    assert len(cp) != len(ts.data)
    # BUG FIX: this isclose check was a bare expression (no `assert`),
    # so its result was computed and discarded.
    assert np.isclose([0, 1, 2], ts.data).all()
    assert ts.number_of_stages() == 1

    # test seismogram_as_stream
    seis1 = get_live_seismogram()
    cp1 = np.array(seis1.data[0])
    dummy_func_2(seis1, object_history=True, alg_id="0")
    assert cp1[0] != seis1.data[0, 0]
    assert seis1.data[0, 0] == -1
    assert seis1.number_of_stages() == 1

    # test timeseries_ensemble_as_stream
    tse = get_live_timeseries_ensemble(2)
    cp = TimeSeriesEnsemble(tse)
    dummy_func_2(tse, object_history=True, alg_id="0")
    assert tse.member[0].data[0] == -1
    assert tse.member[0].data[0] != cp.member[0].data[0]
    assert tse.member[0].number_of_stages() == 1

    # test seismogram_ensemble_as_stream
    seis_e = get_live_seismogram_ensemble(2)
    cp = SeismogramEnsemble(seis_e)
    dummy_func_2(seis_e, object_history=True, alg_id="0")
    assert seis_e.member[0].data[0, 0] == -1
    assert seis_e.member[0].data[0, 0] != cp.member[0].data[0, 0]
    assert seis_e.member[0].number_of_stages() == 1

    # test inplace return
    seis1 = get_live_seismogram()
    # upgrade of decorator -> should explicitly pass the positional arguments
    ret = dummy_func_2(seis1, object_history=True, alg_id="0")
    assert seis1 == ret
def test_filter():
    """Smoke-test filter on every MsPASS type, then verify against obspy."""
    # API smoke tests across all object types
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    for target in (ts, seis, tse, seis_e):
        filter(target, "bandpass", freqmin=1, freqmax=5, object_history=True, alg_id="0")
    # Other filter types (history tracking off)
    filter(ts, "bandstop", freqmin=1, freqmax=5)
    filter(ts, "lowpass", freq=1)
    filter(ts, "highpass", freq=1)
    filter(ts, "lowpass_cheby_2", freq=1)  # fixme fix testing warning
    # filter(ts, "lowpass_fir", freq=10) these two types are not supported
    # filter(ts, "remez_fir", freqmin=10, freqmax=20)

    # Functionality verification: wrapped result must match raw obspy Trace
    ts = get_live_timeseries()
    trace = obspy.Trace()
    trace.data = np.array(ts.data)
    untouched = np.array(ts.data)
    trace.stats.sampling_rate = 20
    trace.filter("bandpass", freqmin=1, freqmax=5)
    filter(ts, "bandpass", freqmin=1, freqmax=5, object_history=True, alg_id="0")
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, trace.data))
    # ...and the data must actually have changed from the original
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, untouched))
def test_reduce_functionality():
    """logging_helper.reduce must merge history chains from data2 into data1."""
    # Seismogram: two stages on the source, reduce adds them (+ the reduce
    # node itself) to the target's history tree
    seis = get_live_seismogram()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, 'dummy_func', '1')
    logging_helper.info(seis, 'dummy_func_2', '2')
    assert seis.number_of_stages() == 2
    seis2 = get_live_seismogram()
    assert seis2.number_of_stages() == 0
    logging_helper.reduce(seis2, seis, 'reduce', '3')
    assert len(seis2.get_nodes()) == 3

    # TimeSeries: same pattern
    ts = get_live_timeseries()
    ts2 = get_live_timeseries()
    assert ts.number_of_stages() == 0
    logging_helper.info(ts, 'dummy_func', '1')
    logging_helper.info(ts, 'dummy_func', '2')
    assert ts.number_of_stages() == 2
    logging_helper.reduce(ts2, ts, 'reduce', '3')
    assert len(ts2.get_nodes()) == 3

    # Ensembles: reduce operates member-by-member
    seis_e = get_live_seismogram_ensemble(3)
    seis_e2 = get_live_seismogram_ensemble(3)
    for stage in ('0', '1', '2'):
        logging_helper.info(seis_e, 'dummy_func', stage)
    logging_helper.reduce(seis_e2, seis_e, "reduce", "3")
    for member in seis_e2.member:
        assert len(member.get_nodes()) == 4

    tse = get_live_timeseries_ensemble(3)
    tse2 = get_live_timeseries_ensemble(3)
    for stage in ('0', '1', '2'):
        logging_helper.info(tse, 'dummy_func', stage)
    logging_helper.reduce(tse2, tse, "reduce", "3")
    for member in tse2.member:
        assert len(member.get_nodes()) == 4
def test_interpolate():
    """Smoke-test interpolate on every MsPASS type, then verify against obspy."""
    # API smoke tests across all object types
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    for target in (ts, seis, tse, seis_e):
        interpolate(target, 255, object_history=True, alg_id="0")
    interpolate(ts, 255, method="lanczos", a=20, object_history=True, alg_id="0")
    # Each method gets a fresh TimeSeries so earlier calls don't interfere
    for rate, method in (
        (25, "slinear"),
        (255, "linear"),
        (255, "nearest"),
        (255, "zero"),
    ):
        ts = get_live_timeseries()
        interpolate(ts, rate, method=method, object_history=True, alg_id="0")

    # Functionality verification: wrapped result must match raw obspy Trace
    ts = get_sin_timeseries()
    trace = obspy.Trace()
    trace.data = np.array(ts.data)
    untouched = np.array(ts.data)
    trace.stats.sampling_rate = 20
    trace.interpolate(40, method="linear", npts=500)
    interpolate(ts, 40, method="linear", npts=500, object_history=True, alg_id="0")
    assert len(ts.data) == len(trace.data)
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, trace.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, untouched))
    # Sample interval must reflect the new 40 Hz rate
    assert ts.dt == 1 / 40
def test_interpolate_legacy_history_args():
    """Same coverage as test_interpolate but via the legacy history kwargs.

    BUG FIX: this function was previously also named ``test_interpolate``,
    which silently shadowed the other definition so pytest only ever
    collected and ran one of the two. Renamed so both tests execute.
    This copy exercises the deprecated ``preserve_history``/``instance``
    keyword spelling; if that spelling is no longer supported, delete this
    test rather than renaming it back.
    """
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    interpolate(ts, 255, preserve_history=True, instance='0')
    interpolate(seis, 255, preserve_history=True, instance='0')
    interpolate(tse, 255, preserve_history=True, instance='0')
    interpolate(seis_e, 255, preserve_history=True, instance='0')
    interpolate(ts, 255, method='lanczos', a=20, preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 25, method='slinear', preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 255, method='linear', preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 255, method='nearest', preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 255, method='zero', preserve_history=True, instance='0')

    # functionality verification testing
    ts = get_sin_timeseries()
    tr = obspy.Trace()
    tr.data = np.array(ts.data)
    copy = np.array(ts.data)
    tr.stats.sampling_rate = 20
    tr.interpolate(40, method="linear", npts=500)
    interpolate(ts, 40, method='linear', npts=500, preserve_history=True, instance='0')
    assert len(ts.data) == len(tr.data)
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, tr.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, copy))
    assert ts.dt == 1 / 40
def test_is_input_dead():
    """is_input_dead must report dead only when ALL inputs are dead.

    Each case is checked twice: with the object passed positionally and as
    a keyword argument, since the helper scans both args and kwargs.
    Atomic objects are dead once killed; an ensemble is dead only when
    every member has been killed.
    """
    # IDIOM FIX: replaced `assert False == x` / `assert True == x` with the
    # idiomatic `assert not x` / `assert x` (PEP 8: don't compare booleans
    # with ==). Behavior is unchanged for the boolean results tested here.
    seis = get_live_seismogram()
    assert not is_input_dead(seis)
    assert not is_input_dead(any=seis)
    seis.kill()
    assert is_input_dead(seis)
    assert is_input_dead(any=seis)

    ts = get_live_timeseries()
    assert not is_input_dead(ts)
    assert not is_input_dead(any=ts)
    ts.kill()
    assert is_input_dead(ts)
    assert is_input_dead(any=ts)

    # Ensemble: one dead member is not enough; all three must be killed
    seis_e = get_live_seismogram_ensemble(3)
    assert not is_input_dead(seis_e)
    assert not is_input_dead(any=seis_e)
    seis_e.member[0].kill()
    assert not is_input_dead(seis_e)
    assert not is_input_dead(any=seis_e)
    seis_e.member[1].kill()
    seis_e.member[2].kill()
    assert is_input_dead(seis_e)
    assert is_input_dead(any=seis_e)

    tse = get_live_timeseries_ensemble(3)
    assert not is_input_dead(tse)
    assert not is_input_dead(any=tse)
    tse.member[0].kill()
    assert not is_input_dead(tse)
    assert not is_input_dead(any=tse)
    tse.member[1].kill()
    tse.member[2].kill()
    assert is_input_dead(tse)
    assert is_input_dead(any=tse)
def test_reduce_stack_exception():
    """stack must raise IndexError for ensembles with different member counts."""
    smaller = get_live_timeseries_ensemble(2)
    larger = get_live_timeseries_ensemble(3)
    with pytest.raises(IndexError) as err:
        stack(smaller, larger)
    assert str(err.value) == "data1 and data2 have different sizes of member"