Example #1
def test_RFdecon():
    seis1 = get_live_seismogram(71, 2.0)
    seis1.t0 = -5

    seis2 = get_live_seismogram(71, 2.0)
    seis2.t0 = -5
    for i in range(3):
        for j in range(seis2.npts):
            seis2.data[i, j] = seis1.data[i, j]

    processor = RFdeconProcessor()
    processor.loaddata(seis1)
    processor.loadnoise(seis1, window=True)
    processor.loadwavelet(seis1, window=True)
    result1 = WindowData(seis1, processor.dwin.start, processor.dwin.end)
    for k in range(3):
        processor.loaddata(result1, component=k)
        x = processor.apply()
        for i in range(seis1.npts):
            result1.data[k, i] = x[i]

    result2 = RFdecon(seis2)

    for k in range(3):
        assert all(
            abs(a - b) < 1e-6
            for a, b in zip(result1.data[k], result2.data[k]))
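These snippets are excerpted from the MsPASS test suite and omit their imports. A plausible preamble for the examples in this listing (module paths are guessed from the mspasspy package layout, not taken from the original file; the get_live_*/get_sin_* fixtures come from the suite's local helper module):

import pickle

import numpy as np
import obspy
import pytest

from mspasspy.ccore.seismic import (Seismogram, TimeSeriesEnsemble,
                                    SeismogramEnsemble)
from mspasspy.ccore.utility import MsPASSError
from mspasspy.algorithms.RFdeconProcessor import RFdecon, RFdeconProcessor
from mspasspy.algorithms.window import WindowData
from mspasspy.algorithms.signals import filter, detrend, interpolate
from mspasspy.util import logging_helper
# Other names used below (stack, xcorr_3c, correlate_stream_template, the
# copy helpers, the decorators) come from their own mspasspy modules.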
Example #2
def test_mspass_func_wrapper_multi():
    with pytest.raises(TypeError) as err:
        dummy_func_multi(1, 2)
    assert (str(
        err.value
    ) == "mspass_func_wrapper_multi only accepts mspass object as data input")

    with pytest.raises(ValueError) as err:
        seis1 = get_live_seismogram()
        seis2 = get_live_seismogram()
        dummy_func_multi(seis1, seis2, object_history=True)
    assert (str(err.value) ==
            "dummy_func_multi: object_history was true but alg_id not defined")

    assert "OK" == dummy_func_multi(seis1, seis2, dryrun=True)

    assert seis1.number_of_stages() == 0
    assert seis2.number_of_stages() == 0
    dummy_func_multi(seis1, seis2, object_history=True, alg_id="0")
    assert seis1.number_of_stages() == 1
    assert seis2.number_of_stages() == 1

    seis_e = get_live_seismogram_ensemble(3)
    for i in range(3):
        assert seis_e.member[i].number_of_stages() == 0
    dummy_func_multi(seis1, seis_e, object_history=True, alg_id="0")
    assert seis1.number_of_stages() == 2
    for i in range(3):
        assert seis_e.member[i].number_of_stages() == 1

    # dead inputs make the wrapper return immediately (None here)
    seis1.kill()
    seis2.kill()
    data = dummy_func_multi(seis1, seis2)
    assert data is None
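A plausible shape for the dummy_func_multi helper under test, inferred from the assertions above (a hypothetical reconstruction; only the decorator name is taken from mspasspy.util.decorators):

from mspasspy.util.decorators import mspass_func_wrapper_multi

@mspass_func_wrapper_multi
def dummy_func_multi(data1, data2, *args, **kwargs):
    # The wrapper validates both inputs, records a history stage on each
    # when object_history=True, and returns immediately on dead input.
    return None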
Example #3
def test_reduce_stack():
    seis1 = get_live_seismogram()
    seis2 = get_live_seismogram()
    seis_cp = np.array(seis1.data)
    stack(seis1, seis2)
    res = np.add(np.array(seis_cp), np.array(seis2.data))
    for i in range(3):
        assert np.isclose(seis1.data[i], res[i]).all()  # fixme

    ts1 = get_live_timeseries()
    ts2 = get_live_timeseries()
    ts1_cp = np.array(ts1.data)
    stack(ts1, ts2)
    assert np.isclose(ts1.data, (np.array(ts1_cp) + np.array(ts2.data))).all()

    tse1 = get_live_timeseries_ensemble(2)
    tse2 = get_live_timeseries_ensemble(2)
    tse1_cp = TimeSeriesEnsemble(tse1)
    stack(tse1, tse2)
    for i in range(2):
        assert np.isclose(
            tse1.member[i].data,
            np.add(np.array(tse1_cp.member[i].data),
                   np.array(tse2.member[i].data)),
        ).all()

    seis_e1 = get_live_seismogram_ensemble(2)
    seis_e2 = get_live_seismogram_ensemble(2)
    seis_e1_cp = SeismogramEnsemble(seis_e1)
    stack(seis_e1, seis_e2)
    for i in range(2):
        res = np.add(np.array(seis_e1_cp.member[i].data),
                     np.array(seis_e2.member[i].data))
        for j in range(3):
            assert np.isclose(seis_e1.member[i].data[j], res[j]).all()  # fixme
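The assertions above depend on stack accumulating the second argument's samples into the first in place, which is why each block snapshots the first operand before the call. A minimal NumPy-only check of that invariant (no MsPASS types involved):

import numpy as np

a = np.array([1.0, 2.0, 3.0])
b = np.array([0.5, -0.5, 2.0])
snapshot = a.copy()  # plays the role of seis_cp / ts1_cp above
a += b               # stand-in for stack(a, b)'s in-place sum
assert np.isclose(a, snapshot + b).all()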
Example #4
def test_RFdeconProcessor():
    decon_processor = RFdeconProcessor(alg="MultiTaperXcor")

    seis_data = get_live_seismogram()
    seis_wavelet = get_live_seismogram()
    seis_noise = get_live_seismogram()

    decon_processor.loaddata(seis_data)
    decon_processor.loadwavelet(seis_wavelet)
    decon_processor.loadnoise(seis_noise)

    # decon_processor_copy = pickle.loads(pickle.dumps(decon_processor))
    data = pickle.dumps(decon_processor)
    decon_processor_copy = pickle.loads(data)

    assert (decon_processor.dvector == decon_processor_copy.dvector).all()
    assert (decon_processor.wvector == decon_processor_copy.wvector).all()
    assert (decon_processor.nvector == decon_processor_copy.nvector).all()

    seis_data.npts = 8
    seis_data.data[2] = [1, -1, 0, 0, 0, 0, 0, 0]
    seis_data.data[1] = [0, 1, -1, 0, 0, 0, 0, 0]
    seis_data.data[0] = [0, 0, -1, 1, 0, 0, 0, 0]

    decon_processor = RFdeconProcessor()
    decon_processor.loaddata(seis_data)
    decon_processor.loadwavelet(seis_data)
    decon_processor_copy = pickle.loads(pickle.dumps(decon_processor))
    result1 = np.array(decon_processor.apply())
    result2 = np.array(decon_processor_copy.apply())
    assert all(abs(a - b) < 1e-6 for a, b in zip(result1, result2))
Example #5
def test_xcorr_3c():
    seis1 = get_live_seismogram()
    seis2 = get_live_seismogram()
    st1 = seis1.toStream()
    st2 = seis2.toStream()
    res1 = xcorr_3c(seis1, seis2, 1)
    res2 = obspy.signal.cross_correlation.xcorr_3c(st1, st2, 1)
    assert res1 == res2
Example #6
def test_all_decorators():
    # test mspass_func_wrapper
    with pytest.raises(TypeError) as err:
        dummy_func_2(1)
    assert (str(err.value) ==
            "mspass_func_wrapper only accepts mspass object as data input")

    with pytest.raises(ValueError) as err:
        seis = get_live_seismogram()
        dummy_func_2(seis, object_history=True)
    assert (str(err.value) ==
            "dummy_func_2: object_history was true but alg_id not defined")

    assert "OK" == dummy_func_2(seis, dryrun=True)

    assert seis.number_of_stages() == 0
    dummy_func_2(seis, object_history=True, alg_id="0")
    assert seis.number_of_stages() == 1

    # test timeseries_as_trace
    ts = get_live_timeseries()
    cp = np.array(ts.data)
    dummy_func_2(ts, object_history=True, alg_id="0")
    assert len(cp) != len(ts.data)
    assert np.isclose([0, 1, 2], ts.data).all()
    assert ts.number_of_stages() == 1

    # test seismogram_as_stream
    seis1 = get_live_seismogram()
    cp1 = np.array(seis1.data[0])
    dummy_func_2(seis1, object_history=True, alg_id="0")
    assert cp1[0] != seis1.data[0, 0]
    assert seis1.data[0, 0] == -1
    assert seis1.number_of_stages() == 1

    # test timeseries_ensemble_as_stream
    tse = get_live_timeseries_ensemble(2)
    cp = TimeSeriesEnsemble(tse)
    dummy_func_2(tse, object_history=True, alg_id="0")
    assert tse.member[0].data[0] == -1
    assert tse.member[0].data[0] != cp.member[0].data[0]
    assert tse.member[0].number_of_stages() == 1

    # test seismogram_ensemble_as_stream
    seis_e = get_live_seismogram_ensemble(2)
    cp = SeismogramEnsemble(seis_e)
    dummy_func_2(seis_e, object_history=True, alg_id="0")
    assert seis_e.member[0].data[0, 0] == -1
    assert seis_e.member[0].data[0, 0] != cp.member[0].data[0, 0]
    assert seis_e.member[0].number_of_stages() == 1

    # test inplace return
    seis1 = get_live_seismogram()
    # after the decorator upgrade, positional arguments must be passed explicitly
    ret = dummy_func_2(seis1, object_history=True, alg_id="0")
    assert seis1 == ret
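A plausible reconstruction of dummy_func_2, inferred from the assertions above (hypothetical: the decorator names exist in mspasspy.util.decorators, but the body is a guess that merely reproduces the asserted side effects):

import numpy as np
import obspy
from mspasspy.util.decorators import (mspass_func_wrapper, timeseries_as_trace,
                                      seismogram_as_stream,
                                      timeseries_ensemble_as_stream,
                                      seismogram_ensemble_as_stream)

@mspass_func_wrapper
@timeseries_as_trace
@seismogram_as_stream
@timeseries_ensemble_as_stream
@seismogram_ensemble_as_stream
def dummy_func_2(data, *args, **kwargs):
    if isinstance(data, obspy.Trace):
        data.data = np.array([0, 1, 2])  # seen above as ts.data == [0, 1, 2]
    elif isinstance(data, obspy.Stream):
        data[0].data[0] = -1             # seen above as data[0, 0] == -1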
Example #7
def test_seismogram_as_trace():
    seis1 = get_live_seismogram()
    seis2 = get_live_seismogram()
    cp1 = np.array(seis1.data[0])
    cp2 = np.array(seis2.data[0])
    dummy_func_seismogram_as_stream(seis1, seis2)
    assert cp1[0] != seis1.data[0, 0]
    assert cp2[0] != seis2.data[0, 0]
    assert seis1.data[0, 0] == -1
    assert seis2.data[0, 0] == -1
    assert seis1["test"] == "test"
Example #8
def test_correlate_stream_template():
    tse1 = get_live_seismogram()
    tse2 = get_live_seismogram()
    st1 = tse1.toStream()
    st2 = tse2.toStream()
    res1 = correlate_stream_template(tse1,
                                     tse2,
                                     object_history=True,
                                     alg_id="0")
    res2 = obspy.signal.cross_correlation.correlate_stream_template(st1, st2)
    for i in range(3):
        assert all(
            abs(a - b) < 0.001 for a, b in zip(res1.data[i, :], res2[i].data))
Example #9
def test_copy_helpers():
    ts1 = get_live_timeseries()
    assert ts1.dt != 1 / 255
    ts2 = get_live_timeseries()
    ts2.dt = 1 / 255
    timeseries_copy_helper(ts1, ts2)
    assert ts1.dt == 1 / 255

    seis1 = get_live_seismogram()
    assert seis1.dt != 1 / 255
    seis2 = get_live_seismogram()
    seis2.dt = 1 / 255
    seismogram_copy_helper(seis1, seis2)
    assert seis1.dt == 1 / 255
Example #10
def test_mspass_reduce_func_wrapper():
    ts1 = get_live_timeseries()
    ts1.data[0] = 1
    ts2 = get_live_timeseries()
    logging_helper.info(ts2, "dummy_func", "1")
    logging_helper.info(ts2, "dummy_func_2", "2")
    assert len(ts1.get_nodes()) == 0
    dummy_reduce_func(ts1, ts2, object_history=True, alg_id="3")
    assert ts1.data[0] == -1
    assert len(ts1.get_nodes()) == 3

    with pytest.raises(TypeError) as err:
        dummy_reduce_func([0], [1], object_history=True, alg_id="3")
    assert (str(err.value) ==
            "only mspass objects are supported in reduce wrapped methods")

    with pytest.raises(TypeError) as err:
        dummy_reduce_func(ts1,
                          get_live_seismogram(),
                          object_history=True,
                          alg_id="3")
    assert str(err.value) == "data2 has a different type as data1"

    with pytest.raises(ValueError) as err:
        seis1 = get_live_seismogram()
        seis2 = get_live_seismogram()
        dummy_reduce_func(seis1, seis2, object_history=True)
    assert (str(
        err.value
    ) == "dummy_reduce_func: object_history was true but alg_id not defined")

    assert "OK" == dummy_reduce_func(seis1, seis2, dryrun=True)

    ts1 = get_live_timeseries()
    ts2 = get_live_timeseries()
    assert len(ts1.elog.get_error_log()) == 0
    dummy_reduce_func_runtime(ts1, ts2, object_history=True, alg_id="3")
    assert len(ts1.elog.get_error_log()) == 1
    assert len(ts2.elog.get_error_log()) == 1

    ts1 = get_live_timeseries()
    ts2 = get_live_timeseries()
    assert len(ts1.elog.get_error_log()) == 0
    with pytest.raises(MsPASSError) as err:
        dummy_reduce_func_mspasserror(ts1,
                                      ts2,
                                      object_history=True,
                                      alg_id="3")
    assert str(err.value) == "test"
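A plausible shape for dummy_reduce_func (hypothetical reconstruction; mspass_reduce_func_wrapper is the decorator the asserted error messages point to):

from mspasspy.util.decorators import mspass_reduce_func_wrapper

@mspass_reduce_func_wrapper
def dummy_reduce_func(data1, data2, *args, **kwargs):
    data1.data[0] = -1  # reproduces the ts1.data[0] == -1 assertion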
Example #11
def test_info_new_map():
    # Seismogram and TimeSeries
    seis = get_live_seismogram()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, 'dummy_func', '1')
    assert seis.number_of_stages() == 1

    ts = get_live_timeseries()
    assert ts.number_of_stages() == 0
    logging_helper.info(ts, 'dummy_func', '1')
    assert ts.number_of_stages() == 1

    # ensemble
    seis_e = get_live_seismogram_ensemble(3)
    logging_helper.info(seis_e, 'dummy_func', '0')
    for i in range(3):
        assert seis_e.member[i].number_of_stages() == 1

    seis_e = get_live_seismogram_ensemble(3)
    logging_helper.info(seis_e, 'dummy_func', '0', 0)
    assert seis_e.member[0].number_of_stages() == 1

    tse = get_live_timeseries_ensemble(3)
    logging_helper.info(tse, 'dummy_func', '0', 0)
    assert tse.member[0].number_of_stages() == 1
Example #12
def test_detrend():
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    detrend(ts, object_history=True, alg_id="0")
    detrend(seis, object_history=True, alg_id="0")
    detrend(tse, object_history=True, alg_id="0")
    detrend(seis_e, object_history=True, alg_id="0")
    detrend(ts, type="linear", object_history=True, alg_id="0")
    detrend(ts, type="constant", object_history=True, alg_id="0")
    detrend(ts, type="polynomial", order=2, object_history=True, alg_id="0")
    detrend(ts,
            type="spline",
            order=2,
            dspline=1000,
            object_history=True,
            alg_id="0")

    # functionality verification testing
    ts = get_live_timeseries()
    tr = obspy.Trace()
    tr.data = np.array(ts.data)
    copy = np.array(ts.data)
    tr.stats.sampling_rate = 20
    tr.detrend(type="simple")
    detrend(ts, "simple", object_history=True, alg_id="0")
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, tr.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, copy))
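Note that detrend here, like filter and interpolate in later examples, is presumably the MsPASS wrapper from mspasspy.algorithms.signals: it converts the MsPASS object to its ObsPy counterpart, applies the ObsPy routine of the same name, converts back, and optionally records object-level history. The side-by-side comparison against obspy.Trace.detrend is what verifies that round trip.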
Example #13
def test_mspass_func_wrapper():
    with pytest.raises(TypeError) as err:
        dummy_func(1)
    assert (str(err.value) ==
            "mspass_func_wrapper only accepts mspass object as data input")

    with pytest.raises(ValueError) as err:
        seis = get_live_seismogram()
        dummy_func(seis, object_history=True)
    assert (str(err.value) ==
            "dummy_func: object_history was true but alg_id not defined")

    assert "OK" == dummy_func(seis, dryrun=True)

    # default behavior
    assert "dummy" == dummy_func(seis)
    assert seis.number_of_stages() == 0

    # object_history is true
    dummy_func(seis, object_history=True, alg_id="0")
    assert seis.number_of_stages() == 1
    assert len(seis.get_nodes()) == 1
    assert seis.current_nodedata().algorithm == "dummy_func"
    assert seis.current_nodedata().algid == "0"

    # inplace return
    data = dummy_func(seis, inplace_return=True)
    assert isinstance(data, Seismogram)

    # valid function_return_key
    data = dummy_func(seis,
                      inplace_return=True,
                      function_return_key="test_key")
    assert isinstance(data, Seismogram)
    assert "test_key" in data and data["test_key"] == "dummy"

    # invalid function_return_key and not inplace_return
    data = dummy_func(seis, inplace_return=False, function_return_key=dict())
    assert isinstance(data, Seismogram)
    errs = seis.elog.get_error_log()
    assert len(errs) == 2
    assert errs[-1].algorithm == "dummy_func"
    assert (
        errs[-1].message ==
        "Inconsistent arguments; inplace_return was set False and function_return_key was not None.\nAssuming inplace_return == True is correct"
    )
    assert errs[-2].algorithm == "dummy_func"
    assert (
        errs[-2].message ==
        "Illegal type received for function_return_key argument=<class 'dict'>\nReturn value not saved in Metadata"
    )

    # a dead input is returned immediately, unprocessed
    seis.kill()
    data = dummy_func(seis)
    assert not data.live
    data = dummy_func(seis, inplace_return=True)
    assert not data.live
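The helper under test is presumably the minimal wrapped function below; the body is inferred from the "dummy" return value and the recorded algorithm name (a reconstruction, not verified source):

from mspasspy.util.decorators import mspass_func_wrapper

@mspass_func_wrapper
def dummy_func(data, *args, **kwargs):
    return "dummy"  # matches: assert "dummy" == dummy_func(seis)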
Example #14
def test_reduce_dead_silent():
    seis = get_live_seismogram()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, 'dummy_func', '1')
    logging_helper.info(seis, 'dummy_func_2', '2')
    assert seis.number_of_stages() == 2
    seis.kill()
    seis2 = get_live_seismogram()
    assert seis2.number_of_stages() == 0
    logging_helper.reduce(seis2, seis, 'reduce', '3')
    assert len(seis2.get_nodes()) == 3

    seis = get_live_seismogram()
    seis2 = get_live_seismogram()
    logging_helper.info(seis, 'dummy_func', '1')
    logging_helper.info(seis, 'dummy_func_2', '2')
    seis2.kill()
    logging_helper.reduce(seis2, seis, 'reduce', '3')
    assert len(seis2.get_nodes()) == 0
Example #15
def test_reduce_dead_silent():
    seis = get_live_seismogram()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, "1", "dummy_func")
    logging_helper.info(seis, "2", "dummy_func_2")
    assert seis.number_of_stages() == 2
    seis.kill()
    seis2 = get_live_seismogram()
    assert seis2.number_of_stages() == 0
    logging_helper.reduce(seis2, seis, "3", "reduce")
    assert len(seis2.get_nodes()) == 3

    seis = get_live_seismogram()
    seis2 = get_live_seismogram()
    logging_helper.info(seis, "1", "dummy_func")
    logging_helper.info(seis, "2", "dummy_func_2")
    seis2.kill()
    logging_helper.reduce(seis2, seis, "3", "reduce")
    assert len(seis2.get_nodes()) == 0
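Examples 14 and 15 are two revisions of the same test: they differ only in the order of the algorithm-name and algorithm-id arguments to logging_helper.info and logging_helper.reduce, apparently reflecting an API change between MsPASS versions. Check the signature of the version you are running before copying either variant.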
Example #16
def test_filter():
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    filter(ts,
           "bandpass",
           freqmin=1,
           freqmax=5,
           object_history=True,
           alg_id="0")
    filter(seis,
           "bandpass",
           freqmin=1,
           freqmax=5,
           object_history=True,
           alg_id="0")
    filter(tse,
           "bandpass",
           freqmin=1,
           freqmax=5,
           object_history=True,
           alg_id="0")
    filter(seis_e,
           "bandpass",
           freqmin=1,
           freqmax=5,
           object_history=True,
           alg_id="0")
    filter(ts, "bandstop", freqmin=1, freqmax=5)
    filter(ts, "lowpass", freq=1)
    filter(ts, "highpass", freq=1)
    filter(ts, "lowpass_cheby_2", freq=1)

    # FIXME: fix the testing warning
    # These two filter types are not supported:
    # filter(ts, "lowpass_fir", freq=10)
    # filter(ts, "remez_fir", freqmin=10, freqmax=20)

    # functionality verification testing
    ts = get_live_timeseries()
    tr = obspy.Trace()
    tr.data = np.array(ts.data)
    copy = np.array(ts.data)
    tr.stats.sampling_rate = 20
    tr.filter("bandpass", freqmin=1, freqmax=5)
    filter(ts,
           "bandpass",
           freqmin=1,
           freqmax=5,
           object_history=True,
           alg_id="0")
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, tr.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, copy))
Example #17
def test_reduce_functionality():
    # Seismogram and TimeSeries
    seis = get_live_seismogram()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, 'dummy_func', '1')
    logging_helper.info(seis, 'dummy_func_2', '2')
    assert seis.number_of_stages() == 2
    seis2 = get_live_seismogram()
    assert seis2.number_of_stages() == 0
    logging_helper.reduce(seis2, seis, 'reduce', '3')
    assert len(seis2.get_nodes()) == 3

    ts = get_live_timeseries()
    ts2 = get_live_timeseries()
    assert ts.number_of_stages() == 0
    logging_helper.info(ts, 'dummy_func', '1')
    logging_helper.info(ts, 'dummy_func', '2')
    assert ts.number_of_stages() == 2
    logging_helper.reduce(ts2, ts, 'reduce', '3')
    assert len(ts2.get_nodes()) == 3

    # ensemble
    seis_e = get_live_seismogram_ensemble(3)
    seis_e2 = get_live_seismogram_ensemble(3)
    logging_helper.info(seis_e, 'dummy_func', '0')
    logging_helper.info(seis_e, 'dummy_func', '1')
    logging_helper.info(seis_e, 'dummy_func', '2')
    logging_helper.reduce(seis_e2, seis_e, "reduce", "3")
    for i in range(3):
        assert len(seis_e2.member[i].get_nodes()) == 4

    tse = get_live_timeseries_ensemble(3)
    tse2 = get_live_timeseries_ensemble(3)
    logging_helper.info(tse, 'dummy_func', '0')
    logging_helper.info(tse, 'dummy_func', '1')
    logging_helper.info(tse, 'dummy_func', '2')
    logging_helper.reduce(tse2, tse, "reduce", "3")
    for i in range(3):
        assert len(tse2.member[i].get_nodes()) == 4
Example #18
def test_info_not_live():
    # Seismogram and TimeSeries
    seis = get_live_seismogram()
    seis.kill()
    assert seis.number_of_stages() == 0
    logging_helper.info(seis, 'dummy_func', '1')
    assert seis.number_of_stages() == 0

    # ensemble
    seis_e = get_live_seismogram_ensemble(3)
    assert seis_e.member[0].number_of_stages() == 0
    seis_e.member[0].kill()
    logging_helper.info(seis_e, 'dummy_func', '0', 0)
    assert seis_e.member[0].number_of_stages() == 0
Example #19
def test_interpolate():
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    interpolate(ts, 255, object_history=True, alg_id="0")
    interpolate(seis, 255, object_history=True, alg_id="0")
    interpolate(tse, 255, object_history=True, alg_id="0")
    interpolate(seis_e, 255, object_history=True, alg_id="0")
    interpolate(ts,
                255,
                method="lanczos",
                a=20,
                object_history=True,
                alg_id="0")
    ts = get_live_timeseries()
    interpolate(ts, 25, method="slinear", object_history=True, alg_id="0")
    ts = get_live_timeseries()
    interpolate(ts, 255, method="linear", object_history=True, alg_id="0")
    ts = get_live_timeseries()
    interpolate(ts, 255, method="nearest", object_history=True, alg_id="0")
    ts = get_live_timeseries()
    interpolate(ts, 255, method="zero", object_history=True, alg_id="0")

    # functionality verification testing
    ts = get_sin_timeseries()
    tr = obspy.Trace()
    tr.data = np.array(ts.data)
    copy = np.array(ts.data)
    tr.stats.sampling_rate = 20
    tr.interpolate(40, method="linear", npts=500)
    interpolate(ts,
                40,
                method="linear",
                npts=500,
                object_history=True,
                alg_id="0")
    assert len(ts.data) == len(tr.data)
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, tr.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, copy))
    assert ts.dt == 1 / 40
Example #20
def test_interpolate():
    ts = get_live_timeseries()
    seis = get_live_seismogram()
    tse = get_live_timeseries_ensemble(3)
    seis_e = get_live_seismogram_ensemble(3)
    interpolate(ts, 255, preserve_history=True, instance='0')
    interpolate(seis, 255, preserve_history=True, instance='0')
    interpolate(tse, 255, preserve_history=True, instance='0')
    interpolate(seis_e, 255, preserve_history=True, instance='0')
    interpolate(ts,
                255,
                method='lanczos',
                a=20,
                preserve_history=True,
                instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 25, method='slinear', preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 255, method='linear', preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 255, method='nearest', preserve_history=True, instance='0')
    ts = get_live_timeseries()
    interpolate(ts, 255, method='zero', preserve_history=True, instance='0')

    # functionality verification testing
    ts = get_sin_timeseries()
    tr = obspy.Trace()
    tr.data = np.array(ts.data)
    copy = np.array(ts.data)
    tr.stats.sampling_rate = 20
    tr.interpolate(40, method="linear", npts=500)
    interpolate(ts,
                40,
                method='linear',
                npts=500,
                preserve_history=True,
                instance='0')
    assert len(ts.data) == len(tr.data)
    assert all(abs(a - b) < 0.001 for a, b in zip(ts.data, tr.data))
    assert not all(abs(a - b) < 0.001 for a, b in zip(ts.data, copy))
    assert ts.dt == 1 / 40
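Likewise, Examples 19 and 20 are the same test against two revisions of the decorator API: the older keywords preserve_history and instance appear to have been renamed to object_history and alg_id.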
Example #21
def test_is_input_dead():
    seis = get_live_seismogram()
    assert False == is_input_dead(seis)
    assert False == is_input_dead(any=seis)
    seis.kill()
    assert True == is_input_dead(seis)
    assert True == is_input_dead(any=seis)

    ts = get_live_timeseries()
    assert False == is_input_dead(ts)
    assert False == is_input_dead(any=ts)
    ts.kill()
    assert True == is_input_dead(ts)
    assert True == is_input_dead(any=ts)

    seis_e = get_live_seismogram_ensemble(3)
    assert False == is_input_dead(seis_e)
    assert False == is_input_dead(any=seis_e)
    seis_e.member[0].kill()
    assert False == is_input_dead(seis_e)
    assert False == is_input_dead(any=seis_e)
    seis_e.member[1].kill()
    seis_e.member[2].kill()
    assert True == is_input_dead(seis_e)
    assert True == is_input_dead(any=seis_e)

    tse = get_live_timeseries_ensemble(3)
    assert False == is_input_dead(tse)
    assert False == is_input_dead(any=tse)
    tse.member[0].kill()
    assert False == is_input_dead(tse)
    assert False == is_input_dead(any=tse)
    tse.member[1].kill()
    tse.member[2].kill()
    assert True == is_input_dead(tse)
    assert True == is_input_dead(any=tse)
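How is_input_dead is typically used inside an algorithm body (a minimal sketch assuming only the mspasspy.util.decorators import; my_algorithm and its processing step are hypothetical):

from mspasspy.util.decorators import is_input_dead

def my_algorithm(data):
    # Short-circuit on dead input; as the test shows, an ensemble counts
    # as dead only when every one of its members has been killed.
    if is_input_dead(data):
        return data
    data.data[0] = 0.0  # hypothetical processing step
    return data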
Example #22
    # (fragment: the enclosing function definition was truncated in the
    # source; see the reconstructions after this block)
    # zero = get_live_timeseries()
    # zero.data = DoubleVector(np.zeros(255))
    res = data.reduce(
        lambda a, b: stack(a, b, object_history=True, alg_id="3"))
    return res
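Plausible reconstructions of the truncated helpers (hypothetical: the fragment above matches spark_reduce, since RDD.reduce returns eagerly, while a dask bag exposes fold rather than reduce):

import dask.bag as daskbag

def spark_reduce(l, spark_context):
    data = spark_context.parallelize(l)
    res = data.reduce(
        lambda a, b: stack(a, b, object_history=True, alg_id="3"))
    return res

def dask_reduce(l):
    data = daskbag.from_sequence(l)
    res = data.fold(
        lambda a, b: stack(a, b, object_history=True, alg_id="3"))
    return res.compute()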


def test_reduce_dask_spark(spark_context):
    l = [get_live_timeseries() for i in range(5)]
    res = np.zeros(255)
    for i in range(5):
        for j in range(255):
            res[j] = res[j] + l[i].data[j]
    spark_res = spark_reduce(l, spark_context)
    dask_res = dask_reduce(l)
    assert np.isclose(res, dask_res.data).all()
    assert np.isclose(res, spark_res.data).all()
    assert len(res) == len(spark_res.data)


if __name__ == "__main__":
    # test_reduce_stack()
    a1 = get_live_seismogram()
    a2 = get_live_seismogram()
    print(a1.data[0, 0])
    print(a2.data[0, 0])
    print(a1.t0, a1.endtime())
    print(a2.t0, a2.endtime())
    a1 += a2
    print(a1.data[0, 0])
Example #23
    def test_mspass_map_with_filePath(self, spark_context):
        # test mspass_map for spark (file input)
        # build the RFdecon input data needed for parallelization
        d = [get_live_seismogram(71, 2.0) for i in range(5)]
        for i in range(5):
            d[i].t0 = -5

        # build the parameters string recorded in global history
        pfPath = "python/mspasspy/data/pf/RFdeconProcessor.pf"
        pf = AntelopePf(pfPath)
        pf_dict = AntelopePf2dict(pf)
        parameter_dict = collections.OrderedDict()
        parameter_dict["alg"] = "LeastSquares"
        parameter_dict["pf"] = pf_dict
        parameter_dict["object_history"] = "True"
        gTree = ParameterGTree(parameter_dict)
        json_params = json.dumps(gTree.asdict())

        data = spark_context.parallelize(d)
        spark_res = data.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=None,
            parameters=None,
        ).collect()
        manager_db = Database(self.client, "test_manager")
        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 8)
        res = manager_db["history_global"].find_one({"alg_name": "RFdecon"})
        assert res["job_id"] == self.manager.job_id
        assert res["job_name"] == self.manager.job_name
        assert res["alg_name"] == "RFdecon"
        assert res["parameters"] == json_params
        spark_alg_id = res["alg_id"]

        # test mspass_map for dask
        ddb = daskbag.from_sequence(d)
        dask_res = ddb.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=None,
            parameters=None,
        ).compute()

        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 9)
        assert (manager_db["history_global"].count_documents(
            {"alg_id": spark_alg_id}) == 2)
        docs = manager_db["history_global"].find({"alg_id": spark_alg_id})
        assert docs[0]["job_id"] == docs[1]["job_id"] == self.manager.job_id
        assert docs[0]["job_name"] == docs[1][
            "job_name"] == self.manager.job_name
        assert docs[0]["alg_name"] == docs[1]["alg_name"] == "RFdecon"
        assert docs[0]["parameters"] == docs[1]["parameters"] == json_params
        assert not docs[0]["time"] == docs[1]["time"]

        # same alg + parameters combination -> same alg_id
        ddb = daskbag.from_sequence(d)
        dask_res = ddb.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=None,
            parameters=None,
        ).compute()
        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 10)
        assert (manager_db["history_global"].count_documents(
            {"alg_id": spark_alg_id}) == 3)

        # SPARK: user-provided alg_name and parameters (existing combination)
        spark_alg_name = "RFdecon"
        spark_alg_parameters = (
            "alg=LeastSquares, pf={pfPath}, object_history=True".format(
                pfPath=pfPath))
        data = spark_context.parallelize(d)
        spark_res = data.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=spark_alg_name,
            parameters=spark_alg_parameters,
        ).collect()
        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 11)
        assert (manager_db["history_global"].count_documents(
            {"alg_id": spark_alg_id}) == 4)

        # SPARK: user-provided alg_name and parameters (new combination)
        spark_alg_name = "RFdecon_2"
        spark_alg_parameters = (
            "alg=LeastSquares, pf={pfPath}, object_history=True".format(
                pfPath=pfPath))
        data = spark_context.parallelize(d)
        spark_res = data.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=spark_alg_name,
            parameters=spark_alg_parameters,
        ).collect()
        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 12)
        assert (manager_db["history_global"].count_documents(
            {"alg_name": "RFdecon_2"}) == 1)
        res = manager_db["history_global"].find_one({"alg_name": "RFdecon_2"})
        assert res["job_id"] == self.manager.job_id
        assert res["job_name"] == self.manager.job_name
        assert res["alg_name"] == "RFdecon_2"
        assert res["parameters"] == json_params
        new_spark_alg_id = res["alg_id"]
        assert (manager_db["history_global"].count_documents(
            {"alg_id": new_spark_alg_id}) == 1)

        # DASK: user-provided alg_name and parameters (existing combination)
        dask_alg_name = "RFdecon"
        dask_alg_parameters = (
            "alg=LeastSquares, pf={pfPath}, object_history=True".format(
                pfPath=pfPath))
        ddb = daskbag.from_sequence(d)
        dask_res = ddb.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=dask_alg_name,
            parameters=dask_alg_parameters,
        ).compute()
        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 13)
        assert (manager_db["history_global"].count_documents(
            {"alg_id": spark_alg_id}) == 5)

        # DASK: user-provided alg_name and parameters (new combination)
        dask_alg_name = "RFdecon_3"
        dask_alg_parameters = (
            "alg=LeastSquares, pf={pfPath}, object_history=True".format(
                pfPath=pfPath))
        ddb = daskbag.from_sequence(d)
        dask_res = ddb.mspass_map(
            RFdecon,
            alg="LeastSquares",
            pf=pfPath,
            object_history=True,
            global_history=self.manager,
            alg_name=dask_alg_name,
            parameters=dask_alg_parameters,
        ).compute()
        assert (manager_db["history_global"].count_documents(
            {"job_name": self.manager.job_name}) == 14)
        assert (manager_db["history_global"].count_documents(
            {"alg_name": "RFdecon_3"}) == 1)
        res = manager_db["history_global"].find_one({"alg_name": "RFdecon_3"})
        assert res["job_id"] == self.manager.job_id
        assert res["job_name"] == self.manager.job_name
        assert res["alg_name"] == "RFdecon_3"
        assert res["parameters"] == json_params
        new_dask_alg_id = res["alg_id"]
        assert (manager_db["history_global"].count_documents(
            {"alg_id": new_dask_alg_id}) == 1)