Example n. 1
0
def test_reduce_stack():
    """Verify stack() sums the second operand into the first, in place.

    Covers all four container types: Seismogram, TimeSeries,
    TimeSeriesEnsemble, and SeismogramEnsemble.
    """
    # Seismogram: stack mutates the first argument, so snapshot it first.
    seis1 = get_live_seismogram()
    seis2 = get_live_seismogram()
    seis_before = np.array(seis1.data)
    stack(seis1, seis2)
    expected = np.add(np.array(seis_before), np.array(seis2.data))
    for row in range(3):
        assert np.isclose(seis1.data[row], expected[row]).all()  # fixme

    # TimeSeries: single data vector, one array-wide comparison.
    ts1 = get_live_timeseries()
    ts2 = get_live_timeseries()
    ts_before = np.array(ts1.data)
    stack(ts1, ts2)
    assert np.isclose(ts1.data, np.array(ts_before) + np.array(ts2.data)).all()

    # TimeSeriesEnsemble: each member must equal snapshot + second ensemble.
    ens1 = get_live_timeseries_ensemble(2)
    ens2 = get_live_timeseries_ensemble(2)
    ens1_before = TimeSeriesEnsemble(ens1)
    stack(ens1, ens2)
    for k in range(2):
        summed = np.add(
            np.array(ens1_before.member[k].data),
            np.array(ens2.member[k].data),
        )
        assert np.isclose(ens1.member[k].data, summed).all()

    # SeismogramEnsemble: per-member, per-component (3 rows) comparison.
    sens1 = get_live_seismogram_ensemble(2)
    sens2 = get_live_seismogram_ensemble(2)
    sens1_before = SeismogramEnsemble(sens1)
    stack(sens1, sens2)
    for k in range(2):
        summed = np.add(
            np.array(sens1_before.member[k].data),
            np.array(sens2.member[k].data),
        )
        for row in range(3):
            assert np.isclose(sens1.member[k].data[row], summed[row]).all()  # fixme
Example n. 2
0
def spark_reduce(input, sc):
    """Stack every object in *input* into one result via a Spark reduce.

    :param input: sequence of stackable MsPASS objects.
    :param sc: active SparkContext used to parallelize the sequence.
    :return: the stacked result produced by the reduce.
    """
    rdd = sc.parallelize(input)
    # Abandoned fold-identity experiment, kept for reference:
    # zero = get_live_timeseries()
    # zero.data = DoubleVector(np.zeros(255))
    combine = lambda left, right: stack(
        left, right, object_history=True, alg_id="3"
    )
    return rdd.reduce(combine)
Example n. 3
0
def spark_reduce(input, sc):
    """Stack every object in *input* into one result via a Spark reduce.

    Uses the older stack() keywords (preserve_history / instance).

    :param input: sequence of stackable MsPASS objects.
    :param sc: active SparkContext used to parallelize the sequence.
    :return: the stacked result produced by the reduce.
    """
    rdd = sc.parallelize(input)
    # Abandoned fold-identity experiment, kept for reference:
    # zero = get_live_timeseries()
    # zero.data = DoubleVector(np.zeros(255))
    combine = lambda left, right: stack(
        left, right, preserve_history=True, instance='3'
    )
    return rdd.reduce(combine)
Example n. 4
0
def dask_reduce(input):
    """Stack every object in *input* into one result via a dask bag fold.

    :param input: sequence of stackable MsPASS objects.
    :return: the stacked result after computing the lazy fold.
    """
    bag = db.from_sequence(input)
    folded = bag.fold(
        lambda left, right: stack(left, right, object_history=True, alg_id="3")
    )
    return folded.compute()
Example n. 5
0
def test_reduce_stack_exception():
    """stack() must raise IndexError when ensemble member counts differ."""
    smaller = get_live_timeseries_ensemble(2)
    larger = get_live_timeseries_ensemble(3)
    with pytest.raises(IndexError) as exc_info:
        stack(smaller, larger)
    assert str(exc_info.value) == "data1 and data2 have different sizes of member"
Example n. 6
0
def dask_reduce(input):
    """Stack every object in *input* into one result via a dask bag fold.

    Uses the older stack() keywords (preserve_history / instance).

    :param input: sequence of stackable MsPASS objects.
    :return: the stacked result after computing the lazy fold.
    """
    bag = db.from_sequence(input)
    combine = lambda left, right: stack(
        left, right, preserve_history=True, instance='3'
    )
    return bag.fold(combine).compute()