Ejemplo n.º 1
0
def test_reindex_no_rate():
    """A Reindex node must interrupt the worker when no rate can be found."""
    stream = DummyData()
    node = Reindex()
    # No explicit rate and no meta at all: the node cannot infer a rate.
    with pytest.raises(WorkerInterrupt):
        node.i.data = stream.next()
        node.update()
    # An empty meta dict does not provide a rate either.
    with pytest.raises(WorkerInterrupt):
        node.i.data = stream.next()
        node.i.meta = {}
        node.update()
Ejemplo n.º 2
0
def test_idle_buffer_2D(random):
    """Continuous training data accumulates in the rolling buffer while idle."""
    start = now() - pd.Timedelta('10s')
    source = DummyData(start_date=start, rate=1, jitter=0)
    node = Pipeline(steps=dummy_classifier, buffer_size='5s')
    # First chunk starts 10 s in the past at 1 Hz: only the samples inside
    # the 5-second buffer window survive.
    node.i_training.data = source.next(10)
    node.update()
    assert len(node._X_train_indices) == 4
    # Second chunk fits entirely inside the window, so all 10 rows are kept.
    node.i_training.data = source.next(10)
    node.update()
    assert len(node._X_train_indices) == 14
    assert len(node._X_train) == len(node._X_train_indices)
Ejemplo n.º 3
0
def test_accumulate_y_train(caplog):
    """Labels come from the epoch meta; a missing label is logged and skipped."""
    node = Pipeline(steps=dummy_classifier)
    source = DummyData(start_date=now())
    node.i_training_0.data = source.next()
    node.i_training_1.data = source.next()
    node.i_training_2.data = source.next()
    node.i_training_0.meta = {'epoch': {'context': {'target': True}}}
    node.i_training_1.meta = {}  # no label: this epoch must be rejected
    node.i_training_2.meta = {'epoch': {'context': {'target': False}}}
    node.update()
    # Only the two labeled epochs contribute to y_train.
    assert node._y_train.tolist() == [True, False]
    # The unlabeled epoch is reported through the logger.
    assert caplog.record_tuples[0][2] == 'Invalid label'
Ejemplo n.º 4
0
def test_reindex_indices():
    """Reindexing at 10 Hz yields the expected first and last timestamps per chunk."""
    stream = DummyData()
    node = Reindex(rate=10)
    # (first timestamp, last timestamp) expected for two consecutive chunks.
    expected_bounds = [
        ("2017-12-31 23:59:59.998745401", "2018-01-01 00:00:00.898745401"),
        ("2018-01-01 00:00:00.998745401", "2018-01-01 00:00:01.898745401"),
    ]
    for first, last in expected_bounds:
        node.i.data = stream.next()
        node.update()
        assert node.o.data.index.values[0] == np.datetime64(first)
        assert node.o.data.index.values[-1] == np.datetime64(last)
Ejemplo n.º 5
0
def test_passthrough():
    """In passthrough mode, every default input port is forwarded untouched.

    Training and event ports are consumed but the `i`, `i_0` and `i_1` ports
    must reappear on `o`, `o_0` and `o_1` with identical data and meta.
    """
    node = Pipeline(steps=dummy_classifier, passthrough=True)
    streamer = DummyData()
    node.i_training.data = streamer.next()
    node.i_training_0.data = streamer.next()
    node.i_events.data = make_event('foobar')
    node.i.data = streamer.next()
    node.i_0.data = streamer.next()
    node.i_1.data = streamer.next()
    node.i.meta = {'foobar': 42}
    node.update()
    # Three default-named outputs: o, o_0, o_1.
    assert len(list(node.iterate('o*'))) == 3
    assert node.o.data.equals(node.i.data)
    assert node.o_0.data.equals(node.i_0.data)
    # Bug fix: the original asserted o_0/i_0 twice and never checked o_1/i_1.
    assert node.o_1.data.equals(node.i_1.data)
    assert node.o.meta == node.i.meta
Ejemplo n.º 6
0
def test_reindex_rate_meta():
    """The nominal rate can be supplied through the input meta dictionary."""
    stream = DummyData()
    node = Reindex()
    node.i.data = stream.next()
    node.i.meta = {"rate": 10}
    node.update()
    # The rate from meta is stored internally and forwarded downstream.
    assert node._rate == 10
    assert node.o.meta["rate"] == 10
Ejemplo n.º 7
0
def test_predict_3D_output():
    """Predicting on epoched (3D) input emits one event per epoch with its meta."""
    node = Pipeline(steps=dummy_classifier, mode='predict', meta_label='target')
    source = DummyData(start_date=now())
    # Feed two labeled training epochs and kick off training.
    node.i_training_0.data = source.next(5)
    node.i_training_1.data = source.next(5)
    node.i_training_0.meta = {'target': 0}
    node.i_training_1.meta = {'target': 1}
    node.i_events.data = make_event('training_starts')
    # Spin until the internal status reaches 3 (presumably the fitted/ready
    # state — training runs asynchronously across updates).
    while node._status != 3:
        node.update()
    # Two prediction epochs, each tagged with its own meta.
    node.i_0.data = source.next(5)
    node.i_1.data = source.next(5)
    node.i_0.meta = {'index': 0}
    node.i_1.meta = {'index': 1}
    node.update()
    assert len(node.o_events.data) == 2
    assert node.o_events.meta == {'epochs': [{'index': 0}, {'index': 1}]}
Ejemplo n.º 8
0
def test_accumulate_start_2D(random):
    """After an accumulation_starts event, all incoming training rows are kept."""
    node = Pipeline(steps=dummy_classifier, buffer_size='5s')
    start = now()
    node.i_events.set([['accumulation_starts', '']], [start], ['label', 'data'])
    source = DummyData(start_date=start, rate=1, jitter=0)
    node.i_training.data = source.next(100)
    node.update()
    # Accumulation never stopped, so every one of the 100 rows is accumulated.
    assert len(node._X_train) == 100
Ejemplo n.º 9
0
def test_predict_2D_output(random):
    """fit_predict on continuous (2D) data yields one event per sample."""
    steps = [{'module': 'test_ml', 'class': 'DummyClassifierUnsupervised'}]
    node = Pipeline(steps=steps, mode='fit_predict', meta_label=None)
    source = DummyData(start_date=now())
    node.i.data = source.next(5)
    node.i.meta = {'foo': 'bar'}
    node.update()
    # Five input samples produce five prediction events; meta is passed through.
    assert len(node.o_events.data) == 5
    assert node.o_events.meta == node.i.meta
Ejemplo n.º 10
0
def test_accumulate_start_stop_2D(random):
    """Only rows between the start and stop events are accumulated."""
    node = Pipeline(steps=dummy_classifier, buffer_size='5s')
    start = now()
    labels = [['accumulation_starts', ''], ['accumulation_stops', '']]
    timestamps = pd.date_range(start=start, periods=2, freq='10s')
    node.i_events.set(labels, timestamps, ['label', 'data'])
    source = DummyData(start_date=start, rate=1, jitter=0)
    node.i_training.data = source.next(100)
    node.update()
    # At 1 Hz, 10 seconds elapse between start and stop: exactly 10 rows.
    assert len(node._X_train) == 10
Ejemplo n.º 11
0
def test_transform_3D_output(random):
    """A transform pipeline reshaping to 3D preserves per-epoch index, columns and meta."""
    steps = [
        {'module': 'test_ml', 'class': 'Vectorizer'},
        {'module': 'test_ml', 'class': 'DummyTransformer'},
        {'module': 'test_ml', 'class': 'Shaper', 'args': {'shape': (2, -1, 5)}},
    ]
    node = Pipeline(steps=steps, mode='fit_transform', meta_label=None)
    columns = ['A', 'B', 'C', 'D', 'E']
    source = DummyData(start_date=now())
    # Feed two symmetric input epochs, each with named columns and its own meta.
    for port_id in (0, 1):
        port = getattr(node, 'i_%d' % port_id)
        port.data = source.next()
        port.data.columns = columns
        port.meta = {'index': port_id}
    node.update()
    assert len(list(node.iterate('o_*'))) == 2
    # The transformed output keeps the original timestamps of its epoch.
    assert np.array_equal(node.i_0.data.index.values, node.o_0.data.index.values)
    # Input frames must not have been mutated.
    assert list(node.i_0.data.columns) == columns
    assert list(node.i_1.data.columns) == columns
    assert node.o_0.meta == node.i_0.meta
    assert node.o_1.meta == node.i_1.meta
Ejemplo n.º 12
0
"""Tests for nodes from timeflux_dsp.nodes.spectral"""

import numpy as np
import pandas as pd
import pytest
import xarray as xr
from timeflux.helpers.testing import DummyData

from timeflux_dsp.nodes.spectral import FFT

# Nominal sampling rate (Hz) shared by the dummy data generator and the FFT node.
fs = 10

# Module-level fixture: a jittered dummy stream; tests must call data.reset()
# before consuming it so each test starts from the same position.
data = DummyData(rate=fs, jitter=0.05)
# Pre-consume 50 samples (advances the generator's internal cursor).
all_data = data.next(50)


def test_welch():
    data.reset()

    node = FFT(fs=fs, return_onesided=False)
    node.i.data = data.next(5)

    node.update()
    expected_freqs = [0.0, 2.0, 4.0, -4.0, -2.0]
    expected_times = [pd.Timestamp("2018-01-01 00:00:00.396560186")]
    expected_data = np.array([
        [
            2.687793 + 0.0j,
            2.69977 + 0.0j,
            4.158542 + 0.0j,
            2.907866 + 0.0j,