Example #1
def compute_hourly_mean(config, data):
    feature = data.pipe(
        rs.state.with_memory_store(
            rs.ops.group_by(
                lambda i: i['group_id'],
                pipeline=rx.pipe(
                    rs.data.split(
                        lambda i: i['event_time'].hour,
                        pipeline=rx.pipe(
                            rs.ops.tee_map(
                                rs.ops.last(),
                                rx.pipe(
                                    rs.ops.map(lambda i: i['data']['field2']),
                                    rs.math.mean(reduce=True),
                                )),
                            rs.ops.map(lambda i: Feature(
                                event_id=i[0]['event_id'],
                                event_time=i[0]['event_time'],
                                group_id=i[0]['group_id'],
                                field2_mean=i[1],
                            )),
                            clip_hour(),
                        )), )), ), )

    return feature,
Example #2
    def pipe(self, *pipelines: "Pipeline") -> "Pipeline":
        """

        Can be used as a class or instance method to create a new pipeline chain

        Args:
            pipelines: variable number of pipelines

        Returns:
            newly composed Pipeline instance
        """

        # called as instance method
        if not isinstance(self, type):
            parent = self
            return type(
                "Pipeline",
                (Pipeline, ),
                {
                    "transform":
                    lambda self: rx.pipe(parent.transform(), *
                                         [p.transform() for p in pipelines])
                },
            )()

        # called as class method
        else:
            return type(
                "Pipeline",
                (Pipeline, ),
                {
                    "transform":
                    lambda self: rx.pipe(*[p.transform() for p in pipelines])
                },
            )()
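
A minimal usage sketch of the instance-method branch above. Double and Increment are hypothetical Pipeline subclasses introduced only for illustration; they are assumed to define transform() returning an rx operator chain and are not part of the original code.

import rx
import rx.operators as ops

class Double(Pipeline):          # hypothetical subclass for illustration
    def transform(self):
        return rx.pipe(ops.map(lambda i: i * 2))

class Increment(Pipeline):       # hypothetical subclass for illustration
    def transform(self):
        return rx.pipe(ops.map(lambda i: i + 1))

# instance-method style: extend an existing pipeline with further stages
extended = Double().pipe(Increment())
rx.from_([1, 2, 3]).pipe(extended.transform()).subscribe(print)
# expected output: 3, 5, 7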
Example #3
def test_tee_map_roll_sum():
    Item = namedtuple('Item', ['group', 'a', 'b'])
    source = [
        Item('a', 0, 1000),
        Item('b', 1, 10001),
        Item('a', 1, 1001),
        Item('a', 2, 1002),
        Item('b', 2, 10002),
        Item('b', 3, 10003),
        Item('a', 3, 1003),
        Item('a', 4, 1004),
        Item('b', 4, 10004),
        Item('a', 5, 1005),
        Item('b', 5, 10005),
        Item('b', 6, 10006),
    ]

    actual_result = []
    actual_error = []

    def on_next(i):
        actual_result.append(i)

    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.state.with_store(
            store,
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i.group,
                    rx.pipe(
                        rs.data.roll(window=3,
                                     stride=2,
                                     pipeline=rx.pipe(
                                         rs.ops.tee_map(
                                             rx.pipe(
                                                 rs.ops.map(lambda i: i.a),
                                                 rs.math.sum(reduce=True),
                                             ),
                                             rx.pipe(
                                                 rs.ops.map(lambda i: i.b),
                                                 rs.math.sum(reduce=True),
                                             )), )), )))), ).subscribe(
                                                 on_next=on_next,
                                                 on_error=actual_error.append)

    if len(actual_error) > 0:
        import traceback
        traceback.print_tb(actual_error[0].__traceback__)
    assert len(actual_error) == 0
    assert actual_result == [
        (3.0, 3003.0),
        (6.0, 30006.0),
        (9.0, 3009.0),
        (12.0, 30012.0),
        (9.0, 2009.0),
        (11.0, 20011.0),
    ]
Example #4
def spectrum(mode):
    if mode:
        return pipe(
            ops.map(lambda img: np.fft.fft2(img)),
            ops.map(lambda img: np.fft.fftshift(img)),
        )
    else:
        return pipe(
            ops.map(lambda img: np.fft.fftshift(img)),
            ops.map(lambda img: np.fft.fft2(img)),
        )
Example #5
def test_forward_topology_probe():
    actual_topology_probe = []
    source = [1, 2, 3, 4]

    rx.from_(source).pipe(
        rs.state.with_memory_store(
            rx.pipe(
                rs.ops.group_by(lambda i: i % 2 == 0, pipeline=rx.pipe()),
                on_probe_state_topology(actual_topology_probe.append),
            )), ).subscribe()

    assert len(actual_topology_probe) == 1
Example #6
def test_forward_topology_probe():
    actual_topology_probe = []
    source = [1, 2, 3, 4]

    rx.from_(source).pipe(
        rs.state.with_memory_store(
            rx.pipe(
                rs.data.split(lambda i: 1, rx.pipe()),
                on_probe_state_topology(actual_topology_probe.append),
            )), ).subscribe()

    assert len(actual_topology_probe) == 1
Example #7
def test_groupby_roll_sum():
    source = [
        ('a', 1),
        ('a', 2),
        ('b', 10),
        ('a', 3),
        ('b', 20),
        ('b', 30),
        ('a', 4),
        ('b', 40),
        ('a', 5),
        ('a', 6),
        ('b', 50),
        ('a', 7),
        ('a', 8),
        ('a', 9),
        ('a', 10),
    ]

    actual_result = []

    def on_next(i):
        actual_result.append(i)

    rx.from_(source).pipe(
        rs.state.with_memory_store(
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i[0],
                    rx.pipe(
                        rs.data.roll(window=3,
                                     stride=2,
                                     pipeline=rx.pipe(
                                         rs.ops.tee_map(
                                             rx.pipe(
                                                 rs.ops.map(lambda i: i[0]), ),
                                             rx.pipe(
                                                 rs.ops.map(lambda i: i[1]),
                                                 rs.math.sum(reduce=True),
                                             )), )), )))), ).subscribe(
                                                 on_next=on_next,
                                                 on_error=lambda e: print(e))

    assert actual_result == [
        ('a', 6.0),
        ('b', 60.0),
        ('a', 12.0),
        ('b', 120.0),
        ('a', 18.0),
        ('a', 24.0),
        ('a', 19.0),
        ('b', 50.0),
    ]
Example #8
    def test_import_identity_e2e(self):
        self.gaiaRef = Gaia.login("http://localhost:8080", UsernamePasswordCredentials("admin", "admin"))
        data = bytes("identity content", encoding="utf-8")
        result = pipe(ops.first())(
            self.gaiaRef.identity().import_identity('2fa4ff18-5c30-497b-9ad2-0d8eb51cd4da',
                                                    'identityName', data, True)).run()
        self.assertEqual(len(result.uri), 72)

        result = pipe(ops.first())(
            self.gaiaRef.data("gaia://2fa4ff18-5c30-497b-9ad2-0d8eb51cd4da/").list()).run()
        self.assertEqual(result[0].dictionary, {'filePath': 'identities/identityName',
                                                'tenant': '2fa4ff18-5c30-497b-9ad2-0d8eb51cd4da'})
        pass
Example #9
    def test_does_not_block(self):
        gaia_ref = mock_gaia_ref(self.return_after_seconds(5))

        def config(x):
            x.identity_id()
            x.qualifier()

        t0 = time.perf_counter()
        pipe(ops.first())(gaia_ref.retrieve_intents(str(uuid4()), config))
        t1 = time.perf_counter()
        exec_time = t1 - t0

        self.assertLess(exec_time, 5)
Example #10
    def _scan(source):
        if isinstance(source, rs.MuxObservable):
            return scan_mux(accumulator, seed, reduce)(source)
        else:
            _seed = seed() if callable(seed) else seed
            if reduce is False:
                return rx.pipe(
                    ops.scan(accumulator, _seed),
                    ops.default_if_empty(default_value=_seed),
                )(source)
            else:
                return rx.pipe(
                    ops.scan(accumulator, _seed),
                    ops.last_or_default(default_value=_seed),
                )(source)
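
A short usage sketch of the reduce flag handled above on a plain Observable, assuming rxsci is importable as rs: reduce=False emits every intermediate accumulation, reduce=True only the final one.

import rx
import rxsci as rs

# running sum: every intermediate accumulator value is emitted
rx.from_([1, 2, 3]).pipe(rs.ops.scan(lambda acc, i: acc + i, 0)).subscribe(print)
# expected output: 1, 3, 6

# reduce=True: only the final accumulator value is emitted on completion
rx.from_([1, 2, 3]).pipe(rs.ops.scan(lambda acc, i: acc + i, 0, reduce=True)).subscribe(print)
# expected output: 6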
Example #11
    def test_perceive(self):
        gaia_ref = mock_gaia_ref(lambda x: MockResponse({
            "data": {
                "perceive": {
                    "perceiveData": {
                        "id": "asdf"
                    },
                    "perceiveAction": {
                        "id": "qwer"
                    }
                }
            }
        }))
        impulse1 = PerceiveActionImpulse(False, str(uuid.uuid4()), "", {})
        impulse2 = PerceiveDataImpulse(str(uuid.uuid4()), "", {})

        def perceive(p: Perception):
            p.perceive_action(impulse1, lambda x: x.id())
            p.perceive_data(impulse2, lambda x: x.id())

        result = pipe(ops.first())(gaia_ref.perceive(perceive)).run()
        perceiveData = result.dictionary.get("perceiveData")
        perceiveAction = result.dictionary.get("perceiveAction")
        assert perceiveData.get(
            "id") is not None, "PerceiveData.id is in response"
        assert perceiveAction.get(
            "id") is not None, "perceiveAction.id is in response"
Example #12
def most_critical():
    """
    Maps an observable sequence of sequences of timestamped BasicHealthModel items to
    an observable sequence of the BasicHealthModel with the most critical health
    status. If multiple BasicHealthModel items share the same criticality, the most
    recent one is chosen.
    """
    def criticality(health_status: HealthStatus):
        """
        Converts a health status into an int such that less healthy > more healthy.
        """
        if health_status == HealthStatus.HEALTHY:
            return 0
        if health_status == HealthStatus.UNHEALTHY:
            return 1
        if health_status == HealthStatus.DEAD:
            return 2
        raise Exception("unknown health status")

    def get_most_critical(health_statuses: Sequence[Timestamp]):
        """
        :param health_statuses: Sequence[Timestamp[HealthStatus]]
        """
        most_crit = health_statuses[0]
        for health in health_statuses:
            cur = criticality(most_crit.value.health_status)
            other = criticality(health.value.health_status)
            if other > cur:
                most_crit = health
            elif other == cur:
                if health.timestamp > most_crit.timestamp:
                    most_crit = health
        return most_crit.value

    return rx.pipe(ops.map(get_most_critical))
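
A hedged usage sketch for most_critical(). The Timestamp and BasicHealthModel namedtuples below are minimal stand-ins assumed to expose the .timestamp, .value and .health_status attributes used above; they are not the project's real classes.

from collections import namedtuple
import rx

# stand-in types, for illustration only
Timestamp = namedtuple('Timestamp', ['timestamp', 'value'])
BasicHealthModel = namedtuple('BasicHealthModel', ['name', 'health_status'])

samples = [
    Timestamp(1, BasicHealthModel('svc-a', HealthStatus.HEALTHY)),
    Timestamp(2, BasicHealthModel('svc-b', HealthStatus.UNHEALTHY)),
    Timestamp(3, BasicHealthModel('svc-c', HealthStatus.UNHEALTHY)),
]

# svc-c wins the tie with svc-b because its timestamp is more recent
rx.of(samples).pipe(most_critical()).subscribe(print)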
Example #13
def preprocess(res):
    return pipe(
        ops.map(lambda img: Image.fromarray(img)),
        ops.map(lambda img: img.resize(res)),
        ops.map(lambda img: img.convert('L')),
        ops.map(lambda img: np.flipud(np.fliplr(img))),
    )
Example #14
def distinct_until_changed(key_mapper=None):
    """Returns an observable sequence that contains only distinct
    contiguous items according to the key_mapper.

    .. marble::
        :alt: distinct_until_changed

        -0-1-1-2-3-1-2-2-3-|
        [    distinct()    ]
        -0-1---2-3-1-2---3-|

    Source:
        An Observable or a MuxObservable

    Args:
        key_mapper: [Optional]  A function to compute the comparison
            key for each element.

    Returns:
        An observable emitting only the distinct contiguous items.
    """
    def _distinct(acc, i):
        key = i
        if key_mapper:
            key = key_mapper(i)

        if key != acc[2]:
            return (True, i, key)
        return (False, i, key)

    return rx.pipe(
        rs.ops.scan(_distinct, seed=(False, None, None)),
        rs.ops.filter(lambda i: i[0] is True),
        rs.ops.map(lambda i: i[1]),
    )
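
A usage sketch on a plain Observable, assuming rs.ops.scan/filter/map also accept non-mux sources (as the scan helper in Example #10 shows). It reproduces the marble diagram above: only contiguous duplicates are dropped.

import rx

rx.from_([0, 1, 1, 2, 3, 1, 2, 2, 3]).pipe(
    distinct_until_changed(),
).subscribe(print)
# expected output: 0, 1, 2, 3, 1, 2, 3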
Example #15
def test_split_without_store():
    actual_error = []

    rx.from_([1, 2, 3, 4]).pipe(rs.data.split(
        lambda i: i[-1], rx.pipe()), ).subscribe(on_error=actual_error.append)

    assert type(actual_error[0]) is ValueError
Example #16
def test_router_on_mux_error():
    source = [1, 0, 2, 0]
    actual_result = []
    actual_error = []
    errors_result = []
    errors_error = []

    errors, route_errors = rs.error.create_error_router()

    data = rx.from_(source).pipe(
        rs.ops.multiplex(rx.pipe(
            rs.ops.map(lambda i: 1 / i),
            route_errors(),
        )))

    errors.subscribe(on_next=errors_result.append,
                     on_error=errors_error.append)
    data.subscribe(on_next=actual_result.append, on_error=actual_error.append)

    assert errors_error == []
    assert len(errors_result) == 2
    assert type(errors_result[0]) is ZeroDivisionError
    assert type(errors_result[1]) is ZeroDivisionError
    assert actual_error == []
    assert actual_result == [1.0, 0.5]
Example #17
def test_dead_letter_multi_instance():
    source = [1, 0, 2, 4, 0]
    actual_result = []
    actual_error = []
    errors_result = []
    errors_error = []

    errors, route_errors = rs.error.create_error_router()

    data = rx.from_(source).pipe(
        rs.ops.multiplex(
            rx.pipe(
                rs.ops.map(lambda i: 1 / i),
                route_errors(),
                rs.ops.map(lambda i: i / (i - 0.5)),
                route_errors(),
            )))

    errors.subscribe(on_next=errors_result.append,
                     on_error=errors_error.append)
    data.subscribe(on_next=actual_result.append, on_error=actual_error.append)

    assert errors_error == []
    assert len(errors_result) == 3
    for e in errors_result:
        assert type(e) is ZeroDivisionError
    assert actual_error == []
    assert actual_result == [2.0, -1.0]
Example #18
def lttb_operator(n_out: int):
    """
    The input frame must be datetime indexed.
    If multiple columns are present, each column is downsampled separately and
    returned as its own dataframe.
    :param n_out: target number of output points per column
    :return: list of dataframes, one per input column
    """
    def lttb_ops(x: pd.DataFrame):
        if x.shape[0] <= n_out:
            # just split into multiple dataframes
            ret = [
                pd.DataFrame(index=x.index, data=x[col]) for col in x.columns
            ]
            return ret

        # convert DataFrame index (datetime) to int
        x = x.copy()
        x.index = x.index.astype("int64")

        # lttb calculation
        ret = []
        for col in x.columns:
            data = np.array((x.index.values, x[col])).T
            output = lttb.downsample(data, n_out)
            df = pd.DataFrame(index=output[:, 0].astype('datetime64[ns]'),
                              data={col: output[:, 1]})
            ret.append(df)
        return ret

    return rx.pipe(operators.map(lttb_ops))
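
A hedged usage sketch: a datetime-indexed frame with two columns is pushed through the operator and downsampled to 50 points per column. Column names and sizes are illustrative only, and the lttb package must be installed.

import numpy as np
import pandas as pd
import rx

idx = pd.date_range("2021-01-01", periods=1000, freq="s")
frame = pd.DataFrame(index=idx, data={
    "temperature": np.random.rand(1000),
    "pressure": np.random.rand(1000),
})

rx.of(frame).pipe(
    lttb_operator(n_out=50),
).subscribe(lambda dfs: print([df.shape for df in dfs]))
# expected output: [(50, 1), (50, 1)]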
Example #19
    def test_export_identity_no_id(self):
        def mock(request):
            self.assertEqual(request.url_post_fix, "/identity/source")
            return MockResponse(bytes("identity content", encoding="utf-8"))

        self.gaiaRef = mock_gaia_ref(mock)
        self.assertRaises(TypeError, lambda: pipe(ops.first())(self.gaiaRef.identity().export()).run())
Example #20
def mean(key_mapper=lambda i: i, reduce=False):
    '''Averages the items emitted in the source observable

    The source can be an Observable or a MuxObservable.

    .. marble::
        :alt: mean

        -0---1---2---3---4---|
        [        mean()      ]
        -0--0.5--1---1.5-2.0-|

    Args:
        key_mapper: [Optional] a function called on each item before computing
            the average.
        reduce: [Optional] Emit an item for each source item when reduce is
            False, otherwise emits a single item on completion.

    Returns:
        An observable emitting items whose value is the mean of the source items.
    '''
    def accumulate(acc, i):
        i = key_mapper(i)
        return (acc[0]+i, acc[1]+1)

    return rx.pipe(
        rs.ops.scan(accumulate, (0, 0), reduce=reduce),
        rs.ops.map(lambda acc: acc[0] / acc[1] if acc is not None else None),
    )
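
A usage sketch matching the marble diagram above, assuming the rxsci operators used by mean() accept a plain Observable.

import rx

rx.from_([0, 1, 2, 3, 4]).pipe(mean()).subscribe(print)
# expected output: 0.0, 0.5, 1.0, 1.5, 2.0

rx.from_([0, 1, 2, 3, 4]).pipe(mean(reduce=True)).subscribe(print)
# expected output: 2.0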
Example #21
def cartesian_product():
    """
    this method take stream of list (--[a,b]--[1,2]-->)
    convert each list to a stream (--a--b--> --1--2-->)
    convert output to a stream contain one list of streams (--[--a--b--> , --1--2-->]-->)
    convert output to a cartesian product stream (--[a,1]--[a,2]--[b,1]--[b,2]-->)
    """
    def cartesian(sources):
        """
        this method take list of streams (--[--a--b--> , --1--2-->]-->)
        convert output to a cartesian product stream (--[a,1]--[a,2]--[b,1]--[b,2]-->)
        """
        if len(sources) == 0:
            return empty()

        result = sources[0].pipe(ops.map(lambda s: [s]))

        def two_streams_product(stream2, stream1):
            product = stream1.pipe(
                ops.flat_map(
                    lambda s1: stream2.pipe(ops.map(lambda s2: s1 + [s2]))))
            return product

        for i in range(1, len(sources)):
            result = two_streams_product(sources[i], result)

        return result

    return pipe(ops.map(lambda _list: from_list(_list)), ops.to_list(),
                ops.flat_map(lambda i: cartesian(i)))
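
A usage sketch following the docstring, assuming this module's imports (pipe, ops, empty, from_list) are in place: a stream of two lists becomes their cartesian product.

import rx

rx.from_([['a', 'b'], [1, 2]]).pipe(
    cartesian_product(),
).subscribe(print)
# expected output: ['a', 1], ['a', 2], ['b', 1], ['b', 2]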
Example #22
def test_multiplex():
    source = [1, 2, 3, 4]
    actual_error = []
    actual_completed = []
    actual_result = []
    mux_actual_result = []

    def on_completed():
        actual_completed.append(True)

    rx.from_(source).pipe(
        rs.ops.multiplex(rx.pipe(ops.do_action(
            mux_actual_result.append), ), ), ).subscribe(
                on_next=actual_result.append,
                on_completed=on_completed,
                on_error=actual_error.append,
            )

    assert actual_error == []
    assert actual_completed == [True]
    assert actual_result == source
    assert mux_actual_result == [
        rs.OnCreateMux((0, )),
        rs.OnNextMux((0, ), 1),
        rs.OnNextMux((0, ), 2),
        rs.OnNextMux((0, ), 3),
        rs.OnNextMux((0, ), 4),
        rs.OnCompletedMux((0, )),
    ]
Example #23
def group_by(key_mapper, pipeline):
    """Groups items of according to a key mapper

    The source must be a MuxObservable.

    .. marble::
        :alt: group_by

        --1--2--a--3--b--c-|
        [    group_by()    ]
        -+-----+-----------|
               +a-----b--c-|
         +1--2-----3-------|

    Examples:
        >>> rs.ops.group_by(lambda i: i.category, rs.ops.count)

    Args:
        key_mapper: A function to extract the key from each item
        pipeline: The Rx pipe to execute on each group.

    Returns:
        A MuxObservable with one observable per group.
    """
    _group_by, outer_obs = group_by_mux(key_mapper)

    return rx.pipe(
        _group_by,
        pipeline,
        demux_mux_observable(outer_obs),
    )
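
A hedged usage sketch: group_by needs a mux source with a state store, so it is wrapped in rs.state.with_memory_store as in the tests on this page; each group's values are summed on completion.

import rx
import rxsci as rs

rx.from_([('a', 1), ('b', 10), ('a', 2), ('b', 20)]).pipe(
    rs.state.with_memory_store(
        rs.ops.group_by(
            lambda i: i[0],
            pipeline=rx.pipe(
                rs.ops.map(lambda i: i[1]),
                rs.math.sum(reduce=True),
            )),
    ),
).subscribe(print)
# expected output: 3 and 30 (one sum per group)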
Example #24
def variance(key_mapper=lambda i: i, reduce=False):
    ''' Computes the variance of the items emitted in the source observable.

    The implementation is based on the formal definition of the variance.
    This implies that all items are cached in memory to do the computation.
    Use the rxsci.math.variance operator to compute the variance on a large
    observable.

    Args:
        key_mapper: [Optional] a function called on each item before computing
            the variance.
        reduce: [Optional] Emit an item for each source item when reduce is
            False, otherwise emits a single item on completion.

    Returns:
        An observable emitting variance of source items.
    '''
    def accumulate(acc, i):
        i = key_mapper(i)
        acc.append(i)
        return acc

    def _variance(acc):
        if len(acc) == 0:
            return 0.0
        else:
            mean = _moment(acc, 0, 1)
            v = _moment(acc, mean, 2)
            acc.clear()
            return v

    return rx.pipe(
        rs.ops.scan(accumulate, [], reduce=reduce),
        rs.ops.map(_variance),
    )
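
A usage sketch with reduce=True: a single population variance is emitted on completion (the mean of [1, 2, 3, 4] is 2.5, so the variance is 1.25).

import rx

rx.from_([1.0, 2.0, 3.0, 4.0]).pipe(
    variance(reduce=True),
).subscribe(print)
# expected output: 1.25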
Example #25
def split(predicate, pipeline):
    ''' Split an observable based on a predicate criteria.

    The source must be a MuxObservable.

    .. marble::
        :alt: split

        -1,a--1,b-1,c-2,b-2,c-|
        [       split()       ]
        -+------------+-------|
                      +2,b-2,c|
         +1,a-1,b--1,c|

    Args:
        predicate: A function called for each item, that returns the split
            criteria.
        pipeline: The Rx pipe to execute on each split.

    Returns:
        A higher order observable emitting one observable for each split
        criterion.
    '''
    _split, outer_obs = split_mux(predicate)

    return rx.pipe(
        _split,
        pipeline,
        demux_mux_observable(outer_obs),
    )
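
A hedged usage sketch: split() also requires a state store; items are split on their last character, mirroring the tests on this page, and each split's last item is emitted.

import rx
import rxsci as rs

rx.from_(['1a', '2a', '3b', '4b']).pipe(
    rs.state.with_memory_store(
        rs.data.split(
            lambda i: i[-1],
            rx.pipe(rs.ops.last()),
        ),
    ),
).subscribe(print)
# expected output: '2a', '4b'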
Example #26
def test_group_by_roll_sum2():
    source = [1, 2, 3, 4, 5, 6, 7, 8]
    actual_result = []
    rx.from_(source).pipe(
        rs.state.with_memory_store(
            rx.pipe(
                rs.ops.group_by(
                    lambda i: i % 2,
                    pipeline=rx.pipe(
                        rs.data.roll(window=2,
                                     stride=2,
                                     pipeline=rx.pipe(rs.math.sum(
                                         reduce=True), )), )), )), ).subscribe(
                                             on_next=actual_result.append)

    assert actual_result == [4, 6, 12, 14]
Example #27
    def test_remove_nonexistent_file(self):
        def mock(request):
            self.assertEqual(request.url_post_fix, "/data/remove")
            return MockResponse({"fileExisted": False})

        self.gaiaRef = mock_gaia_ref(mock)
        result = pipe(ops.first())(self.gaiaRef.data("gaia://tenant/somefolder/nonexistentFile").remove()).run()
        self.assertEqual(result.file_existed, False)
Example #28
    def test_list_files_in_nonexistent_dir(self):
        def mock(request):
            self.assertEqual(request.url_post_fix, "/data/list")
            return MockResponse([])

        self.gaiaRef = mock_gaia_ref(mock)
        result = pipe(ops.first())(self.gaiaRef.data("gaia://tenant1/nonexistentDirectory").list()).run()
        self.assertEqual(len(result), 0)
Example #29
    def test_write_new_file_with_path(self):
        file = Path(tempfile.gettempdir()) / "file"
        self.gaiaRef = mock_gaia_ref(self.mock_write)
        with open(str(file), "wb+") as f:
            f.write(b"f\x00\x00bar")
        path = Path(file)
        response = self.gaiaRef.data("gaia://tenant/somefolder").add("newFile", path)
        assert pipe(ops.first())(response).run().uri == "gaia://tenant/somefolder/newFile"
Example #30
def test_split():
    source = ["1a", "2a", "3b", "4b", "5c", "6c", "7c", "8d", "9d"]
    source = [
        rs.OnCreateMux((1, None)),
        rs.OnNextMux((1, None), '1a'),
        rs.OnNextMux((1, None), '2a'),
        rs.OnNextMux((1, None), '3b'),
        rs.OnNextMux((1, None), '4b'),
        rs.OnNextMux((1, None), '5c'),
        rs.OnNextMux((1, None), '6c'),
        rs.OnNextMux((1, None), '7c'),
        rs.OnNextMux((1, None), '8d'),
        rs.OnNextMux((1, None), '9d'),
        rs.OnCompletedMux((1, None)),
    ]
    actual_result = []
    mux_actual_result = []
    expected_result = [
        ["1a", "2a"],
        ["3b", "4b"],
        ["5c", "6c", "7c"],
        ["8d", "9d"],
    ]

    def on_next(i):
        actual_result.append(i)

    store = rs.state.StoreManager(store_factory=rs.state.MemoryStore)
    rx.from_(source).pipe(
        rs.cast_as_mux_observable(),
        rs.state.with_store(
            store,
            rs.data.split(lambda i: i[-1],
                          rx.pipe(ops.do_action(mux_actual_result.append), )),
        ),
    ).subscribe(on_next)

    assert type(mux_actual_result[0]) is rs.state.ProbeStateTopology
    assert mux_actual_result[1:] == [
        rs.OnCreateMux((1, (1, None)), store),
        rs.OnNextMux((1, (1, None)), '1a', store),
        rs.OnNextMux((1, (1, None)), '2a', store),
        rs.OnCompletedMux((1, (1, None)), store),
        rs.OnCreateMux((1, (1, None)), store),
        rs.OnNextMux((1, (1, None)), '3b', store),
        rs.OnNextMux((1, (1, None)), '4b', store),
        rs.OnCompletedMux((1, (1, None)), store),
        rs.OnCreateMux((1, (1, None)), store),
        rs.OnNextMux((1, (1, None)), '5c', store),
        rs.OnNextMux((1, (1, None)), '6c', store),
        rs.OnNextMux((1, (1, None)), '7c', store),
        rs.OnCompletedMux((1, (1, None)), store),
        rs.OnCreateMux((1, (1, None)), store),
        rs.OnNextMux((1, (1, None)), '8d', store),
        rs.OnNextMux((1, (1, None)), '9d', store),
        rs.OnCompletedMux((1, (1, None)), store),
    ]
    assert actual_result == source