Example #1
def run_parallel(args):
    worker_factory, simulation_cases = factory.resolve_configuration(args)

    logger.debug("Shuffling the job queue")
    random.shuffle(simulation_cases)

    logger.debug("Creating the pool")

    processes_no = int(args["-j"])
    rxtools.configure_default_executor(processes_no)

    wall_time = []
    start_time = datetime.now()
    results = []
    logger.debug("Simulation cases: %s", simulation_cases)
    logger.debug("Work will be divided into %d processes", processes_no)

    sys = ActorSystem("multiprocTCPBase", logDefs=log_helper.EVOGIL_LOG_CONFIG)

    with log_time(system_time, logger, "Pool evaluated in {time_res}s", out=wall_time):

        def process_result(subres):
            results.append(subres)
            log_simulation_stats(start_time, subres[-1], len(simulation_cases))

        rx.from_iterable(range(len(simulation_cases))).pipe(
            ops.map(lambda i: worker_factory(simulation_cases[i], i)),
            # ops.map(lambda w: rxtools.from_process(w.run)),
            ops.map(lambda w: w.run()),
            # ops.merge(max_concurrent=1)
            ops.do_action(on_next=process_result)
        ).run()
    log_summary(args, results, simulation_cases, wall_time)
    rxtools.shutdown_default_executor()
    sys.shutdown()
Example #2
async def run(self):
    msg = await self.receive(timeout=1)
    if msg:
        # print("Window Tick  Stream")
        db_id = msg.get_metadata("db_id")
        window = await fx_db.get_fx_window(db_id)
        # print(window)
        rx.from_iterable(self.analysers).pipe(
            ops.filter(lambda a: a.symbol == window.symbol)).subscribe_(
                lambda tick_analyser: tick_analyser.on_next(window))
Example #3
    async def run(self):
        msg = await self.receive(timeout=1)
        if msg:
            # print(msg)
            tick_id = msg.get_metadata("fx_tick_id")
            tick = await fx_db.get_fx_tick(tick_id)
            # print(tick.symbol)
            # rx.from_iterable(self.analysers).subscribe_(print)
            rx.from_iterable(self.analysers).pipe(
                ops.filter(lambda a: a.symbol == tick.symbol)).subscribe_(
                    lambda tick_analyser: tick_analyser.on_next(tick))
            # print(fx_tick)

        await asyncio.sleep(delay=sleep_delay)
Example #4
    def __init__(self,
                 endpoints: [typing.ServiceEndpoint] = None,
                 endpoint_clients: [typing.ServiceEndpointClient] = None,
                 request_correlator: typing.RequestCorrelator = DefaultRequestCorrelator()):

        super().__init__(endpoint_clients, request_correlator)

        if endpoints is None:
            self.__endpoints = []
        else:
            self.__endpoints = endpoints

        self.__requests = rx.from_iterable(map(lambda ep: ep.requests, self.__endpoints)).pipe(merge_all(), share())
        self.__commands = rx.from_iterable(map(lambda ep: ep.commands, self.__endpoints)).pipe(merge_all(), share())
Example #5
def _window_to_group(self, value):
    return value.pipe(
        ops.to_iterable(),
        ops.map(
            lambda x: rx.from_iterable(x).pipe(ops.group_by(
                _group_by), ops.map(_group_to_batch), ops.merge_all())),
        ops.merge_all())
Example #6
def logs(self, number_of_lines: int) -> Observable[str]:
    return rx.from_callable(
        lambda: self._client.post_json(
            {'uri': self._uri, 'numberOfLines': number_of_lines},
            url_postfix="/skill/logs"),
        self._scheduler,
    ).pipe(
        ops.map(lambda r: json.loads(r.content)),
        ops.flat_map(lambda r: rx.from_iterable(r['logLines'])),
    )
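
This example wraps a blocking HTTP call in rx.from_callable and then fans each JSON array out into per-line emissions with ops.flat_map over rx.from_iterable. A minimal, self-contained sketch of that shape (fetch() is a hypothetical stand-in for the client's post_json call):

import json
import rx
from rx import operators as ops

def fetch():
    # Hypothetical stand-in for the blocking post_json(...) request above.
    return '{"logLines": ["line 1", "line 2"]}'

rx.from_callable(fetch).pipe(
    ops.map(json.loads),
    ops.flat_map(lambda r: rx.from_iterable(r["logLines"])),
).subscribe(print)  # each log line becomes its own emission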
Example #7
    def test_collect_works(self):
        obs = open_orders.collect(
            from_iterable([
                IbApiMessage(type=IbApiMessageType.OPEN_ORDER,
                             payload=(0, test_utils.appl_contract(), _order1(),
                                      _order_state1())),
                IbApiMessage(type=IbApiMessageType.OPEN_ORDER,
                             payload=(15, test_utils.ibkr_contract(),
                                      _order2(), _order_state2())),
            ]))

        # TODO: for now, Contract, Order, OrderState, etc cannot be tested
        # naively as they do not have __eq__ defined.
        assert_that(
            obs.run(),
            contains_exactly(
                all_of(
                    has_property('order_id', equal_to(0)),
                    has_property('contract', anything()),
                    has_property('order', anything()),
                    has_property('order_state', anything()),
                ),
                all_of(
                    has_property('order_id', equal_to(15)),
                    has_property('contract', anything()),
                    has_property('order', anything()),
                    has_property('order_state', anything()),
                )))
Example #8
def _merge(*args: Union[Observable, Iterable[Observable]]) -> Observable:
    sources = args[:]

    if isinstance(sources[0], Iterable):
        sources = sources[0]

    return rx.from_iterable(sources).pipe(ops.merge_all())
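
The helper normalises "several observables or one iterable of observables" before flattening with ops.merge_all(). A small runnable sketch of the same flattening step (the two sources below are illustrative, not from the original project):

import rx
from rx import operators as ops

evens = rx.from_iterable([0, 2, 4])
odds = rx.of(1, 3, 5)

# Wrap the collection of observables and flatten it, as _merge() does.
rx.from_iterable([evens, odds]).pipe(ops.merge_all()).subscribe(print)
# With the default immediate scheduling this prints 0, 2, 4, 1, 3, 5.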
Example #9
def main():
    loop = asyncio.get_event_loop()
    io_scheduler = AsyncIOThreadSafeScheduler(loop=loop)
    scheduler = ThreadPoolScheduler(multiprocessing.cpu_count())

    video_stream_observable = rx.using(
        lambda: VideoStreamDisposable(),
        lambda d: rx.from_iterable(video_stream_iterable(d.cap)))

    disposable = video_stream_observable.pipe(
        ops.subscribe_on(scheduler),
        ops.sample(1 / ARGS.fps),  # sample frames based on fps
        ops.filter(has_face),  # filter frames without faces
        ops.map(lambda frame: Image.fromarray(
            cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))),  # map frame to PIL image
        ops.observe_on(io_scheduler),
        ops.map(lambda img: ImageFacesPair(img, analyse_frame(img))
                ),  # analyse faces on frame
        ops.filter(
            lambda img_faces_pair: any(
                ARGS.min_confidence <= face.top_prediction.confidence <= ARGS.max_confidence
                for face in img_faces_pair.faces
            )
        ),  # proceed only if min_confidence <= person_confidence <= max_confidence
        ops.do_action(on_next=save_frame)).subscribe(
            on_error=lambda e: logger.exception(e))

    try:
        loop.run_forever()
    except Exception as e:
        logger.exception(e)
        logger.info("Data collector shutdown")
        disposable.dispose()
Example #10
    def test_collect_works(self):
        obs = position.collect(
            from_iterable([
                IbApiMessage(type=IbApiMessageType.POSITION,
                             payload=('DU123', test_utils.appl_contract(),
                                      100.0, 123.45)),
                IbApiMessage(type=IbApiMessageType.POSITION,
                             payload=('DU123', test_utils.ibkr_contract(), 1.0,
                                      45.6789)),
            ]))

        # TODO: for now, Contract, etc cannot be tested naively as they do not have __eq__ defined.
        assert_that(
            obs.run(),
            contains_exactly(
                all_of(
                    has_property('account', equal_to('DU123')),
                    has_property('contract', anything()),
                    has_property('size', equal_to(100.0)),
                    has_property('average_cost', equal_to(123.45)),
                ),
                all_of(
                    has_property('account', equal_to('DU123')),
                    has_property('contract', anything()),
                    has_property('size', equal_to(1.0)),
                    has_property('average_cost', equal_to(45.6789)),
                )))
Example #11
def get_files(base_dir: Path, filter_regex: str,
              scheduler: rx.typing.Scheduler) -> rx.Observable:
    """Return an observable of files to process as FileTarget's."""
    return rx.from_iterable(
        fl.file_listing_iterator(base_dir, filter_regex),
        scheduler=scheduler,
    )
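
A hedged usage sketch of the same call shape, with a plain Path.glob standing in for the project's fl.file_listing_iterator (which yields FileTarget objects filtered by a regex):

from pathlib import Path

import rx
from rx.scheduler import ThreadPoolScheduler

def file_listing(base_dir: Path, pattern: str):
    # Stand-in for fl.file_listing_iterator: lazily yield matching files.
    yield from base_dir.glob(pattern)

scheduler = ThreadPoolScheduler(max_workers=2)
files = rx.from_iterable(file_listing(Path("."), "*.py"), scheduler=scheduler)
files.subscribe(on_next=print, on_completed=lambda: print("done"))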
Example #12
def to_rx(self, *inputs: rx.Observable) -> rx.Observable:
    # NOTE If Module is not attached to graph, do not check len(inputs)
    # TODO Does it make sense for module to be detached from graph?
    if self._is_used_in_static_graph:
        self._check_num_inputs(len(inputs), check_nodes=True)
    self.consume(*inputs)
    # Create dummy observable to satisfy type signature
    return rx.from_iterable([])
Example #13
def _solve(print=print):
    total = rx.from_iterable(primes()) \
        .pipe(
        ops.take_while(lambda p: p < 2000000),
        ops.sum(),
    ).run()
    print(f'The sum of primes below 2m: {total}')
    return True
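
The snippet assumes a primes() generator that is not shown. Any infinite prime generator works; a naive trial-division sketch (fine for a demo, though a sieve would be the realistic choice for two million) makes the example runnable:

import itertools

def primes():
    # Yield primes indefinitely by trial division against previously found primes.
    found = []
    for n in itertools.count(2):
        if all(n % p for p in found if p * p <= n):
            found.append(n)
            yield n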
Example #14
def _solve(print=print):
    v = rx.from_iterable(primes()) \
      .pipe(
        ops.skip(10000),
        ops.take(1),
    ).run()
    print(v)
    return True
Example #15
async def test_rx_support_request_channel_server_take_only_n(pipe: Tuple[RSocketServer, RSocketClient],
                                                             take_only_n):
    server, client = pipe
    received_messages = []
    items_generated = 0
    maximum_message_count = 3
    wait_for_server_finish = asyncio.Event()

    class Handler(BaseRequestHandler, DefaultSubscriber):

        def on_next(self, value: Payload, is_complete=False):
            received_messages.append(value)
            if len(received_messages) < take_only_n:
                self.subscription.request(1)
            else:
                self.subscription.cancel()
                wait_for_server_finish.set()

        def on_complete(self):
            wait_for_server_finish.set()

        async def on_error(self, error_code: ErrorCode, payload: Payload):
            wait_for_server_finish.set()

        def on_subscribe(self, subscription: Subscription):
            super().on_subscribe(subscription)
            subscription.request(1)

        async def request_channel(self, payload: Payload) -> Tuple[Optional[Publisher], Optional[Subscriber]]:
            return None, self

    server.set_handler_using_factory(Handler)

    rx_client = RxRSocket(client)

    def generator():
        nonlocal items_generated
        for x in range(maximum_message_count):
            items_generated += 1
            yield Payload('Feed Item: {}'.format(x).encode('utf-8'))

    await rx_client.request_channel(
        Payload(b'request text'),
        observable=rx.from_iterable(generator())
    ).pipe(
        operators.to_list()
    )

    await wait_for_server_finish.wait()

    maximum_message_received = min(maximum_message_count, take_only_n)

    # assert items_generated == maximum_message_received # todo: Stop async generator on cancel from server requester

    assert len(received_messages) == maximum_message_received

    for i in range(maximum_message_received):
        assert received_messages[i].data == ('Feed Item: %d' % i).encode()
Example #16
def step(self):
    last_result = rx.from_iterable(self.islands).pipe(
        ops.subscribe_on(NewThreadScheduler()),
        ops.flat_map(lambda island: island.epoch(self.epoch_length).pipe(
            ops.last())),
        ops.buffer_with_count(len(self.islands)),
    ).run()
    self.migration()
    self.update_cost(last_result)
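
The shape here, flat-mapping every element into its own observable and collecting a single buffer of the last values, can be tried without the project's island objects (plain integers stand in below; the NewThreadScheduler detail is omitted):

import rx
from rx import operators as ops

items = [1, 2, 3]
result = rx.from_iterable(items).pipe(
    ops.flat_map(lambda i: rx.of(i * 10).pipe(ops.last())),
    ops.buffer_with_count(len(items)),
).run()
print(result)  # [10, 20, 30]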
Example #17
def test_input_module():
    inputs = [Tensor((1, ), "int", x) for x in [1, 2, 3]]
    expected = inputs
    results = []

    module = InputModule((1, ), "int")
    obs = module.to_rx(rx.from_iterable(inputs))
    obs.subscribe(lambda x: results.append(x))

    assert results == expected
Example #18
def _window_to_group(value):
    return value.pipe(
        ops.to_iterable(),
        ops.map(lambda x: rx.from_iterable(x).pipe(
            # Group window by 'organization', 'bucket' and 'precision'
            ops.group_by(_group_by),
            # Create batch (concatenation line protocols by \n)
            ops.map(_group_to_batch),
            ops.merge_all())),
        ops.merge_all())
Example #19
    def assert_observed_data_ok(self, data, reduce_func, start_cond, end_cond,
                                exp_nexts, exp_errs, exp_compl):
        obs = MockObserver()

        stream = rx.from_iterable(data)
        stream.pipe(observing.reduce_while(reduce_func, start_cond,
                                           end_cond)).subscribe(obs)

        self.assertEqual(exp_nexts, obs.nexts)
        self.assertEqual(exp_errs, obs.errs)
        self.assertEqual(exp_compl, obs.compl)
Example #20
def _failed_transfers(store):
    processing_files = ReplaySubject()

    def transfer_files():
        state = store.getState()
        if state.processing:
            processing_files.on_next(state.processing)

    store.subscribe(transfer_files)
    return processing_files.pipe(
        operators.map(lambda paths: rx.from_iterable(paths)),
        operators.merge_all(), operators.flat_map(_transfer_file))
Example #21
def convert_to_syncmaps():
    input_observable = rx.from_iterable(get_files('srt', 'json'),
                                        pool_scheduler)
    input_observable.pipe(
        ops.map(srt_to_dict),
        ops.map(to_syncmap),
    ).subscribe(
        on_next=lambda filename: print(f"Converted {filename}"),
        on_completed=lambda: print("Converting completed"),
        on_error=lambda err: print(f"ERROR: type: {type(err)}, message: {err}"),
    )
Example #22
    def test_collect_historical_trade_data(self):
        bar_data_1 = IbBarData()
        bar_data_1.date = '1577000000'
        bar_data_1.open = 1
        bar_data_1.high = 2
        bar_data_1.low = 1.5
        bar_data_1.close = 1.75
        bar_data_1.average = 1.567
        bar_data_1.volume = 9414151
        bar_data_1.barCount = 3435

        bar_data_2 = IbBarData()
        bar_data_2.date = '1577003600'
        bar_data_2.open = 2
        bar_data_2.high = 3
        bar_data_2.low = 1.25
        bar_data_2.close = 2.25
        bar_data_2.average = 2.123
        bar_data_2.volume = 4543625
        bar_data_2.barCount = 3821
        messages = [
            IbApiMessage(type=IbApiMessageType.HISTORICAL_DATA,
                         payload=(0, bar_data_1)),
            IbApiMessage(type=IbApiMessageType.HISTORICAL_DATA,
                         payload=(0, bar_data_2)),
        ]

        assert_that(
            historical_data.collect(from_iterable(messages), 0,
                                    types.HistoricalDataType.TRADES).run(),
            contains_exactly(
                equal_to(
                    types.BarData(
                        type=types.HistoricalDataType.TRADES,
                        time=datetime.datetime(2019, 12, 22, 7, 33, 20),
                        open=1,
                        high=2,
                        low=1.5,
                        close=1.75,
                        trade_data=types.BarData.TradeData(volume=9414151,
                                                           average_price=1.567,
                                                           bar_count=3435))),
                equal_to(
                    types.BarData(
                        type=types.HistoricalDataType.TRADES,
                        time=datetime.datetime(2019, 12, 22, 8, 33, 20),
                        open=2,
                        high=3,
                        low=1.25,
                        close=2.25,
                        trade_data=types.BarData.TradeData(volume=4543625,
                                                           average_price=2.123,
                                                           bar_count=3821)))))
Example #23
def run(onComplete=lambda: None):
    limit = 10 ** 22
    optimal_thread_count = multiprocessing.cpu_count()
    pool_scheduler = ThreadPoolScheduler(optimal_thread_count)

    count = rx.from_iterable(pandigital_step_numbers()) \
        .pipe(
        ops.take_while(lambda n: n < limit),
        ops.count(),
    ) \
        .run()

    onComplete(count)
Example #24
    def start_with(source: Observable) -> Observable:
        """Partially applied start_with operator.

        Prepends a sequence of values to an observable sequence.

        Example:
            >>> start_with(source)

        Returns:
            The source sequence prepended with the specified values.
        """
        start = rx.from_iterable(args)
        sequence = [start, source]
        return rx.concat(*sequence)
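
This is the inner closure of a start_with implementation; the enclosing factory captures args. In application code the public RxPY operator is used directly, for example:

import rx
from rx import operators as ops

rx.of(3, 4, 5).pipe(ops.start_with(1, 2)).subscribe(print)
# prints 1, 2, 3, 4, 5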
Example #25
    def __init__(self, stub):
        self._stub = stub
        self._slots = [
            'OnReceiveMsg',
            'OnReceiveTrData',
            'OnReceiveChejanData',
            'OnEventConnect',
        ]

        request = KiwoomOpenApiService_pb2.ListenRequest()
        request.slots.extend(self._slots)  # pylint: disable=no-member
        self._response_iterator = self._stub.Listen(request)
        self._subscription = rx.from_iterable(
            self._response_iterator).subscribe(self)
Example #26
def test_forward_module():
    class MyForwardModule(ForwardModule):
        def forward(self, x: Tensor) -> Tensor:
            return Tensor((None, ), "str", str(x.data))

    inputs = [Tensor((1, ), "int", x) for x in [1, 2, 3]]
    expected = [Tensor((None, ), "str", x) for x in ["1", "2", "3"]]
    results = []

    module = MyForwardModule((None, ), "str")
    obs = module.to_rx(rx.from_iterable(inputs))
    obs.subscribe(lambda x: results.append(x))

    assert results == expected
Example #27
    def start_with(source: Observable) -> Observable:
        """Partially applied start_with operator.

        Prepends a sequence of values to an observable sequence.

        Example:
            >>> start_with(source)

        Returns:
            The source sequence prepended with the specified values.
        """
        start = rx.from_iterable(args)
        sequence = [start, source]
        return rx.concat(*sequence)
Example #28
def test_forward_async_module():
    class MyForwardAsyncModule(ForwardAsyncModule):
        def forward(self, xs: rx.Observable) -> rx.Observable:
            return xs.pipe(ops.map(str))

    inputs = [1, 2, 3]
    expected = ["1", "2", "3"]
    results = []

    module = MyForwardAsyncModule((None, ), "str")
    obs = module.to_rx(rx.from_iterable(inputs))
    obs.subscribe(lambda x: results.append(x))

    assert results == expected
Example #29
def main():
    loop = asyncio.get_event_loop()
    io_scheduler = AsyncIOThreadSafeScheduler(loop=loop)
    scheduler = ThreadPoolScheduler(multiprocessing.cpu_count())

    semaphore = Subject()

    semaphore_stream = semaphore.pipe(
        ops.flat_map(lambda _: rx.of(True).pipe(
            ops.delay(ARGS.block_time, scheduler=scheduler),
            ops.start_with(False))), ops.start_with(True))

    video_stream_observable = rx.using(
        lambda: VideoStreamDisposable(),
        lambda d: rx.from_iterable(video_stream_iterable(d.cap)))

    gated_video_stream = video_stream_observable.pipe(
        ops.subscribe_on(scheduler),
        ops.sample(1 / ARGS.fps),  # sample frames based on fps
        ops.combine_latest(semaphore_stream),
        ops.filter(lambda tup: tup[1]),  # proceed only if semaphore allows
        ops.map(lambda tup: tup[0])  # take only frame
    )

    disposable = gated_video_stream.pipe(
        ops.filter(has_face),  # filter frames without faces
        ops.map(lambda frame: Image.fromarray(
            cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))),  # map frame to PIL image
        ops.map(lambda img: img.resize(
            (640, 360))),  # resize image (inference will be faster)
        ops.observe_on(io_scheduler),
        ops.map(lambda img: ImageFacesPair(img, analyse_frame(img))
                ),  # analyse frame for faces
        ops.filter(lambda img_faces_pair: any([
            face.top_prediction.confidence > ARGS.threshold
            for face in img_faces_pair.faces
        ])),  # proceed only if there is a known face in the frame
        ops.throttle_first(1),
        ops.flat_map(unlock_request),  # unlock the door
        ops.do_action(
            on_next=lambda _: semaphore.on_next(True)
        )  # trigger semaphore which will block stream for "block-seconds" seconds (doors are unlocked for that long after unlock request)
    ).subscribe(on_error=lambda e: logger.exception(e))

    try:
        loop.run_forever()
    except Exception as e:
        logger.exception(e)
        logger.info("Smart lock face recognition engine shutdown")
        disposable.dispose()
Example #30
def intensity_steps(steps: List[Step], flushes):
    """

    Given an intensity value, return the list of steps
    which should be played back. The logic indexes higher
    based on the intensity value until it is reset by the final step

    :param steps:
    """

    return pipe(
        ops.flat_map(lambda event: rx.from_iterable(steps).pipe(
            ops.map(lambda step: (step, event)))),
        ops.filter(lambda x: x[0] <= x[1]), ops.map(lambda x: x[0]),
        ops.distinct(flushes=flushes))
Example #31
def test_run_empty_graph():
    def create_graph():
        inputs = [Input((1, ), "int")]
        outputs = inputs
        return Model(inputs=inputs, outputs=outputs)

    inputs = [Tensor((1, ), "int", x) for x in [1, 2, 3]]
    expected = inputs
    results = []

    model = create_graph()
    model.setup_blocking()
    obs = model.to_rx(rx.from_iterable(inputs))
    obs[0].subscribe(lambda x: results.append(x))

    assert results == expected
Example #32
def _merge(*sources: Observable) -> Observable:
    return rx.from_iterable(sources).pipe(ops.merge_all())
Example #33
def action1(scheduler, state):
    xs[0] = rx.from_iterable(["alpha", "apple", "beta", "bat", "gamma"]) \
        .pipe(ops.group_by(lambda s: s[0]),
              ops.map(lambda xs: xs.pipe(ops.to_iterable(), ops.map(list))),
              ops.merge_all(),
              )
Example #34
def test_observer_throws(self):
    with self.assertRaises(RxException):
        rx.from_iterable([1, 2, 3]).subscribe(lambda x: _raise('ex'))
Example #35
def _sequence_equal(second: Observable, comparer: Callable[[Any, Any], bool] = None
                   ) -> Callable[[Observable], Observable]:
    comparer = comparer or default_comparer
    if isinstance(second, collections.abc.Iterable):
        second = rx.from_iterable(second)

    def sequence_equal(source: Observable) -> Observable:
        """Determines whether two sequences are equal by comparing the
        elements pairwise using a specified equality comparer.

        Examples:
            >>> res = sequence_equal([1,2,3])
            >>> res = sequence_equal([{ "value": 42 }], lambda x, y: x.value == y.value)
            >>> res = sequence_equal(rx.return_value(42))
            >>> res = sequence_equal(rx.return_value({ "value": 42 }), lambda x, y: x.value == y.value)

        Args:
            source: Source observable to compare.

        Returns:
            An observable sequence that contains a single element which
        indicates whether both sequences are of equal length and their
        corresponding elements are equal according to the specified
        equality comparer.
        """
        first = source

        def subscribe(observer, scheduler=None):
            donel = [False]
            doner = [False]
            ql = []
            qr = []

            def on_next1(x):
                if len(qr) > 0:
                    v = qr.pop(0)
                    try:
                        equal = comparer(v, x)
                    except Exception as e:
                        observer.on_error(e)
                        return

                    if not equal:
                        observer.on_next(False)
                        observer.on_completed()

                elif doner[0]:
                    observer.on_next(False)
                    observer.on_completed()
                else:
                    ql.append(x)

            def on_completed1():
                donel[0] = True
                if not ql:
                    if qr:
                        observer.on_next(False)
                        observer.on_completed()
                    elif doner[0]:
                        observer.on_next(True)
                        observer.on_completed()

            def on_next2(x):
                if len(ql) > 0:
                    v = ql.pop(0)
                    try:
                        equal = comparer(v, x)
                    except Exception as exception:
                        observer.on_error(exception)
                        return

                    if not equal:
                        observer.on_next(False)
                        observer.on_completed()

                elif donel[0]:
                    observer.on_next(False)
                    observer.on_completed()
                else:
                    qr.append(x)

            def on_completed2():
                doner[0] = True
                if not qr:
                    if len(ql) > 0:
                        observer.on_next(False)
                        observer.on_completed()
                    elif donel[0]:
                        observer.on_next(True)
                        observer.on_completed()

            subscription1 = first.subscribe_(on_next1, observer.on_error, on_completed1, scheduler)
            subscription2 = second.subscribe_(on_next2, observer.on_error, on_completed2, scheduler)
            return CompositeDisposable(subscription1, subscription2)
        return Observable(subscribe)
    return sequence_equal
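
The code above is the body of a sequence_equal implementation; the public entry point in RxPY is ops.sequence_equal, which accepts either an observable or a plain iterable as the second sequence (the isinstance(..., Iterable) branch). A brief usage sketch:

import rx
from rx import operators as ops

rx.from_iterable([1, 2, 3]).pipe(
    ops.sequence_equal([1, 2, 3]),        # iterable is wrapped via rx.from_iterable
).subscribe(print)                        # prints True

rx.from_iterable([1, 2, 3]).pipe(
    ops.sequence_equal(rx.of(1, 2, 4)),   # pairwise comparison fails on the last item
).subscribe(print)                        # prints False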