async def request_fragmented_stream(client: RxRSocket):
    """Request the routed 'fragmented_stream' endpoint and print every received payload."""
    metadata = composite(route('fragmented_stream'),
                         authenticate_simple('user', '12345'))
    payload = Payload(b'The quick brown fox', metadata)
    items = await client.request_stream(payload).pipe(operators.to_list())
    print(items)
def create():
    """Window the first elements of adjacent pairs, closing a window whenever consecutive values differ, and emit each window as a list."""
    # First element of each adjacent pair from the test observable.
    firsts = self.test_obs.pipe(pairwise_buffer, ops.map(lambda pair: pair[0]))
    # Boundary stream: fires whenever two consecutive values differ.
    boundaries = self.test_obs.pipe(
        pairwise_buffer, ops.filter(lambda pair: pair[0] != pair[1]))
    windowed = firsts.pipe(ops.window(boundaries))
    return windowed.pipe(ops.flat_map(lambda win: win.pipe(ops.to_list())))
async def test_rx_support_request_stream_with_error(pipe: Tuple[RSocketServer, RSocketClient],
                                                    success_count, request_limit):
    """An error raised mid-stream by the responder must surface as an exception on the rx requester."""
    server, client = pipe

    async def feed() -> AsyncGenerator[Tuple[Payload, bool], None]:
        # Emit the successful items first, then blow up.
        for index in range(success_count):
            yield Payload('Feed Item: {}'.format(index).encode('utf-8')), False
        raise Exception('Some error from responder')

    class Handler(BaseRequestHandler):
        async def request_stream(self, payload: Payload) -> Publisher:
            return StreamFromAsyncGenerator(feed)

    server.set_handler_using_factory(Handler)
    rx_client = RxRSocket(client)

    with pytest.raises(Exception):
        await rx_client.request_stream(
            Payload(b'request text'),
            request_limit=request_limit
        ).pipe(
            operators.map(lambda payload: payload.data),
            operators.to_list())
def cartesian_product():
    """Build an operator that turns a stream of lists into their cartesian product.

    Takes a stream of lists (--[a,b]--[1,2]-->), converts each list into its
    own stream, gathers those streams into one list, and finally emits every
    combination (--[a,1]--[a,2]--[b,1]--[b,2]-->).
    """

    def cartesian(streams):
        """Combine a list of streams into a single stream of cartesian-product lists."""
        if not streams:
            return empty()

        # Seed: wrap each element of the first stream in a singleton list.
        acc = streams[0].pipe(ops.map(lambda v: [v]))

        def extend_with(next_stream, acc_stream):
            # Append every value of next_stream to every partial combination.
            return acc_stream.pipe(
                ops.flat_map(lambda partial:
                             next_stream.pipe(ops.map(lambda v: partial + [v]))))

        for idx in range(1, len(streams)):
            acc = extend_with(streams[idx], acc)
        return acc

    return pipe(ops.map(lambda _list: from_list(_list)),
                ops.to_list(),
                ops.flat_map(lambda streams: cartesian(streams)))
async def test_rx_support_request_channel_server_take_only_n(pipe: Tuple[RSocketServer, RSocketClient], take_only_n):
    """The server-side subscriber cancels the channel after receiving take_only_n messages.

    The client feeds up to 3 items; the server requests one at a time and
    cancels once it has accepted ``take_only_n`` of them.
    """
    server, client = pipe
    received_messages = []
    items_generated = 0
    maximum_message_count = 3
    # Set when the server either cancels, completes, or errors.
    wait_for_server_finish = asyncio.Event()

    class Handler(BaseRequestHandler, DefaultSubscriber):
        def on_next(self, value: Payload, is_complete=False):
            received_messages.append(value)
            if len(received_messages) < take_only_n:
                # Still under the quota: pull one more item.
                self.subscription.request(1)
            else:
                # Quota reached: cancel the requester's stream.
                self.subscription.cancel()
                wait_for_server_finish.set()

        def on_complete(self):
            wait_for_server_finish.set()

        async def on_error(self, error_code: ErrorCode, payload: Payload):
            wait_for_server_finish.set()

        def on_subscribe(self, subscription: Subscription):
            super().on_subscribe(subscription)
            # Kick off the flow with an initial request of one item.
            subscription.request(1)

        async def request_channel(self, payload: Payload) -> Tuple[Optional[Publisher], Optional[Subscriber]]:
            # No response stream; the handler itself consumes the requester's stream.
            return None, self

    server.set_handler_using_factory(Handler)
    rx_client = RxRSocket(client)

    def generator():
        nonlocal items_generated
        for x in range(maximum_message_count):
            items_generated += 1
            yield Payload('Feed Item: {}'.format(x).encode('utf-8'))

    await rx_client.request_channel(
        Payload(b'request text'),
        observable=rx.from_iterable(generator())
    ).pipe(
        operators.to_list()
    )

    await wait_for_server_finish.wait()

    maximum_message_received = min(maximum_message_count, take_only_n)
    # assert items_generated == maximum_message_received # todo: Stop async generator on cancel from server requester
    assert len(received_messages) == maximum_message_received

    for i in range(maximum_message_received):
        assert received_messages[i].data == ('Feed Item: %d' % i).encode()
async def request_channel(client: RxRSocket):
    """Open a channel on the routed 'channel' endpoint and collect its responses."""
    metadata = composite(route('channel'), authenticate_simple('user', '12345'))
    payload = Payload(b'The quick brown fox', metadata)
    # Result list is collected but intentionally not used further here.
    result = await client.request_channel(payload, 5).pipe(operators.to_list())
def to_pandas(columns=None):
    """Collect all items from an observable of namedtuples into one pandas DataFrame.

    Args:
        columns: optional explicit column names; when omitted they are taken
            from the ``_fields`` of the first emitted namedtuple.

    Returns:
        An observable that emits a single pandas DataFrame.
    """
    def build_frame(rows):
        names = columns if columns is not None else rows[0]._fields
        return pd.DataFrame(rows, columns=names)

    return rx.pipe(ops.to_list(), ops.map(build_frame))
async def test_rx_support_request_channel_with_error_from_requester(
        pipe: Tuple[RSocketServer, RSocketClient], success_count, request_limit):
    """An error emitted by the requester's observable must reach the responder's subscriber."""
    server, client = pipe
    # Set once the responder-side subscriber observes the error.
    responder_received_error = asyncio.Event()
    server_received_messages = []
    received_error = None

    class ResponderSubscriber(DefaultSubscriber):

        def on_subscribe(self, subscription: Subscription):
            super().on_subscribe(subscription)
            # Start the flow by requesting one item.
            self.subscription.request(1)

        def on_next(self, value, is_complete=False):
            # Ignore empty payloads; keep pulling one item at a time.
            if len(value.data) > 0:
                server_received_messages.append(value.data)
            self.subscription.request(1)

        def on_error(self, exception: Exception):
            nonlocal received_error
            received_error = exception
            responder_received_error.set()

    async def generator() -> AsyncGenerator[Tuple[Payload, bool], None]:
        # Responder's own response stream; the final item carries the complete flag.
        for x in range(success_count):
            yield Payload('Feed Item: {}'.format(x).encode(
                'utf-8')), x == success_count - 1

    class Handler(BaseRequestHandler):
        async def request_channel(
                self, payload: Payload
        ) -> Tuple[Optional[Publisher], Optional[Subscriber]]:
            return StreamFromAsyncGenerator(generator), ResponderSubscriber()

    server.set_handler_using_factory(Handler)
    rx_client = RxRSocket(client)

    def test_observable(observer: Observer, scheduler: Optional[Scheduler]):
        # Requester-side observable that fails immediately.
        observer.on_error(Exception('Some error'))
        return Disposable()

    await rx_client.request_channel(
        Payload(b'request text'),
        observable=rx.create(test_observable),
        request_limit=request_limit).pipe(
            operators.map(lambda payload: payload.data),
            operators.to_list())

    await responder_received_error.wait()

    assert str(received_error) == 'Some error'
def subscribe(observer, scheduler):
    """Collect the argv observable into a list, parse it, and emit one Argument per option.

    On a parse failure (signalled here as ``NameError``) the error is forwarded
    via ``observer.on_error`` together with the parser's help text.
    """
    def on_next(argv_values):
        try:
            args = parser.parse_args(argv_values)
            # NOTE(review): the loop variable previously shadowed this
            # function's ``value`` parameter; renamed to avoid confusion.
            for key, parsed_value in vars(args).items():
                observer.on_next(Argument(key=key, value=parsed_value))
        except NameError as exc:
            observer.on_error("{}\n{}".format(exc, parser.format_help()))

    return argv.pipe(ops.to_list()).subscribe(
        on_next=on_next,
        on_error=observer.on_error,
        on_completed=observer.on_completed)
def test_parse(self):
    """argparse.parse should map parsed argv tokens to Argument items."""
    parser = argparse.ArgumentParser("test_parse")
    parser.add_argument("--foo")
    expected_result = [argparse.Argument(key="foo", value="fooz")]
    actual_result = None

    def capture(items):
        nonlocal actual_result
        actual_result = items

    rx.from_(["--foo", "fooz"]).pipe(
        argparse.parse(parser), ops.to_list()).subscribe(capture)

    self.assertEqual(expected_result, actual_result)
def read(self, identity_id: str) -> 'list[str]':
    """Read intents from the given identity and return up to five qualifiers.

    Retrieves intents for *identity_id* via the Gaia SDK (selecting the
    identity id and qualifier fields) and returns the first five qualifier
    strings.

    NOTE(review): the return annotation was previously ``[str]``, which is not
    a valid PEP 484 type; it is now a string forward-reference so no new
    import is required.
    """
    def config(x):
        # Fields requested for every intent.
        x.identity_id()
        x.qualifier()

    result = self._gaia_sdk.retrieve_intents(identity_id, config) \
        .pipe(ops.to_list()).run()
    qualifiers = [intent.qualifier for intent in result]
    return take(5, qualifiers)
def rx_review_practice_from_df(df: DataFrame,
                               fn_get_file_info: Callable[[str, str, str], Dict],
                               fn_get_commit_list_of_a_file: Callable[[str, str, str, str], List[Dict]],
                               get_file_content: Callable[[str], bytes],
                               practice: Practice) -> rx.Observable:
    """Review every row of *df* against *practice* and emit a single {practice.name: Series} dict."""
    reviewer = check_and_review_practice_from_git(
        fn_get_file_info, fn_get_commit_list_of_a_file, get_file_content,
        practice)
    return from_(df.iterrows()).pipe(
        op.map(lambda indexed_row: indexed_row[1]),  # drop the index, keep the row data
        op.map(reviewer),
        op.to_list(),
        op.map(lambda results: {practice.name: Series(results)}))
async def test_rx_support_request_channel_response_take_only_n(pipe: Tuple[RSocketServer, RSocketClient], take_only_n):
    """Taking only n items on the requester side must cancel the responder's stream.

    The responder generates up to 4 items lazily (request_limit=1), so the
    requester's ``take(take_only_n)`` should stop generation after exactly
    ``min(4, take_only_n)`` items.
    """
    server, client = pipe
    maximum_message_count = 4
    # Set when the responder stream is cancelled or completes.
    wait_for_server_finish = asyncio.Event()
    items_generated = 0

    async def generator() -> AsyncGenerator[Tuple[Payload, bool], None]:
        nonlocal items_generated
        for x in range(maximum_message_count):
            items_generated += 1
            # Second tuple element flags the final item as complete.
            yield Payload('Feed Item: {}'.format(x).encode('utf-8')), x == maximum_message_count - 1

    class Handler(BaseRequestHandler):
        async def request_channel(self, payload: Payload) -> Tuple[Optional[Publisher], Optional[Subscriber]]:
            def set_server_finished():
                wait_for_server_finish.set()

            return StreamFromAsyncGenerator(generator, on_cancel=set_server_finished,
                                            on_complete=set_server_finished), None

    server.set_handler_using_factory(Handler)
    rx_client = RxRSocket(client)

    # request_limit=1 forces one-at-a-time generation, so cancellation
    # can take effect before all items are produced.
    received_messages = await rx_client.request_channel(
        Payload(b'request text'),
        request_limit=1
    ).pipe(
        operators.map(lambda payload: payload.data),
        operators.take(take_only_n),
        operators.to_list()
    )

    if take_only_n > 0:
        await wait_for_server_finish.wait()

    maximum_message_received = min(maximum_message_count, take_only_n)

    assert len(received_messages) == maximum_message_received, 'Received message count wrong'
    assert items_generated == maximum_message_received, 'Received message count wrong'

    for i in range(maximum_message_received):
        assert received_messages[i] == ('Feed Item: %d' % i).encode()
def to_pandas(columns=None):
    """Convert an observable into an observable that emits one pandas DataFrame.

    If *columns* is not specified, the source items must be namedtuples and
    the column names are inferred from the fields of the namedtuple objects.

    Args:
        columns: [Optional] explicit column names for the DataFrame.

    Returns:
        An observable that emits a single item: a pandas DataFrame holding
        every item emitted by the source.
    """
    return rx.pipe(
        ops.to_list(),
        ops.map(lambda rows: pd.DataFrame(
            rows,
            columns=rows[0]._fields if columns is None else columns)))
async def test_rx_support_request_stream_cancel_on_timeout(
        pipe: Tuple[RSocketServer, RSocketClient]):
    """A requester-side timeout must cancel the responder's in-flight stream task.

    The responder delays 3 seconds before emitting; the requester times out
    after 2, so the responder task should be cancelled before anything is sent.
    """
    server, client = pipe
    # Set when the responder's delayed task observes cancellation.
    cancel_done = asyncio.Event()
    stream_messages_sent_count = 0

    class Handler(BaseRequestHandler, DefaultPublisherSubscription):
        async def delayed_stream(self):
            nonlocal stream_messages_sent_count
            try:
                # Deliberately longer than the requester's 2-second timeout.
                await asyncio.sleep(3)
                self._subscriber.on_next(Payload(b'success'))
                stream_messages_sent_count += 1
            except asyncio.CancelledError:
                cancel_done.set()

        def cancel(self):
            self._task.cancel()

        def request(self, n: int):
            self._task = asyncio.create_task(self.delayed_stream())

        async def request_stream(self, payload: Payload) -> Publisher:
            return self

    server.set_handler_using_factory(Handler)
    rx_client = RxRSocket(client)

    with pytest.raises(Exception):
        await asyncio.wait_for(
            rx_client.request_stream(Payload(b'request text')).pipe(
                operators.to_list()), 2)

    await cancel_done.wait()

    assert stream_messages_sent_count == 0
('precip_probability', 'float'), ]) Features = namedtuple( 'Features', ['label', 'pspeed_ratio', 'temperature', 'temperature_stddev']) epsilon = 1e-5 features = csv.load_from_file(dataset_path, parser).pipe( rs.ops.map(lambda i: Features( label=i.house_overall, pspeed_ratio=i.pressure / (i.wind_speed + epsilon), temperature=i.temperature, temperature_stddev=0.0, )), rs.state.with_memory_store( rx.pipe( rs.data.roll(window=60 * 6, stride=60, pipeline=rs.ops.tee_map( rx.pipe(rs.ops.last(), ), rx.pipe( rs.ops.map(lambda i: i.temperature), rs.math.stddev(reduce=True), ), )), )), rs.ops.map(lambda i: Features(i[0].label, i[0].pspeed_ratio, i[0]. temperature, i[1])), ops.to_list()).run() print("----------------------------") print(features)
import rx
from rx import operators as ops

items = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon"]

# Bucket the words by length, then emit each bucket as a single list.
source = rx.from_(items)
grouped = source.pipe(
    ops.group_by(lambda word: len(word)),
    ops.flat_map(lambda group: group.pipe(ops.to_list())),
)
grouped.subscribe(lambda bucket: print(bucket))
from rx import from_, operators as ops

# Collect all five names into one list and print it.
from_(["Alpha", "Beta", "Gamma", "Delta", "Epsilon"]).pipe(
    ops.to_list()
).subscribe(lambda collected: print(collected))
import rx
import rx.operators as ops

# Gather 1..4 into a single list, then report every notification kind.
source = rx.from_([1, 2, 3, 4])
source.pipe(ops.to_list()).subscribe(
    on_next=lambda value: print("on_next {}".format(value)),
    on_error=lambda error: print("on_error: {}".format(error)),
    on_completed=lambda: print("on_completed"),
)
def to_blocking(self, *args):
    """Run the observable built from *args* to completion and return its items as a list."""
    observable = self.to_observable(*args)
    return observable.pipe(operators.to_list()).run()
import os, glob

import rx
import rx.operators as op
from rx.scheduler import ImmediateScheduler

# Variant 1: for each training image basename, repeat it three times and
# suffix each copy with its repetition index, collecting everything into
# one list.
l = rx.from_iterable(glob.glob('./train/*.png')).pipe(
    op.map(os.path.basename),
    # Wrap each name in a single-item observable so it can be repeated.
    op.map(rx.just),
    op.flat_map(
        lambda x: x.pipe(
            op.repeat(3),
            op.subscribe_on(ImmediateScheduler()),
            op.map_indexed(lambda x, i: x.replace(".png", f"_{i}.png")),
        ),
    ),
    op.to_list()).run()
print(l)

# Variant 2: the same idea expressed by applying operators to the inner
# observables via op.map instead of building the inner pipe inside flat_map.
# The commented lines are alternative formulations kept for reference.
l = rx.from_iterable(glob.glob('./train/*.png')).pipe(
    op.map(os.path.basename),
    op.map(rx.just),
    op.map(op.repeat(3)),
    op.map(op.subscribe_on(ImmediateScheduler())),
    # op.flat_map(lambda x : x.pipe(op.map_indexed(lambda x,i: x.replace(".png",f"_{i}.png")))),
    # op.flat_map(lambda x: op.map_indexed(lambda x,i: x.replace(".png",f"_{i}.png"))(x) ),
    # op.flat_map(op.map_indexed(lambda x,i: x.replace(".png",f"_{i}.png"))),
    # op.flat_map(lambda x: print('way',x) or x),
    op.map(lambda x: op.map_indexed(lambda x, i: x.replace(
        ".png", f"_{i}.png"))(x)),
    # Flatten the stream of inner observables back into plain values.
    op.flat_map(lambda x: x),
    # op.flat_map_indexed(lambda x,i: print(x,i) or x),
    op.map(print)).run()
def fixxBuzz(self):
    """Return the FizzBuzz sequence for 1..100 as a list.

    Multiples of 3 become 'Fizz', multiples of 5 become 'Buzz', multiples of
    both become 'FizzBuzz'; all other numbers pass through unchanged.

    NOTE(review): the previous version only handled the 'Fizz' case; the
    'Buzz' and 'FizzBuzz' cases were missing.
    """
    def fizz_buzz(i):
        # Check 15 first so multiples of both 3 and 5 are not
        # classified as plain 'Fizz'.
        if i % 15 == 0:
            return "FizzBuzz"
        if i % 3 == 0:
            return "Fizz"
        if i % 5 == 0:
            return "Buzz"
        return i

    return from_iterable(range(1, 101)).pipe(
        map(fizz_buzz),
        to_list()).run()
from rx import from_, of, from_iterable, operators as ops, create

items = ["Alpha", "Beta", "Gamma", "Delta", "Epsilon"]

# Group the words by their length and print each group as a list.
from_(items).pipe(
    ops.group_by(key_mapper=lambda word: len(word)),
    ops.flat_map(lambda group: group.pipe(ops.to_list())),
).subscribe(lambda bucket: print(bucket))
def _to_list(source):
    """Collect *source* into a list, using the mux-aware variant for MuxObservables."""
    collector = to_list_mux() if isinstance(source, rs.MuxObservable) else ops.to_list()
    return collector(source)