def open(self):
    scheduler = AsyncIOScheduler()

    print("WebSocket opened")

    # A Subject is both an observable and observer, so we can both subscribe
    # to it and also feed (send) it with new values
    self.subject = Subject()

    # Get all distinct key up events from the input and only fire if long enough and distinct
    searcher = self.subject.pipe(
        ops.map(lambda x: x["term"]),
        ops.filter(lambda text: len(text) > 2),  # Only if the text is longer than 2 characters
        ops.debounce(0.750),  # Pause for 750ms
        ops.distinct_until_changed(),  # Only if the value has changed
        ops.flat_map_latest(search_wikipedia)
    )

    def send_response(x):
        self.write_message(x.body)

    def on_error(ex):
        print(ex)

    searcher.subscribe(send_response, on_error, scheduler=scheduler)
def main():
    app = QApplication(sys.argv)
    scheduler = QtScheduler(QtCore)

    window = Window()
    window.show()

    text = 'TIME FLIES LIKE AN ARROW'

    def on_next(info):
        label, (x, y), i = info
        label.move(x + i*12 + 15, y)
        label.show()

    def handle_label(label, i):
        delayer = ops.delay(i * 0.100)
        mapper = ops.map(lambda xy: (label, xy, i))

        return window.mousemove.pipe(
            delayer,
            mapper,
        )

    labeler = ops.flat_map_indexed(handle_label)
    mapper = ops.map(lambda c: QLabel(c, window))

    rx.from_(text).pipe(
        mapper,
        labeler,
    ).subscribe(on_next, on_error=print, scheduler=scheduler)

    sys.exit(app.exec_())
def test_map_disposeinsidemapper(self):
    scheduler = TestScheduler()
    xs = scheduler.create_hot_observable(
        on_next(100, 1),
        on_next(200, 2),
        on_next(500, 3),
        on_next(600, 4)
    )
    results = scheduler.create_observer()
    d = SerialDisposable()
    invoked = [0]

    def projection(x, *args, **kw):
        invoked[0] += 1
        if scheduler.clock > 400:
            d.dispose()
        return x

    d.disposable = xs.pipe(
        map(projection)
    ).subscribe(results, scheduler=scheduler)

    def action(scheduler, state):
        return d.dispose()

    scheduler.schedule_absolute(ReactiveTest.disposed, action)
    scheduler.start()

    assert results.messages == [on_next(100, 1), on_next(200, 2)]
    assert xs.subscriptions == [ReactiveTest.subscribe(0, 500)]
    assert invoked[0] == 3
def handle_label(label, i):
    delayer = ops.delay(i*0.100)
    mapper = ops.map(lambda xy: (label, xy, i))

    return window.mousemove.pipe(
        delayer,
        mapper,
    )
def factory():
    def projection(x):
        invoked[0] += 1
        if invoked[0] == 3:
            raise Exception(ex)
        return x + 1
    return xs.pipe(map(projection))
def create():
    def mapper(x):
        return TimeInterval(x.value, x.interval)

    return xs.pipe(
        ops.time_interval(),
        ops.map(mapper),
    )
def create():
    def mapper(x):
        return Timestamp(x.value, x.timestamp)

    return xs.pipe(
        ops.timestamp(),
        ops.map(mapper),
    )
def factory():
    def key_mapper(x):
        key_invoked[0] += 1
        return x.lower().strip()

    return xs.pipe(
        ops.group_by(key_mapper, lambda x: x),
        ops.map(lambda g: g.key),
    )
def handle_label(label, i):
    label.config(dict(borderwidth=0, padx=0, pady=0))
    mapper = ops.map(lambda ev: (label, ev, i))
    delayer = ops.delay(i*0.1)

    return mousemove.pipe(
        delayer,
        mapper
    )
def _all(predicate: Predicate) -> Callable[[Observable], Observable]:
    filtering = ops.filter(lambda v: not predicate(v))
    mapping = ops.map(lambda b: not b)
    some = ops.some()

    return pipe(
        filtering,
        some,
        mapping
    )
def _pluck(key: Any) -> Callable[[Observable], Observable]:
    """Retrieves the value of a specified key using dict-like access (as in
    element[key]) from all elements in the Observable sequence.

    Args:
        key: The key to pluck.

    Returns a new Observable {Observable} sequence of key values.

    To pluck an attribute of each element, use pluck_attr.
    """
    return ops.map(lambda x: x[key])
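# A minimal usage sketch (assumes RxPY 3): plucking the "name" key from each
# dict element, which is what the _pluck helper above boils down to via ops.map.
# The sample data is hypothetical.
import rx
from rx import operators as ops

rx.from_([{"name": "Alpha"}, {"name": "Beta"}]).pipe(
    ops.map(lambda x: x["name"])
).subscribe(lambda name: print(name))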
def factory():
    def key_mapper(x):
        key_invoked[0] += 1
        return x.lower().strip()

    def element_mapper(x):
        ele_invoked[0] += 1
        return x[::-1]  # Yes, this is reverse string in Python

    return xs.pipe(
        ops.group_by(key_mapper, element_mapper),
        ops.map(lambda g: g.key),
    )
def average(source: Observable) -> Observable:
    """Partially applied average operator.

    Computes the average of an observable sequence of values that
    are in the sequence or obtained by invoking a transform function
    on each element of the input sequence if present.

    Examples:
        >>> res = average(source)

    Args:
        source: Source observable to average.

    Returns:
        An observable sequence containing a single element with the
        average of the sequence of values.
    """
    if key_mapper:
        return source.pipe(
            operators.map(key_mapper),
            operators.average()
        )

    def accumulator(prev, cur):
        return AverageValue(sum=prev.sum+cur, count=prev.count+1)

    def mapper(s):
        if s.count == 0:
            raise Exception('The input sequence was empty')

        return s.sum / float(s.count)

    seed = AverageValue(sum=0, count=0)

    return source.pipe(
        operators.scan(accumulator, seed),
        operators.last(),
        operators.map(mapper)
    )
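# A minimal usage sketch (assumes RxPY 3): ops.average emits a single value,
# here 2.5, once the finite source completes.
import rx
from rx import operators as ops

rx.from_([1, 2, 3, 4]).pipe(
    ops.average()
).subscribe(lambda avg: print(avg))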
def factory():
    def key_mapper(x):
        key_invoked[0] += 1
        return x.lower().strip()

    def element_mapper(x):
        ele_invoked[0] += 1
        return x[::-1]

    return xs.pipe(
        ops.group_by(key_mapper, element_mapper),
        ops.map(lambda g: g.key),
    )
def subscribe(observer, scheduler_):
    _scheduler = scheduler or scheduler_ or timeout_scheduler
    last = _scheduler.now

    def mapper(value):
        nonlocal last

        now = _scheduler.now
        span = now - last
        last = now
        return TimeInterval(value=value, interval=span)

    return source.pipe(ops.map(mapper)).subscribe(observer, scheduler=scheduler_)
def create():
    def mapper(xy):
        x, y = xy
        return "{}{}".format(x.value, y.value)

    return xs.pipe(
        ops.join(
            ys,
            lambda x: rx.timer(x.interval),
            lambda y: rx.timer(y.interval),
        ),
        ops.map(mapper),
    )
def create():
    def mapper(xy):
        x, y = xy
        return "{}{}".format(x.value, y.value)

    return xs.pipe(
        ops.join(
            ys,
            lambda x: rx.timer(x.interval),
            lambda y: rx.timer(y.interval).pipe(
                ops.flat_map(rx.throw(ex) if y.value == "tin" else rx.empty())
            ),
        ),
        ops.map(mapper),
    )
def observable_window_with_openings(self, window_openings, window_closing_mapper):
    def mapper(args):
        _, window = args
        return window

    return window_openings.pipe(
        ops.group_join(
            self,
            window_closing_mapper,
            lambda _: empty(),
        ),
        ops.map(mapper),
    )
def _pluck_attr(prop: str) -> Callable[[Observable], Observable]:
    """Retrieves the value of a specified property (using getattr) from
    all elements in the Observable sequence.

    Args:
        property: The property to pluck.

    Returns a new Observable {Observable} sequence of property values.

    To pluck values using dict-like access (as in element[key]) on each
    element, use pluck.
    """
    return ops.map(lambda x: getattr(x, prop))
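# A minimal usage sketch (assumes RxPY 3): plucking the ".real" attribute from
# each element with getattr, mirroring the _pluck_attr helper above;
# prints 1.0 and 3.0.
import rx
from rx import operators as ops

rx.from_([complex(1, 2), complex(3, 4)]).pipe(
    ops.map(lambda x: getattr(x, "real"))
).subscribe(print)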
def factory(scheduler):
    nonlocal source

    has_accumulation = [False]
    accumulation = [None]

    def projection(x):
        if has_accumulation[0]:
            accumulation[0] = accumulator(accumulation[0], x)
        else:
            accumulation[0] = accumulator(seed, x) if has_seed else x
            has_accumulation[0] = True

        return accumulation[0]

    return source.pipe(ops.map(projection))
def test_map_throws(self):
    mapper = map(lambda x: x)

    with self.assertRaises(RxException):
        return_value(1).pipe(
            mapper
        ).subscribe(lambda x: _raise("ex"))

    with self.assertRaises(RxException):
        throw('ex').pipe(
            mapper
        ).subscribe(on_error=lambda ex: _raise(ex))

    with self.assertRaises(RxException):
        empty().pipe(
            mapper
        ).subscribe(lambda x: x, lambda ex: ex, lambda: _raise('ex'))

    def subscribe(observer, scheduler=None):
        _raise('ex')

    with self.assertRaises(RxException):
        create(subscribe).pipe(
            map(lambda x: x)
        ).subscribe()
def factory(scheduler):
    has_accumulation = False
    accumulation = None

    def projection(x):
        nonlocal has_accumulation
        nonlocal accumulation

        if has_accumulation:
            accumulation = accumulator(accumulation, x)
        else:
            accumulation = accumulator(seed, x) if has_seed else x
            has_accumulation = True

        return accumulation

    return source.pipe(ops.map(projection))
def main():
    scheduler = GtkScheduler()
    scrolled_window = Gtk.ScrolledWindow()

    window = Window()
    window.connect("delete-event", Gtk.main_quit)

    container = Gtk.Fixed()

    scrolled_window.add(container)
    window.add(scrolled_window)

    text = 'TIME FLIES LIKE AN ARROW'

    def on_next(info):
        label, (x, y), i = info
        container.move(label, x + i*12 + 15, y)
        label.show()

    def handle_label(label, i):
        delayer = ops.delay(i*0.100)
        mapper = ops.map(lambda xy: (label, xy, i))

        return window.mousemove.pipe(
            delayer,
            mapper,
        )

    def make_label(char):
        label = Gtk.Label(label=char)
        container.put(label, 0, 0)
        label.hide()
        return label

    mapper = ops.map(make_label)
    labeler = ops.flat_map_indexed(handle_label)

    rx.from_(text).pipe(
        mapper,
        labeler,
    ).subscribe(on_next, on_error=print, scheduler=scheduler)

    window.show_all()
    Gtk.main()
def create():
    def right_duration_mapper(y):
        if len(y.value) >= 0:
            raise Exception(ex)
        else:
            return rx.empty()

    def mapper(xy):
        x, y = xy
        return "{}{}".format(x.value, y.value)

    return xs.pipe(
        ops.join(
            ys,
            lambda x: rx.timer(x.interval),
            right_duration_mapper,
        ),
        ops.map(mapper),
    )
def _max(comparer: Optional[Comparer] = None) -> Callable[[Observable], Observable]:
    """Returns the maximum value in an observable sequence according to
    the specified comparer.

    Examples:
        >>> op = max()
        >>> op = max(lambda x, y: x.value - y.value)

    Args:
        comparer: [Optional] Comparer used to compare elements.

    Returns:
        An operator function that takes an observable source and returns
        an observable sequence containing a single element with the
        maximum element in the source sequence.
    """
    return pipe(
        ops.max_by(identity, comparer),
        ops.map(first_only)
    )
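# A minimal usage sketch (assumes RxPY 3): ops.max with a two-argument comparer,
# mirroring the docstring example above; emits the element with the largest
# .value. The Item type and sample data are hypothetical.
import rx
from rx import operators as ops
from collections import namedtuple

Item = namedtuple("Item", ["value"])

rx.from_([Item(3), Item(9), Item(1)]).pipe(
    ops.max(lambda x, y: x.value - y.value)
).subscribe(print)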
def _buffer(buffer_openings=None, buffer_closing_mapper=None) -> Callable[[Observable], Observable]:
    """Projects each element of an observable sequence into zero or more
    buffers.

    Args:
        buffer_openings -- Observable sequence whose elements denote the
            creation of windows.
        buffer_closing_mapper -- [optional] A function invoked to define
            the closing of each produced window. If a closing mapper
            function is specified for the first parameter, this parameter
            is ignored.

    Returns:
        A function that takes an observable source and returns an
        observable sequence of windows.
    """
    return pipe(
        ops.window(buffer_openings, buffer_closing_mapper),
        ops.flat_map(pipe(ops.to_iterable(), ops.map(list)))
    )
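# A minimal usage sketch (assumes RxPY 3): the count-based buffering variant
# groups elements into lists; prints [0, 1, 2], [3, 4, 5], [6, 7].
import rx
from rx import operators as ops

rx.from_(range(8)).pipe(
    ops.buffer_with_count(3)
).subscribe(print)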
def open(self):
    print("WebSocket opened")

    # A Subject is both an observable and observer, so we can both subscribe
    # to it and also feed (on_next) it with new values
    self.subject = Subject()

    # Now we take on our magic glasses and project the stream of bytes into
    # a ...
    query = self.subject.pipe(
        # 1. stream of keycodes
        ops.map(lambda obj: obj["keycode"]),
        # 2. stream of windows (10 ints long)
        ops.window_with_count(10, 1),
        # 3. stream of booleans, True or False
        ops.flat_map(lambda win: win.pipe(ops.sequence_equal(codes))),
        # 4. stream of Trues
        ops.filter(lambda equal: equal)
    )
    # 4. we then subscribe to the Trues, and signal Konami! if we see any
    query.subscribe(lambda x: self.write_message("Konami!"))
def _min(comparer: Callable = None) -> Callable[[Observable], Observable]:
    """The `min` operator.

    Returns the minimum element in an observable sequence according to
    the optional comparer else a default greater than less than check.

    Examples:
        >>> res = source.min()
        >>> res = source.min(lambda x, y: x.value - y.value)

    Args:
        comparer: [Optional] Comparer used to compare elements.

    Returns:
        An observable sequence containing a single element with the
        minimum element in the source sequence.
    """
    return pipe(
        ops.min_by(identity, comparer),
        ops.map(first_only)
    )
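# A minimal usage sketch (assumes RxPY 3): without a comparer, ops.min falls
# back to the default ordering and emits a single value, here 1.
import rx
from rx import operators as ops

rx.from_([3, 9, 1, 4]).pipe(
    ops.min()
).subscribe(print)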
def flat_map_latest(source: Observable) -> Observable:
    """Projects each element of an observable sequence into a new
    sequence of observable sequences by incorporating the element's
    index and then transforms an observable sequence of observable
    sequences into an observable sequence producing values only from
    the most recent observable sequence.

    Args:
        source: Source observable to flat map latest.

    Returns:
        An observable sequence whose elements are the result of
        invoking the transform function on each element of source
        producing an observable of Observable sequences and that at
        any point in time produces the elements of the most recent
        inner observable sequence that has been received.
    """
    return source.pipe(
        ops.map(mapper),
        ops.switch_latest()
    )
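# A minimal usage sketch (assumes RxPY 3): ops.flat_map_latest is map plus
# switch_latest, as in the body above. With synchronous inner sources nothing
# is dropped; the switching behavior matters for asynchronous inner observables.
import rx
from rx import operators as ops

rx.of(1, 2, 3).pipe(
    ops.flat_map_latest(lambda i: rx.of(i * 10, i * 10 + 1))
).subscribe(print)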
def main():
    root = Tk()
    root.title("Rx for Python rocks")
    scheduler = TkinterScheduler(root)

    mousemove = Subject()

    frame = Frame(root, width=600, height=600)
    frame.bind("<Motion>", mousemove.on_next)

    text = 'TIME FLIES LIKE AN ARROW'

    def on_next(info):
        label, ev, i = info
        label.place(x=ev.x + i*12 + 15, y=ev.y)

    def handle_label(label, i):
        label.config(dict(borderwidth=0, padx=0, pady=0))
        mapper = ops.map(lambda ev: (label, ev, i))
        delayer = ops.delay(i*0.1)

        return mousemove.pipe(
            delayer,
            mapper
        )

    labeler = ops.flat_map_indexed(handle_label)
    mapper = ops.map(lambda c: Label(frame, text=c))

    rx.from_(text).pipe(
        mapper,
        labeler
    ).subscribe(on_next, on_error=print, scheduler=scheduler)

    frame.pack()
    root.mainloop()
import asyncio
import os

import rx
from rx import operators as ops
import skvideo.io

loop = asyncio.get_event_loop()
done = loop.create_future()

filepath = os.path.join(VIDEO_DIR, 'nascar_01.mp4')
inputparameters = {}
outputparameters = {}
videogen = skvideo.io.vreader(filepath, inputdict=inputparameters,
                              outputdict=outputparameters)

source = rx.from_iterable(videogen).pipe(ops.map(vision.rgb_to_bgr))

dm = vision.DisplayManager('Nascar')


class Player:
    def __init__(self):
        self.count = 0

    def on_next(self, frame):
        self.count += 1
        print("frame: {} {}".format(self.count, frame.shape))
        dm.show(frame)
        vision.wait_for_esc_key(40)
        pass

    def on_error(self, error):
def mapper(ys):
    return ys.pipe(
        ops.zip(ys),
        ops.map(sum),
    )
def _map(source):
    if isinstance(source, rs.MuxObservable):
        return map_mux(mapper)(source)
    else:
        return ops.map(mapper)(source)
from rx import from_, operators as ops

from_(["Alpha", "Theta", "Kappa", "Beta", "Gamma", "Delta", "Epsilon"]).pipe(
    ops.map(lambda s: len(s)),
    ops.distinct_until_changed()
).subscribe(lambda i: print(i))

from_(["Alpha", "Theta", "Kappa", "Beta", "Gamma", "Delta", "Epsilon"]).pipe(
    ops.distinct_until_changed(lambda s: len(s))
).subscribe(lambda i: print(i))
def on_completed(self):
    print("Finished")

def on_error(self, error):
    print("Error: {}".format(error))


some_urls = ["google", "youtube", "twitter", "stackoverflow", "localhost"]

print("urls in order to be queried")
print(some_urls)

source = rx.create(rx_url_loop(some_urls))

source.pipe(
    op.filter(lambda x: x.get("time") % 2 == 0),
    op.map(lambda x: {
        "url": x.get("url").upper(),
        "time": x.get("time")
    }),
    op.map(
        lambda x: "Received: {0}, has successfully completed after {1} seconds"
        .format(x.get("url"), x.get("time")))).subscribe(RxAsyncRequest())

# other_urls = ["yahoo", "vk", "instagram", "reddit", "noip"]
# print(other_urls)
# source2 = of(*other_urls)
# source2.pipe(
#     op.map(lambda s: len(s)),
#     op.filter(lambda s: s > 5)
# ).subscribe(RxAsyncRequest())
rx.start()
rx.timer()

"""Mathematical"""
op.average()
op.concat()
op.count()
op.max()
op.min()
op.reduce()
op.sum()

"""Transformation"""
op.buffer()
op.group_by()
op.map()
op.scan()
# ...

"""Filtering"""
op.debounce()
op.distinct()
op.filter()
op.element_at()
op.first()
op.ignore_elements()
op.last()
op.skip()
op.skip_last()
op.take()
op.take_last()
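# A minimal sketch (assumes RxPY 3) combining one operator from each category
# above: map (transformation), filter (filtering) and sum (mathematical);
# prints 6 + 8 + 10 = 24.
import rx
from rx import operators as op

rx.from_(range(6)).pipe(
    op.map(lambda i: i * 2),     # 0, 2, 4, 6, 8, 10
    op.filter(lambda i: i > 4),  # 6, 8, 10
    op.sum()                     # 24
).subscribe(lambda total: print(total))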
def capitalize():
    return op.pipe(
        op.map(lambda i: i.capitalize())
    )
def deepspeech_server(aio_scheduler, sources):
    argv = sources.argv.argv
    stt = sources.httpd.route
    stt_response = sources.deepspeech.text
    ds_logs = sources.deepspeech.log

    http_ds_error, route_ds_error = make_error_router()

    args = parse_arguments(argv)

    read_request, read_response = args.pipe(
        ops.map(lambda i: file.Read(id='config', path=i.value)),
        file.read(sources.file.response),
    )
    read_request = read_request.pipe(
        ops.subscribe_on(aio_scheduler),
    )
    config = parse_config(read_response)

    logs_config = config.pipe(
        ops.flat_map(
            lambda i: rx.from_(i.log.level, scheduler=ImmediateScheduler())),
        ops.map(lambda i: logging.SetLevel(logger=i.logger, level=i.level)),
    )
    logs = rx.merge(logs_config, ds_logs)

    ds_stt = stt.pipe(
        ops.flat_map(lambda i: i.request),
        ops.map(
            lambda i: deepspeech.SpeechToText(data=i.data, context=i.context)),
    )

    # config is hot, the combine operator allows to keep its last value
    # until logging is initialized
    ds_arg = config.pipe(
        ops.map(lambda i: deepspeech.Initialize(
            model=i.deepspeech.model,
            scorer=deepspeech.Scorer(
                scorer=getattr(i.deepspeech, 'scorer', None),
                lm_alpha=getattr(i.deepspeech, 'lm_alpha', None),
                lm_beta=getattr(i.deepspeech, 'lm_beta', None),
            ),
            beam_width=getattr(i.deepspeech, 'beam_width', None),
        )),
    )
    ds = rx.merge(ds_stt, ds_arg)

    http_init = config.pipe(
        ops.flat_map(lambda i: rx.from_([
            httpd.Initialize(request_max_size=i.server.http.request_max_size),
            httpd.AddRoute(
                methods=['POST'],
                path='/stt',
                id='stt',
            ),
            httpd.StartServer(
                host=i.server.http.host,
                port=i.server.http.port),
        ])),
    )

    http_response = stt_response.pipe(
        route_ds_error(
            error_map=lambda e: httpd.Response(
                data="Speech to text error".encode('utf-8'),
                context=e.args[0].context,
                status=500)),
        ops.map(lambda i: httpd.Response(
            data=i.text.encode('utf-8'),
            context=i.context,
        )),
    )

    http = rx.merge(http_init, http_response, http_ds_error)

    return DeepspeechSink(
        file=file.Sink(request=read_request),
        logging=logging.Sink(request=logs),
        deepspeech=deepspeech.Sink(speech=ds),
        httpd=httpd.Sink(control=http))
def expand_and_next(mbs):
    logging.info(f"in CalcService.fetch_values {mbs}")
    rx.from_(mbs).pipe(
        ops.map(expand_array),
        ops.do_action(stream.on_next)
    ).subscribe(
        on_next=lambda data: logging.info(f"expand_and_next {data}"),
        on_error=print_error)
def mapper(w, i):
    return w.pipe(ops.map(lambda x: "%s %s" % (i, x)))
import rx
from rx import operators as ops

items = ["134/34/235/132/77", "64/22/98/112/86/11", "66/08/34/778/22/12"]

rx.from_(items).pipe(
    ops.flat_map(lambda s: rx.from_(s.split("/"))),
    ops.map(lambda s: int(s))
).subscribe(lambda i: print(i))
def mapper(ys, i):
    def proj(y):
        return "%s %s" % (i, y)
    return ys.pipe(ops.map(proj), ops.concat(rx.return_value('%s end' % i)))
rowStream = rx.create(observeRowPins(ROW_PINS))

#colStream.subscribe(on_next=lambda i: print(i))
#rowStream.subscribe(on_next=lambda i: print(i))

def findKey(xy):
    row, col = xy
    return KEYPAD[row.n][col.n]

keyStream = rowStream.pipe(
    ops.join(
        colStream,
        lambda l: rx.timer(.01),
        lambda r: rx.timer(.01)
    ),
    ops.map(findKey),
)

def makeColor(i):
    print(i)
    if i == 1:
        led.red = 1
    if i == 2:
        led.green = 1
    if i == 3:
        led.blue = 1
    if i == 4:
        led.red = 0.66
    if i == 5:
        led.green = 0.66
    if i == 6:
import rx
from rx import operators as ops

items = ["134/34/235/132/77", "64/22/98/112/86/11", "66/08/34/778/22/12"]

# Since RxPY v3.0, concat_all() has been removed; instead, use the merge
# operator with the max_concurrent argument set to 1.
rx.from_(items).pipe(
    ops.map(lambda s: rx.from_(s.split("/"))),
    ops.merge(max_concurrent=1),
    ops.map(lambda s: int(s))
).subscribe(
    lambda i: print(i)
)
import rx
from rx import operators as ops

letters = rx.from_(["Alpha", "Beta", "Gamma", "Delta", "Epsilon"])
intervals = rx.interval(1.0)

letters.pipe(
    ops.zip(intervals),
    ops.map(lambda z: z[0])
).subscribe(lambda s: print(s))

input("Press any key to quit\n")
def left(a: Observable) -> Observable:
    return a.pipe(ops.map(lambda v: (v, None)))
import rx
from rx import operators as ops

rx.from_(["Alpha", "Beta", "Gamma", "Delta", "Epsilon"]).pipe(
    ops.group_by(lambda s: len(s)),
    ops.flat_map(
        lambda grp: grp.pipe(ops.count(), ops.map(lambda ct: (grp.key, ct)))),
    ops.to_dict(lambda key_value: key_value[0],
                lambda key_value: key_value[1])
).subscribe(lambda i: print(i))
import rx
import rx.operators as ops
from rx.scheduler import NewThreadScheduler
import threading
import time

new_thread_scheduler = NewThreadScheduler()

numbers = rx.from_([1, 2, 3, 4])
subscription = numbers.pipe(
    ops.map(lambda i: i*2),
    ops.observe_on(new_thread_scheduler),
    ops.map(lambda i: "number is: {}".format(i)),
).subscribe(
    on_next=lambda i: print("on_next({}) {}"
                            .format(threading.get_ident(), i)),
    on_error=lambda e: print("on_error({}): {}"
                             .format(threading.get_ident(), e)),
    on_completed=lambda: print("on_completed({})"
                               .format(threading.get_ident()))
)

print("main({})".format(threading.get_ident()))
time.sleep(1.0)
def extract_word(word, filter_word):
    return rx.from_(word).pipe(
        ops.filter(lambda tup: tup[1] == filter_word and len(tup[0]) > 1),
        ops.map(lambda tup: tup[0]),
        ops.reduce(lambda acc, t1: acc + " " + t1))
def add_and_multiply(nums, add_value, multiply_value):
    return nums.pipe(
        ops.map(lambda i: i + add_value),
        ops.map(lambda i: i * multiply_value)
    )
from rx import from_, interval, operators as ops

source1 = interval(10).pipe(ops.map(lambda i: "Source 1: {0}".format(i)))
source2 = interval(5).pipe(ops.map(lambda i: "Source 2: {0}".format(i)))
source3 = interval(3).pipe(ops.map(lambda i: "Source 3: {0}".format(i)))

from_([source1, source2, source3]).pipe(
    ops.merge_all()
).subscribe(lambda s: print(s))

# keep application alive until user presses a key
input("Press any key to quit\n")
def right(a: Observable) -> Observable:
    return a.pipe(ops.map(lambda v: (None, v)))
def worker(consumer):
    # num_cores = multiprocessing.cpu_count()
    with ProcessPoolExecutor(3) as executor:
        rx.from_(consumer.iterable).pipe(
            ops.map(lambda msg: executor.submit(consumer.process_consumer, msg)),
        ).subscribe(consumer)
from rx import interval, operators as ops

"""
this method will give a series of values produced after a timeout.
:parameter period: to start the Number sequence
:return value it returns an observable with all the values in sequential order
"""

my_interval = interval(1.2).pipe(
    ops.map(lambda i: i * i),
    # ops.map(lambda x: x + " x")  ## TypeError: unsupported operand type(s) for +: 'int' and 'str'
).subscribe(lambda x: print("value is {}".format(x)))

input("press any key to exit\n")
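# A minimal sketch (assumes RxPY 3): the commented-out map above fails because
# interval() emits ints, which cannot be concatenated with a str; formatting
# the value into a string first avoids the TypeError.
from rx import interval, operators as ops

interval(1.2).pipe(
    ops.map(lambda x: "{} x".format(x)),
).subscribe(lambda x: print("value is {}".format(x)))

input("press any key to exit\n")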
def get_token_stream(context: Context) -> Observable:
    return return_value(context).pipe(
        op.map(validate_grant_type),
        op.flat_map(call_async(validate_code)),
        op.flat_map(call_async(get_token_by_code)),
    )
import rx
import rx.operators as ops
from time import sleep

NUM_TESTRUNS_PER_CATEGORY = 3

source = rx.timer(0, 7).pipe(
    ops.map(lambda i: i + 1),
    ops.take(2),
)

testresultStream = source.pipe(
    ops.flat_map_latest(
        lambda selectedJobId: getCategoryAndDependencies(selectedJobId)
    ),
)

def getCategoryAndDependencies(selectedJobId):
    return rx.of(selectedJobId).pipe(
        ops.flat_map(
            lambda selectedJobId: getCategory(selectedJobId)
        ),
        ops.flat_map(
            lambda category: getTestruns(category)
        ),
        ops.flat_map(
            lambda testruns: rx.from_(testruns)
        ),
        ops.map(
import rx
import rx.operators as ops

def add_and_multiply(nums, add_value, multiply_value):
    return nums.pipe(
        ops.map(lambda i: i + add_value),
        ops.map(lambda i: i * multiply_value)
    )

numbers = rx.from_([1, 2, 3]).pipe(
    ops.map(lambda i: i + 1),
    ops.map(lambda i: i * 2)
)

numbers.subscribe(
    on_next=lambda i: print("on_next {}".format(i)),
    on_error=lambda e: print("on_error: {}".format(e)),
    on_completed=lambda: print("on_completed")
)
def _next_valid_id(self) -> int:
    self._eclient.reqIds(-1)  # Argument is ignored
    return self._messages.pipe(
        _.first(lambda m: m.type == IbApiMessageType.NEXT_VALID_ID),
        _.map(lambda m: m.payload[0])).run()
import rx
from rx import operators as ops

source1 = rx.interval(1.0).pipe(
    ops.map(lambda i: "Source 1: {0}".format(i))
)
source2 = rx.interval(0.5).pipe(
    ops.map(lambda i: "Source 2: {0}".format(i))
)
source3 = rx.interval(0.3).pipe(
    ops.map(lambda i: "Source 3: {0}".format(i))
)

rx.merge(source1, source2, source3).subscribe(lambda s: print(s))

# keep application alive until user presses a key
input("Press any key to quit\n")
from rx import of, operators as op
import rx

ob = of(1, 2, 34, 5, 6, 7, 7)

subscription = ob.pipe(
    op.map(lambda i: i ** 2),
    op.filter(lambda i: i >= 10)
).subscribe(lambda i: print(f'Received: {i}'))

def capitalize():
    return op.pipe(
        op.map(lambda i: i.capitalize())
    )

of('ab', 'cd', 'efg').pipe(capitalize()).subscribe(lambda i: print(f'Received: {i}'))