def open(self):
    """WebSocket open handler: wire a debounced Wikipedia-search pipeline."""
    scheduler = AsyncIOScheduler(asyncio.get_event_loop())
    print("WebSocket opened")

    # A Subject is both observer and observable: incoming messages are
    # pushed into it, and the query pipeline below consumes it.
    self.subject = Subject()

    # Distinct key-up events, only when long enough and after typing pauses.
    searcher = self.subject.pipe(
        ops.map(lambda msg: msg["term"]),
        ops.filter(lambda term: len(term) > 2),   # at least three characters
        ops.debounce(0.750),                      # 750 ms of keyboard silence
        ops.distinct_until_changed(),             # changed terms only
        ops.flat_map_latest(search_wikipedia),    # drop stale lookups
    )

    def forward(result):
        self.write_message(result.body)

    def report(error):
        print(error)

    searcher.subscribe(forward, report, scheduler=scheduler)
def open(self):
    """Open handler: set up the debounced search stream for this socket."""
    scheduler = AsyncIOScheduler()
    print("WebSocket opened")

    # Subject doubles as observer and observable, so we can both feed it
    # new values and subscribe to it.
    self.subject = Subject()

    # Build the operator chain once, then apply it in a single pipe call.
    pipeline = (
        ops.map(lambda event: event["term"]),
        ops.filter(lambda term: len(term) > 2),   # more than two characters
        ops.debounce(0.750),                      # 750 ms quiet period
        ops.distinct_until_changed(),             # suppress repeated terms
        ops.flat_map_latest(search_wikipedia),    # newest query wins
    )
    searcher = self.subject.pipe(*pipeline)

    def on_result(response):
        self.write_message(response.body)

    def on_failure(exc):
        print(exc)

    searcher.subscribe(on_result, on_failure, scheduler=scheduler)
def __init__(self, source_map, crypto="BTC", currency="USD", debounce_interval=2):
    """
    :param source_map: (dict) that maps the details of an API
    :param crypto: cryptocurrency code
    :param currency: fiat currency code
    :param debounce_interval: seconds of silence before a value is emitted
    """
    self.crypto = crypto.upper()
    self.currency = currency.upper()
    self.source = source_map[self.get_exchange()]
    pair = "{}{}".format(self.crypto, self.currency)
    self.websocket_msg = self.source[pair]["message"]
    self.url = self.source["url"]

    self.subject = Subject()
    # Let values through only after a pause, and only when they changed.
    debounced = self.subject.pipe(
        rx_ops.debounce(debounce_interval),
        rx_ops.distinct_until_changed(),
    )

    def report_error(err):
        self.log(msg=f"subject error: {err}")

    debounced.subscribe(on_next=self.get_on_next(), on_error=report_error)
def __init__(self):
    # Wires UI action Subjects ("Src") to Repo() operations; the piped
    # results ("Sink") are what views subscribe to.
    # NOTE(review): each `Repo()` call constructs a new instance — presumably
    # Repo is a singleton or stateless facade; confirm before relying on it.

    # Add a segment; sink emits whatever Repo().addSegment() returns.
    self.addSegmentConfigSrc = Subject()
    self.addSegmentConfigSink = self.addSegmentConfigSrc.pipe(
        ops.map(lambda _: Repo().addSegment()))

    # Remove a segment by key; the empty pipe() just exposes the raw stream.
    self.removeSegmentConfigSrc = Subject()
    self.removeSegmentConfigSink = self.removeSegmentConfigSrc.pipe()
    self.removeSegmentConfigSrc.subscribe(
        lambda key: Repo().removeSegment(key))

    # Update a segment; each emitted value is passed to Repo().updateSegment.
    self.updateSegmentSrc = Subject()
    self.updateSegmentSink = self.updateSegmentSrc.pipe(
        ops.map(Repo().updateSegment))
    # self.updateSegmentSink.subscribe(lambda _: _a)

    # Generate segments; the no-op subscribe forces the cold pipe to run.
    self.generateSegmentsSrc = Subject()
    self.retriveKnobTendonModels = self.generateSegmentsSrc.pipe(
        ops.map(Repo().generateSegments))
    self.retriveKnobTendonModels.subscribe(lambda _: None)

    # Tension updates are side effects only — no sink is exposed.
    self.updateTensionsSrc = Subject()
    self.updateTensionsSrc.subscribe(Repo().updateTensions)

    # Compute tensions on demand.
    self.computeTensionsSrc = Subject()
    self.computeTensionsSink = self.computeTensionsSrc.pipe(
        ops.map(lambda _: Repo().computeTensions()))

    # Debounce resize events so redraws happen at most every 2 seconds.
    self.graphResizeUpdateSrc = Subject()
    self.graphResizeUpdateSink = self.graphResizeUpdateSrc.pipe(
        ops.debounce(2.0))
def run(self):
    """Subscribe this websocket server to store changes, then serve."""
    # Debounce bursts of store changes down to at most one per second.
    changes = self.store.stream_changes.pipe(debounce(1))
    self.disposer = changes.subscribe(self)
    self.socketio.run(
        self.app,
        host='0.0.0.0',
        debug=False,
        use_reloader=False,
    )
def __init__(self, source_map, crypto="BTC", currency="USD", debounce_interval=2):
    """Store the pair/exchange details and wire the debounced data stream.

    :param source_map: dict of exchange name -> websocket URL
    :param crypto: cryptocurrency code
    :param currency: fiat currency code
    :param debounce_interval: seconds of silence before a value is emitted
    """
    self.crypto = crypto.upper()
    self.currency = currency.upper()
    self.url = source_map[self.get_exchange()]

    self.subject = Subject()
    # Emit only settled, changed values downstream.
    stream = self.subject.pipe(
        rx_ops.debounce(debounce_interval),
        rx_ops.distinct_until_changed(),
    )

    def handle_error(err):
        self.log(msg=f"subject error: {err}")

    stream.subscribe(on_next=self.get_on_next(), on_error=handle_error)
def open(self):
    # WebSocket open handler: builds the debounced search pipeline that
    # on_message feeds via self.stream.
    print("WebSocket opened")
    # Subject is both observer and observable: messages are pushed into it,
    # the pipeline below consumes it.
    self.stream = Subject()
    searcher = self.stream.pipe(ops.map(lambda x: x["term"]),          # extract the search term
                                ops.filter(lambda text: len(text) > 2),  # ignore very short queries
                                ops.debounce(0.750),                   # wait 750 ms of typing silence
                                ops.distinct_until_changed(),          # skip repeated terms
                                ops.flat_map_latest(search_wikipedia))  # cancel stale lookups

    def send_response(x):
        self.write_message(x.body)

    def on_error(ex):
        print(ex)

    # NOTE(review): `scheduler` is not defined in this method (unlike the
    # sibling handlers that create an AsyncIOScheduler locally) — presumably
    # a module-level scheduler exists; confirm, otherwise this raises
    # NameError on the first connection.
    searcher.subscribe(send_response, on_error, scheduler=scheduler)
def __init__(self, client: MQTTClientWrapper, name="fp50", config=None):
    # Water-bath device wrapper: subscribes to the bath's MQTT topics,
    # periodically requests a temperature reading, and publishes the
    # combined/parsed readings onto self.message_subject.
    super().__init__(name, config=config)
    self.client = client
    self.topic_base = self.config["waterBath"][name]["topicBase"]
    self.topic = self.topic_base + "/setpoint"
    self.message_subject = Subject()
    self.client.subscribe(self.topic_base + "/crystallizer_temperature")
    self.client.subscribe(self.topic_base + "/setpoint")
    # Dedicated thread so the periodic poll never blocks the MQTT loop.
    self.interval_scheduler = NewThreadScheduler()

    def update(x, scheduler=None):
        # Publishing an empty payload acts as a read request to the device.
        self.client.publish(self.topic_base + "/crystallizer_temperature",
                            None)

    rx.interval(self.config["waterBath"][name]["interval"],
                self.interval_scheduler).subscribe(update)

    def convert(x):
        # x is the latest (topic, qos, message) tuple from each callback
        # stream; index 2 is the MQTT message carrying the payload.
        payloads = [xx[2].payload for xx in x]
        for p in payloads:
            if not p:
                # skipping conversion request
                return None
        return {
            # "power": float(payloads[0]),
            # "internal_temperature": float(payloads[1]),
            "crystallizer_temperature": float(payloads[0]),
            "setpoint": float(payloads[1]),
        }

    # Pair the newest value from both topics, parse, drop the empty
    # read-request frames, and debounce bursts before re-publishing on the
    # subject.
    rx.combine_latest(
        from_callback(
            self.client.message_callback_add)(self.topic_base +
                                              "/crystallizer_temperature"),
        from_callback(self.client.message_callback_add)(self.topic_base +
                                                        "/setpoint"),
    ).pipe(operators.map(convert),
           operators.filter(lambda x: x is not None),
           operators.debounce(0.6)).subscribe(self.message_subject)
async def main(loop):
    """Read search terms from stdin and pretty-print Wikipedia results."""
    scheduler = AsyncIOScheduler(loop)
    finder = WikipediaFinder(loop)
    stream = Subject()

    def to_observable(term):
        # Wrap the search coroutine so it can be flat-mapped.
        future = loop.create_task(finder.search(term))
        return rx.from_future(future)

    def show(result):
        parsed = json.loads(result)
        print(json.dumps(parsed, sort_keys=True, indent=2))

    stream.pipe(
        ops.debounce(0.750),       # wait for typing to settle
        ops.distinct(),            # never repeat a term
        ops.flat_map_latest(to_observable),
    ).subscribe(show, scheduler=scheduler)

    def reader():
        line = sys.stdin.readline().strip()
        stream.on_next(line)

    loop.add_reader(sys.stdin.fileno(), reader)
def create():
    # 50-tick debounce over the shared test source.
    debounced = _.debounce(50)
    return xs.pipe(debounced)
def create():
    # Debouncing a stream that never emits still never emits.
    source = never()
    return source.pipe(_.debounce(10))
def run(self):
    """Subscribe the printer to debounced store changes."""
    logger.info('Starting stream printer...')
    # At most one change notification per second reaches the printer.
    debounced = self.store.stream_changes.pipe(debounce(1))
    self.disposer = debounced.subscribe(self)
def create(): return e1.pipe(ops.debounce(5), )
def audio_encoder(sources):
    # Cycler-style dataflow: takes the application's source streams (argv,
    # inotify, file, httpd, encoder, s3) and returns a Sink of request
    # streams for each driver.

    # Parse configuration: pick the --config argument off argv, share the
    # result with every consumer below.
    parser = create_arg_parser()
    parsed_argv = sources.argv.argv.pipe(
        ops.skip(1),
        argparse.parse(parser),
        ops.filter(lambda i: i.key == 'config'),
        ops.subscribe_on(aio_scheduler),
        ops.share(),
    )

    # monitor and parse config file: watch it for modifications, and treat
    # each modification (plus startup) as a "re-read" trigger.
    monitor_init = parsed_argv.pipe(
        ops.flat_map(lambda i: rx.from_([
            inotify.AddWatch(
                id='config', path=i.value,
                flags=aionotify.Flags.MODIFY),
            inotify.Start(),
        ])))
    config_update = sources.inotify.response.pipe(
        ops.debounce(5.0, scheduler=aio_scheduler),  # coalesce edit bursts
        ops.map(lambda i: True),
        ops.start_with(True),                        # read once at startup
    )
    read_request, read_response = rx.combine_latest(
        parsed_argv, config_update).pipe(
            ops.starmap(
                lambda config, _: file.Read(id='config', path=config.value)),
            file.read(sources.file.response),
        )
    config = read_response.pipe(
        ops.filter(lambda i: i.id == "config"),
        ops.flat_map(lambda i: i.data),
        parse_config,
    )

    # Transcode request handling: (re)configure the encoder when encode
    # settings change, and turn each HTTP transcode request into an
    # EncodeMp3 command on the encoder scheduler.
    encode_init = config.pipe(
        ops.map(lambda i: i.encode),
        ops.distinct_until_changed(),
        ops.map(lambda i: encoder.Configure(samplerate=i.samplerate,
                                            bitdepth=i.bitdepth)),
    )
    encode_request = sources.httpd.route.pipe(
        ops.filter(lambda i: i.id == 'flac_transcode'),
        ops.flat_map(lambda i: i.request),
        ops.flat_map(lambda i: rx.just(i, encode_scheduler)),
        ops.map(lambda i: encoder.EncodeMp3(
            id=i.context,
            data=i.data,
            key=i.match_info['key'])),
    )
    encoder_request = rx.merge(encode_init, encode_request)

    # store encoded file: upload each encoder result to S3 on its own
    # scheduler.
    store_requests = sources.encoder.response.pipe(
        ops.observe_on(s3_scheduler),
        ops.map(lambda i: s3.UploadObject(
            key=i.key + '.flac',
            data=i.data,
            id=i.id,
        )),
    )

    # acknowledge http request once the upload completed.
    http_response = sources.s3.response.pipe(
        ops.map(lambda i: httpd.Response(
            data='ok'.encode('utf-8'),
            context=i.id,
        )))

    # http server: initialize once from the first config value.
    http_init = config.pipe(
        ops.take(1),
        ops.flat_map(lambda i: rx.from_([
            httpd.Initialize(request_max_size=0),
            httpd.AddRoute(
                methods=['POST'],
                path='/api/transcode/v1/flac/{key:[a-zA-Z0-9-\._]*}',
                id='flac_transcode',
            ),
            httpd.StartServer(host=i.server.http.host,
                              port=i.server.http.port
                              ),
        ])),
    )
    http = rx.merge(http_init, http_response)

    # s3 database: configure the client once from the first config value.
    s3_init = config.pipe(
        ops.take(1),
        ops.map(lambda i: s3.Configure(
            access_key=i.s3.access_key,
            secret_key=i.s3.secret_key,
            bucket=i.s3.bucket,
            endpoint_url=i.s3.endpoint_url,
            region_name=i.s3.region_name,
        )),
    )

    # merge sink requests: one request stream per driver.
    file_requests = read_request
    s3_requests = rx.merge(s3_init, store_requests)
    return Sink(
        encoder=encoder.Sink(request=encoder_request),
        s3=s3.Sink(request=s3_requests),
        file=file.Sink(request=file_requests),
        httpd=httpd.Sink(control=http),
        inotify=inotify.Sink(request=monitor_init),
    )
# Catalogue of RxPY operators grouped by category. The bare calls are
# illustrative only — most of these operators require arguments in real use.

# Mathematical / aggregate operators
op.concat()
op.count()
op.max()
op.min()
op.reduce()
op.sum()

"""Transformation"""
op.buffer()
op.group_by()
op.map()
op.scan()
# ...

"""Filtering"""
op.debounce()
op.distinct()
op.filter()
op.element_at()
op.first()
op.ignore_elements()
op.last()
op.skip()
op.skip_last()
op.take()
op.take_last()
# ...

"""Error Handling"""
op.catch()
op.retry()
def create():
    # An empty stream completes immediately; debounce passes that through.
    source = empty()
    return source.pipe(_.debounce(10))
def create():
    # Errors propagate through debounce untouched.
    failing = throw(ex)
    return failing.pipe(_.debounce(10))
def create():
    # Same 5-tick debounce, spelled with an intermediate name.
    quieted = ops.debounce(5)
    return e1.pipe(quieted)