def get_pipeline_run_observable(graphene_info, run_id, after=None):
    """Return an Observable streaming log events for the given pipeline run.

    Args:
        graphene_info (ResolveInfo): GraphQL resolve context; supplies the
            instance via ``graphene_info.context.instance``.
        run_id (str): Id of the run to stream logs for.
        after (Optional[int]): Cursor; only events after it are emitted.

    Returns:
        Observable: emits ``GraphenePipelineRunLogsSubscriptionSuccess``
        batches, or a single ``GraphenePipelineRunLogsSubscriptionFailure``
        when the run id cannot be loaded.
    """
    # Local imports presumably avoid a circular dependency between the
    # schema modules and this resolver — TODO confirm.
    from ...schema.pipelines.pipeline import GraphenePipelineRun
    from ...schema.pipelines.subscription import (
        GraphenePipelineRunLogsSubscriptionFailure,
        GraphenePipelineRunLogsSubscriptionSuccess,
    )

    from ..events import from_event_record

    check.inst_param(graphene_info, "graphene_info", ResolveInfo)
    check.str_param(run_id, "run_id")
    check.opt_int_param(after, "after")
    instance = graphene_info.context.instance
    run = instance.get_run_by_id(run_id)

    if not run:
        # Unknown run id: emit a single failure payload instead of raising,
        # so the subscription still yields a well-formed GraphQL response.
        def _get_error_observable(observer):
            observer.on_next(
                GraphenePipelineRunLogsSubscriptionFailure(
                    missingRunId=run_id,
                    message="Could not load run with id {}".format(run_id)))

        return Observable.create(_get_error_observable)  # pylint: disable=E1101

    # pylint: disable=E1101
    return Observable.create(
        PipelineRunObservableSubscribe(instance, run_id, after_cursor=after)
    ).map(lambda events: GraphenePipelineRunLogsSubscriptionSuccess(
        run=GraphenePipelineRun(run),
        messages=[from_event_record(event, run.pipeline_name)
                  for event in events],
    ))
def add_route(type, path):
    """Register a GET or POST handler on ``app`` and expose requests as an
    Observable; returns None for any other method string."""

    def on_get_route_subscribe(observer):
        # Push each incoming GET request to the observer, reply immediately.
        def on_get_data(request):
            observer.on_next(request)
            return web.Response(text="Hello, world")

        app.router.add_get(path, on_get_data)

    def on_post_route_subscribe(observer):
        async def on_post_data(request, path):
            payload = await request.read()
            response = web.StreamResponse(status=200, reason=None)
            await response.prepare(request)
            # Hand the caller everything needed to answer later.
            observer.on_next({
                "what": "data",
                "path": path,
                "data": payload,
                "context": (request, response),
            })
            return response

        app.router.add_post(path, lambda r: on_post_data(r, path))

    if type == "GET":
        return Observable.create(on_get_route_subscribe)
    if type == "POST":
        return Observable.create(on_post_route_subscribe)
    return None
def test_create_observer_throws():
    """Callbacks that raise inside a subscriber should surface RxException."""

    def subscribe(o):
        o.on_next(1)
        return lambda: None

    try:
        return Observable.create(subscribe).subscribe(lambda x: _raise('ex'))
    except RxException:
        pass

    def subscribe2(o):
        o.on_error('exception')
        return lambda: None

    try:
        return Observable.create(subscribe2).subscribe(
            on_error=lambda ex: _raise('ex'))
    except RxException:
        pass

    def subscribe3(o):
        o.on_completed()
        return lambda: None

    try:
        # BUG FIX: the completion keyword in RxPY is ``on_completed``;
        # ``on_complete`` raised TypeError before the callback could run.
        return Observable.create(subscribe3).subscribe(
            on_completed=lambda: _raise('ex'))
    except RxException:
        pass
def main():
    # Demo driver: publishes fake tracking data over MQTT and lets the
    # receivers / subscriber react to it.
    event_publisher = MqttClient()

    # share() so EventSubscriber and any later subscribers see one stream.
    object_positions = Observable.create(ObjectReceiver).share()
    events = Observable.create(TrackingReceiver).share()
    EventSubscriber([object_positions, events])

    event_publisher.client.publish("tracking/status", payload="start_tracking", qos=2)
    # NOTE(review): position payloads lack a closing bracket ("[1, (2,2,34)");
    # presumably intentional malformed test data — confirm with the parser.
    event_publisher.client.publish("data/position", payload="[1, (2,2,34)", qos=2)
    event_publisher.client.publish("data/object", payload="[1, car]", qos=2)
    event_publisher.client.publish("data/position", payload="[1, (2,2,37)", qos=2)
    event_publisher.client.publish("data/object", payload="[1, car]", qos=2)
    event_publisher.client.publish("data/position", payload="[2, (0,0,2)", qos=2)
    event_publisher.client.publish("data/object", payload="[2, person]", qos=2)
    # Give the asynchronous MQTT delivery time to drain before stopping.
    sleep(1)
    event_publisher.client.publish("tracking/status", payload="stop_tracking", qos=2)
    sleep(1)
def create3():
    # Subscribing to an observable whose subscribe function raises should
    # propagate the error through the transducer chain.
    Observable.create(throw_error).transduce(
        compose(
            filtering(even),
            mapping(mul10))
    ).subscribe()

# NOTE(review): ``self`` must come from an enclosing test-method scope that
# is not visible in this chunk.
self.assertRaises(RxException, create3)
def get_pipeline_run_observable(graphene_info, run_id, after=None):
    """Stream log events for a pipeline run as dauphin GraphQL payloads.

    Args:
        graphene_info (ResolveInfo): resolve context carrying the instance.
        run_id (str): id of the run to observe.
        after (Optional[str]): cursor; only events after it are emitted.
            NOTE(review): sibling variants of this resolver validate
            ``after`` as an int — confirm which cursor type applies here.
    """
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.str_param(run_id, 'run_id')
    check.opt_str_param(after, 'after')
    instance = graphene_info.context.instance
    run = instance.get_run(run_id)

    if not run:
        # Unknown run id: emit a single "missing run id" failure payload.
        def _get_error_observable(observer):
            observer.on_next(
                graphene_info.schema.type_named(
                    'PipelineRunLogsSubscriptionMissingRunIdFailure')(
                        missingRunId=run_id))

        return Observable.create(_get_error_observable)  # pylint: disable=E1101

    # Rebuild the execution plan so events can be resolved against it.
    pipeline = get_dauphin_pipeline_from_selector(graphene_info, run.selector)
    execution_plan = create_execution_plan(
        pipeline.get_dagster_pipeline(), run.environment_dict,
        RunConfig(mode=run.mode))

    # pylint: disable=E1101
    return Observable.create(
        PipelineRunObservableSubscribe(
            instance, run_id, after_cursor=after)).map(
                lambda events: graphene_info.schema.
                type_named('PipelineRunLogsSubscriptionSuccess')(
                    runId=run_id,
                    messages=[
                        from_event_record(graphene_info, event, pipeline,
                                          execution_plan)
                        for event in events
                    ],
                ))
def get_pipeline_run_observable(graphene_info, run_id, after=None):
    """Stream log events for a pipeline run as dauphin GraphQL payloads.

    Args:
        graphene_info (ResolveInfo): resolve context carrying the instance.
        run_id (str): id of the run to observe.
        after (Optional[int]): cursor; only events after it are emitted.
    """
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.str_param(run_id, 'run_id')
    check.opt_int_param(after, 'after')
    instance = graphene_info.context.instance
    run = instance.get_run_by_id(run_id)

    if not run:
        # Unknown run id: emit a single failure payload with a message.
        def _get_error_observable(observer):
            observer.on_next(
                graphene_info.schema.
                type_named('PipelineRunLogsSubscriptionFailure')(
                    missingRunId=run_id,
                    message='Could not load run with id {}'.format(run_id)))

        return Observable.create(_get_error_observable)  # pylint: disable=E1101

    # pylint: disable=E1101
    return Observable.create(
        PipelineRunObservableSubscribe(instance, run_id, after_cursor=after)
    ).map(lambda events: graphene_info.schema.type_named(
        'PipelineRunLogsSubscriptionSuccess')(
            run=graphene_info.schema.type_named('PipelineRun')(run),
            messages=[from_event_record(event, run.pipeline_name)
                      for event in events],
        ))
async def run(sel, loop):
    """Listen on localhost ports 1234/1235 and merge their messages.

    All messages are logged at info level; messages containing "error" are
    additionally logged at error level.
    """

    def create_socket_observable(port, observer):
        # Non-blocking listening socket registered with the selector; the
        # ``accept`` callback (defined elsewhere) feeds data to ``observer``.
        sock = socket.socket()
        sock.bind(('localhost', port))
        sock.listen(100)
        sock.setblocking(False)
        sel.register(
            sock,
            selectors.EVENT_READ,
            partial(accept, observer, sel)
        )

    socket_1234 = Observable.create(
        partial(create_socket_observable, 1234)
    )
    socket_1235 = Observable.create(
        partial(create_socket_observable, 1235)
    )
    # share() so both subscriptions below observe the same merged stream.
    source = socket_1234.merge(socket_1235).share()
    source.subscribe(
        logging.info
    )
    source.where(
        lambda msg: "error" in msg
    ).subscribe(
        logging.error
    )
def class_six():
    """Demonstrate a hand-rolled observable that pushes three numbers."""

    def push_numbers(observer):
        # Emit the fixed sequence, then signal completion.
        for value in (300, 500, 700):
            observer.on_next(value)
        observer.on_completed()

    Observable.create(push_numbers).subscribe(print)
def get_pipeline_run_observable(graphene_info, run_id, after=None):
    """Stream log events for a pipeline run as dauphin GraphQL payloads.

    Handles three degenerate paths before streaming: unknown run id,
    non-watchable event log storage, and an unresolvable pipeline reference.

    Args:
        graphene_info (ResolveInfo): resolve context carrying the instance.
        run_id (str): id of the run to observe.
        after (Optional[int]): cursor; only events after it are emitted.
    """
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.str_param(run_id, 'run_id')
    check.opt_int_param(after, 'after')
    instance = graphene_info.context.instance
    run = instance.get_run(run_id)

    if not run:
        # Unknown run id: emit a single failure payload with a message.
        def _get_error_observable(observer):
            observer.on_next(
                graphene_info.schema.
                type_named('PipelineRunLogsSubscriptionFailure')(
                    missingRunId=run_id,
                    message='Could not load run with id {}'.format(run_id)))

        return Observable.create(_get_error_observable)  # pylint: disable=E1101

    if not instance.can_watch_events:
        # Storage cannot stream events; report instead of silently hanging.
        def _get_error_observable(observer):
            observer.on_next(
                graphene_info.schema.type_named(
                    'PipelineRunLogsSubscriptionFailure')
                (message=
                 'Event log storage on current DagsterInstance is not watchable.'
                 ))

        return Observable.create(_get_error_observable)  # pylint: disable=E1101

    pipeline = get_dauphin_pipeline_reference_from_selector(
        graphene_info, run.selector)

    # Local import presumably avoids a circular dependency — TODO confirm.
    from ..schema.pipelines import DauphinPipeline

    if not isinstance(pipeline, DauphinPipeline):
        # Selector resolved to a non-pipeline reference: nothing to stream.
        return Observable.empty()  # pylint: disable=no-member

    # Rebuild the execution plan so events can be resolved against it.
    execution_plan = create_execution_plan(
        pipeline.get_dagster_pipeline(), run.environment_dict,
        RunConfig(mode=run.mode))

    # pylint: disable=E1101
    return Observable.create(
        PipelineRunObservableSubscribe(
            instance, run_id, after_cursor=after)).map(
                lambda events: graphene_info.schema.
                type_named('PipelineRunLogsSubscriptionSuccess')(
                    runId=run_id,
                    messages=[
                        from_event_record(graphene_info, event, pipeline,
                                          execution_plan)
                        for event in events
                    ],
                ))
def start():
    """Parse CLI arguments, load the YAML config and wire the process
    observer to a buffered command-output stream."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--command", type=str)
    parser.add_argument("--config", type=str)
    args = parser.parse_args()

    cfg = lya.AttrDict.from_yaml(args.config)

    process_events = Observable.create(
        partial(observe_process, command=args.command))
    # Fire only when enough output accumulated within the window.
    buffered = process_events.buffer_with_time(
        cfg.timespan, scheduler=timeout_scheduler)
    triggered = buffered.where(
        lambda buffer: len(buffer) >= cfg.count_trigger)
    triggered.subscribe(
        CommandObserver(cfg.telegram.bot_token,
                        cfg.telegram.chat_id,
                        cfg.video_source))
def processNewActivity(link, token, serialKey, uid, callback):
    """GET the 'new activity' endpoint and feed the response text to callback.

    Args:
        link: base service URL.
        token: auth token sent in the ``wtoken`` header.
        serialKey: device IMEI, sent as the ``imei`` query parameter.
        uid: user id, sent as the ``uid`` query parameter.
        callback: on_next handler receiving the response body on success.

    HTTP statuses >= 300 are routed to on_error and logged.
    """

    def callApi(observer):
        params = {'imei': serialKey, 'uid': uid}
        url = link + '/wal-program/activities/new'
        headers = {'Content-type': 'application/json', 'wtoken': token}
        response = requests.get(url, headers=headers, params=params)
        if response.status_code >= 300:
            observer.on_error(response.text)
            # BUG FIX: previously fell through and also called on_next after
            # on_error, violating the observer contract.
            return
        observer.on_next(response.text)

    def onError(error):
        logging.error(error)

    Observable.create(callApi).subscribe(on_next=callback, on_error=onError)
def test_Retry_Observable_Throws():
    # Exceptions raised inside subscriber callbacks of retried observables
    # should surface as RxException while the test scheduler runs.
    scheduler1 = TestScheduler()
    xs = Observable.return_value(1, scheduler1).retry()
    xs.subscribe(lambda x: _raise('ex'))

    try:
        return scheduler1.start()
    except RxException:
        pass

    scheduler2 = TestScheduler()
    ys = Observable.throw_exception('ex', scheduler2).retry()
    d = ys.subscribe(on_error=lambda ex: _raise('ex'))

    # Dispose mid-run so the infinite retry of a throwing source terminates.
    scheduler2.schedule_absolute(210, lambda: d.dispose())
    scheduler2.start()

    scheduler3 = TestScheduler()
    zs = Observable.return_value(1, scheduler3).retry()
    zs.subscribe(on_completed=lambda: _raise('ex'))

    try:
        return scheduler3.start()
    except RxException:
        pass

    # A subscribe function that raises immediately should also surface.
    xss = Observable.create(lambda o: _raise('ex')).retry()

    try:
        return xss.subscribe()
    except RxException:
        pass
def on_property_change(self, td, name, qos=QOS_0):
    """Subscribes to property changes on a remote Thing.

    Returns an Observable emitting a PropertyChangeEmittedEvent for every
    value published on the property's MQTT topic.
    """
    forms = td.get_property_forms(name)
    href = self._pick_mqtt_href(td, forms, op=InteractionVerbs.OBSERVE_PROPERTY)

    if href is None:
        raise FormNotFoundException()

    parsed_href = self._parse_href(href)

    def next_item_builder(msg_data):
        # Wrap the raw MQTT payload value in the WoT event types.
        init = PropertyChangeEventInit(name=name, value=msg_data.get("value"))
        return PropertyChangeEmittedEvent(init=init)

    subscribe = self._build_subscribe(
        broker_url=parsed_href["broker_url"],
        topic=parsed_href["topic"],
        next_item_builder=next_item_builder,
        qos=qos)

    # noinspection PyUnresolvedReferences
    return Observable.create(subscribe)
def market_observable(self):
    """Lazily create (and cache) the Betfair market-stream observable.

    The betfairlightweight stream listener pushes market data into a fake
    queue whose ``put`` forwards each item straight to the Rx observer.
    """
    assert self._client, 'Befair must first be initialized'
    if not self._market_observable:

        def observe_market(observer):
            class FakeQueue:
                # Quacks like queue.Queue for the stream listener, but
                # forwards every item directly to the observer instead.
                def put(self, value):
                    observer.on_next(value)

            listener = betfairlightweight.StreamListener(
                output_queue=FakeQueue(), lightweight=False)
            self._market_stream = self._client.streaming.create_stream(
                listener=listener, description='BFG Market Stream')
            market_filter = streaming_market_filter(
                market_ids=self.todays_racecard)
            market_data_filter = streaming_market_data_filter(
                fields=['EX_TRADED_VOL', 'EX_LTP', 'EX_MARKET_DEF'],
                # 'EX_BEST_OFFERS', 'EX_TRADED',
                # ladder_levels=10,  # Market depth
            )
            self._market_stream.subscribe_to_markets(
                market_filter=market_filter,
                market_data_filter=market_data_filter,
                # conflate_ms=1000,
            )
            # Run the stream on its own thread so subscribe does not block.
            self._market_stream.start(_async=True)

        # use subscribe_on else we are using the market thread for all
        # operations (async False so we use main thread)
        # use share so multiple subscribers can use this observable but we
        # start emitting after the first has subscribed
        self._market_observable = Observable.create(observe_market)  # .share()
    return self._market_observable
def test_repeat_observable_repeat_count_throws():
    """Exceptions raised from subscriber callbacks of repeated observables
    should surface as RxException."""
    scheduler1 = TestScheduler()
    xs = Observable.return_value(1, scheduler1).repeat(3)
    xs.subscribe(lambda x: _raise('ex'))

    try:
        return scheduler1.start()
    except RxException:
        pass

    scheduler2 = TestScheduler()
    # BUG FIX: RxPY's factory is ``throw_exception`` (cf. the retry test);
    # ``throwException`` is not defined on Observable.
    ys = Observable.throw_exception('ex1', scheduler2).repeat(3)
    # NOTE(review): this registers the raising callback as on_next, which
    # never fires for an error-only source — confirm on_error was intended.
    ys.subscribe(lambda ex: _raise('ex2'))

    try:
        return scheduler2.start()
    except RxException:
        pass

    scheduler3 = TestScheduler()
    zs = Observable.return_value(1, scheduler3).repeat(100)
    # BUG FIX: the completion keyword is ``on_completed``; ``on_complete``
    # raised TypeError before the subscription was even created.
    d = zs.subscribe(on_completed=lambda: _raise('ex3'))

    # Dispose early so the repeated completion callback never fires.
    scheduler3.schedule_absolute(10, lambda: d.dispose())
    scheduler3.start()

    xss = Observable.create(lambda o: _raise('ex4')).repeat(3)

    try:
        return xss.subscribe()
    except RxException:
        pass
def make_crossbar(pull_result):
    """Create a request/response "crossbar" over a pulled result stream.

    Returns ``(pull_request, crossbar)`` where ``pull_request`` is an
    observable of outgoing requests and ``crossbar(request, match)`` returns
    an observable that completes with the first ``pull_result`` item for
    which ``match(item)`` is true, after forwarding ``request`` upstream.
    """
    request_observer = None

    def crossbar(request, match):
        def crossbar_subscribe(observer):
            def on_next(i):
                if match(i):
                    observer.on_next(i)
                    observer.on_completed()
                    # Stop listening once the matching answer has arrived.
                    dispose()
                    return

            dispose = pull_result.subscribe(
                on_next=on_next)  # scheduler=)
            # NOTE(review): assumes ``pull_request`` was subscribed first so
            # ``request_observer`` is already set — confirm with callers.
            request_observer.on_next(request)

        return Observable.create(crossbar_subscribe)

    def on_subscribe(observer):
        # Capture the subscriber so crossbar() can push requests into it.
        nonlocal request_observer
        request_observer = observer

    pull_request = Observable.create(on_subscribe)
    return pull_request, crossbar
def create_http_observable(request_obj,
                           cookie_store=None,
                           cache_store=None,
                           http_client=None):
    """Create an observable wrapping an HTTP request.

    :param request_obj: request description passed to the HTTP client
    :param cookie_store: optional cookie storage for a newly created client
    :param cache_store: optional cache storage for a newly created client
    :param http_client: existing client to reuse; created on demand if None
    :return: Observable whose notifications come via RespondListener
    """
    # Local imports presumably avoid a circular dependency — TODO confirm.
    from tool_package.rx_http_request.http_request import RespondListener
    from tool_package.rx_http_request.http_request import RequestClient

    def create_http(observer):
        respond_listener = RespondListener(observer)
        try:
            http_client.request(request_obj, respond_listener)
        except Exception as e:
            # Route synchronous failures through the listener's failure path.
            respond_listener.on_fail(e)

    if http_client is None:
        http_client = RequestClient(cookie_store, cache_store)
    return Observable.create(create_http)
def websocket_receive(self, message):
    """Handle one graphql-ws protocol frame from the websocket.

    Supports ``connection_init`` (ignored), ``start`` (execute the query and
    stream subscription results back) and ``stop`` (tear the stream down).
    """
    request = json.loads(message['text'])
    id = request.get('id')

    if request['type'] == 'connection_init':
        return
    elif request['type'] == 'start':
        payload = request['payload']
        context = AttrDict(self.scope)
        # Lets resolvers register per-operation event sources under this id.
        context.subscribe = functools.partial(self._subscribe, id)

        stream = StreamObservable()
        result = schema.execute(
            payload['query'],
            operation_name=payload['operationName'],
            variable_values=payload['variables'],
            context_value=context,
            root_value=Observable.create(stream).share(),
            allow_subscriptions=True,
        )
        if hasattr(result, 'subscribe'):
            # Subscription: forward every emitted result over the socket.
            result.subscribe(functools.partial(self._send_result, id))
            self.subscriptions[id] = stream
        else:
            # Plain query/mutation: executed synchronously, answer once.
            self._send_result(id, result)
    elif request['type'] == 'stop':
        self._unsubscribe(id)
        if id in self.subscriptions:
            del self.subscriptions[id]
def create_text_stream():
    # NOTE(review): ``nonlocal text_observer`` requires an enclosing function
    # scope that defines ``text_observer`` — this def must itself be nested
    # inside another function; confirm against the full file.
    def on_text_stream_subscribe(o):
        nonlocal text_observer
        # Capture the subscriber so the enclosing scope can push text later.
        text_observer = o

    text_observable = Observable.create(on_text_stream_subscribe)
    return text_observable
def on_request_item(i):
    # Dispatch one filesystem-driver request item; responses are pushed to
    # the enclosing ``observer`` (bound in an outer scope).
    if type(i) is Context:
        # Re-wrap the nested observable so its subscription is routed
        # through on_context_subscribe.
        observer.on_next(
            Context(
                i.id,
                Observable.create(
                    functools.partial(on_context_subscribe, i.observable))))
    elif type(i) is Read:
        # Sized read, delivered as a single-item observable.
        with open(i.path, i.mode) as content_file:
            content = content_file.read(i.size)
            data = Observable.just(content)
            observer.on_next(
                ReadResponse(id=i.id, path=i.path, data=data))
    elif type(i) is ReadLine:
        # Line-by-line read. NOTE(review): the file object is intentionally
        # left open so the observable can iterate it — confirm it is closed
        # downstream when the stream completes.
        content_file = open(i.path)
        data = Observable.from_(content_file)
        observer.on_next(
            ReadResponse(id=i.id, path=i.path, data=data))
    elif type(i) is Write:
        if i.mkdirs is True:
            os.makedirs(os.path.split(i.path)[0], exist_ok=True)
        with open(i.path, i.mode) as content_file:
            size = content_file.write(i.data)
            # Status 0 only when every byte was written.
            status = 0 if size == len(i.data) else -1
            observer.on_next(
                WriteResponse(id=i.id, path=i.path, status=status))
    else:
        observer.on_error("file unknown command: {}".format(i))
async def tcp_client(host, port):
    """Connect to ``host:port`` and emit a Connection wrapping a data stream.

    The emitted ``Connection``'s observable yields ``Data`` items read from
    the socket until EOF, then completes.  All observer notifications are
    marshalled onto the event loop via ``loop.call_soon``.
    """

    def on_connection_subscribe(observer, reader, writer):
        async def handle_connection(observer, reader, writer):
            while True:
                try:
                    data = await reader.read(100)
                    if data == b'':
                        break
                    loop.call_soon(observer.on_next, Data(data=data))
                except Exception as e:
                    # BUG FIX: was ``loop.call_soon(observer.on_error(e))``,
                    # which invoked on_error immediately and scheduled its
                    # None return value as a callback (a TypeError later).
                    loop.call_soon(observer.on_error, e)
                    break

            loop.call_soon(observer.on_completed)
            writer.close()

        asyncio.ensure_future(
            handle_connection(observer, reader, writer))

    try:
        reader, writer = await asyncio.open_connection(host, port, loop=loop)
        connection = Observable.create(
            lambda o: on_connection_subscribe(o, reader, writer))
        # ``observer`` here is bound in an enclosing (driver) scope.
        observer.on_next(
            Connection(id=writer, observable=connection))
    except Exception as e:
        # BUG FIX: same call_soon misuse as above.
        loop.call_soon(observer.on_error, e)
def __init__(self, server_address):
    """Unix-domain-socket IPC client; incoming messages are exposed as an
    observable fed by ``init_income_observable`` (defined on the class)."""
    self.logger = logging.getLogger('IpcClient')
    # Filesystem path of the server's unix socket.
    self.addr = server_address
    self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    # Flag toggled by the receive loop elsewhere on the class.
    self.receive_msg = False
    # Presumably guards shared message state used by the receive machinery
    # — confirm against the rest of the class.
    self.messages_lock = Lock()
    self.income_observable = Observable.create(self.init_income_observable)
def get_pipeline_run_observable(graphene_info, run_id, after=None):
    """Stream log events for a pipeline run from the run storage itself.

    Args:
        graphene_info (ResolveInfo): resolve context carrying run storage.
        run_id (str): id of the run to observe.
        after (Optional[str]): cursor; only events after it are emitted.
    """
    check.inst_param(graphene_info, 'graphene_info', ResolveInfo)
    check.str_param(run_id, 'run_id')
    check.opt_str_param(after, 'after')
    pipeline_run_storage = graphene_info.context.pipeline_runs
    run = pipeline_run_storage.get_run_by_id(run_id)

    if not run:
        # Unknown run id: emit a single "missing run id" failure payload.
        def _get_error_observable(observer):
            observer.on_next(
                graphene_info.schema.type_named(
                    'PipelineRunLogsSubscriptionMissingRunIdFailure')(
                        missingRunId=run_id))

        return Observable.create(_get_error_observable)  # pylint: disable=E1101

    def get_observable(pipeline):
        # Map raw event records into the GraphQL success payload.
        return run.observable_after_cursor(after).map(
            lambda events: graphene_info.schema.type_named(
                'PipelineRunLogsSubscriptionSuccess')(
                    runId=run_id,
                    messages=[
                        from_event_record(graphene_info, event, pipeline,
                                          run.execution_plan)
                        for event in events
                    ],
                ))

    return get_observable(
        get_dauphin_pipeline_from_selector(graphene_info, run.selector))
def create():
    """Build a cold observable that synchronously emits 1 then 2 and never
    completes; the subscription returns a no-op disposer."""

    def subscribe(o):
        for value in (1, 2):
            o.on_next(value)
        return lambda: None

    return Observable.create(subscribe)
def on_event(self, td, name, qos=QOS_0):
    """Subscribes to an event on a remote Thing. Returns an Observable."""
    forms = td.get_event_forms(name)
    href = self._pick_mqtt_href(td, forms, op=InteractionVerbs.SUBSCRIBE_EVENT)

    if href is None:
        raise FormNotFoundException()

    parsed_href = self._parse_href(href)

    def next_item_builder(msg_data):
        # Wrap the payload's "data" field in an EmittedEvent.
        return EmittedEvent(init=msg_data.get("data"), name=name)

    subscribe = self._build_subscribe(
        broker_url=parsed_href["broker_url"],
        topic=parsed_href["topic"],
        next_item_builder=next_item_builder,
        qos=qos)

    # noinspection PyUnresolvedReferences
    return Observable.create(subscribe)
def driver(sink):
    """Logging driver: consumes Log/SetLevel requests, emits SetLevelDone.

    NOTE(review): ``observer`` and ``handlers`` are declared nonlocal, so
    they must be bound in an enclosing function scope not visible here.
    """

    def on_subscribe(o):
        nonlocal observer
        observer = o

    def on_request_item(i):
        nonlocal handlers
        nonlocal observer
        if type(i) is Log:
            logging.getLogger(i.logger).log(i.level, i.message)
        elif type(i) is SetLevel:
            level = level_from_string(i.level)
            logger = logging.getLogger(i.logger)
            logger.setLevel(level)
            # Replace any handler previously installed for this logger so
            # repeated SetLevel requests do not accumulate stale handlers.
            if i.logger in handlers:
                logger.removeHandler(handlers[i.logger])
            handlers[i.logger] = logging.StreamHandler()
            handlers[i.logger].setLevel(level)
            logger.addHandler(handlers[i.logger])
        elif type(i) is SetLevelDone:
            # Acknowledge back on the response stream once subscribed.
            if observer is not None:
                observer.on_next(i)
        else:
            if observer is not None:
                observer.on_error("invalid item: {}".format(i))

    sink.request.subscribe(on_request_item)
    return Source(response=Observable.create(on_subscribe))
def encoder(sink):
    """Audio-encoding driver: Configure stores format parameters; EncodeMp3
    transcodes a payload and emits an EncodeResult on the response stream."""

    def on_subscribe(observer):
        # Format parameters set by the most recent Configure request.
        samplerate = None
        bitdepth = None

        def on_next(item):
            nonlocal samplerate
            nonlocal bitdepth
            if type(item) is Configure:
                print("configure: {}".format(item))
                samplerate = item.samplerate
                bitdepth = item.bitdepth
            elif type(item) is EncodeMp3:
                # NOTE(review): the helper is named ``mp3_to_flac`` although
                # the request type is EncodeMp3 — confirm intended direction.
                encoded_data = mp3_to_flac(item.data, samplerate, bitdepth)
                observer.on_next(
                    EncodeResult(id=item.id, key=item.key, data=encoded_data))
            else:
                observer.on_error("unknown item: {}".format(type(item)))

        # Mirror upstream errors/completion onto the response stream.
        sink.request.subscribe(
            on_next=on_next,
            on_error=lambda e: observer.on_error(e),
            on_completed=lambda: observer.on_completed(),
        )

    return Source(response=Observable.create(on_subscribe))
def observable(self, run_id, key, io_type, cursor=None):
    """Return an Observable which streams back log data from the execution
    logs for a given compute step.

    Args:
        run_id (str): The id of the pipeline run.
        key (str): The unique descriptor of the execution step
            (e.g. `solid_invocation.compute`)
        io_type (ComputeIOType): Flag indicating the I/O type, either
            stdout or stderr
        cursor (Optional[Int]): Starting cursor (byte) of log file

    Returns:
        Observable
    """
    check.str_param(run_id, "run_id")
    check.str_param(key, "key")
    check.inst_param(io_type, "io_type", ComputeIOType)
    check.opt_str_param(cursor, "cursor")

    # Cursor arrives as a string; treat missing/empty as byte offset 0.
    cursor = int(cursor) if cursor else 0

    subscription = ComputeLogSubscription(self, run_id, key, io_type, cursor)
    self.on_subscribe(subscription)
    return Observable.create(subscription)  # pylint: disable=E1101
def observable(self) -> Observable:
    """Return a shared Observable that pushes values decoded from the stream.

    Values come from ``self.decode()`` on the event loop; on EOF either the
    connection is re-established (orchestrator restart) or the task waits
    until the stream becomes available again.  Disposing the subscription
    cancels the background task.
    """
    # FIX: dropped pointless f-string prefixes on placeholder-free literals.
    logger.debug("Creating stream observable.")

    def subscribe(obs):
        logger.debug("Subscribed to stream observable.")

        async def push_values():
            while True:
                try:
                    obs.on_next(await self.decode())
                except EOFError:
                    if self.available.is_set():
                        logger.info(
                            "Orchestrator shut down. Attempting to reconnect."
                        )
                        await self.reconnect()
                    else:
                        await self.available.wait()
                except asyncio.futures.CancelledError:
                    logger.warning("Push-values future cancelled")
                    return
            # NOTE: unreachable — the loop only exits via the return above.
            logger.error("Push-values unexpectedly shutdown.")

        task = asyncio.ensure_future(push_values())

        def dispose():
            # Cancelling triggers CancelledError inside push_values.
            logger.debug("Disposed of stream observable subscription.")
            task.cancel()

        return dispose

    return Observable.create(subscribe).subscribe_on(scheduler).share()
def driver(sink):
    """inotify driver: AddWatch registers paths; Start begins emitting
    filesystem Events on the response stream."""

    def on_subscribe(observer):
        watcher = aionotify.Watcher()

        async def read_events():
            nonlocal observer
            await watcher.setup(loop)
            while True:
                event = await watcher.get_event()
                # Marshal the notification onto the event loop.
                loop.call_soon(observer.on_next,
                               Event(id=event.alias, path=event.name))
            # NOTE(review): unreachable — the loop above never breaks, so
            # the watcher is never closed here; confirm intended lifetime.
            watcher.close()

        def on_next(item):
            if type(item) is AddWatch:
                watcher.watch(alias=item.id, path=item.path, flags=item.flags)
            elif type(item) is Start:
                asyncio.ensure_future(read_events())
            else:
                observer.on_error("unknown item: {}".format(type(item)))

        # Mirror upstream errors onto the response stream.
        sink.request.subscribe(
            on_next=on_next,
            on_error=lambda e: observer.on_error(e))

    return Source(response=Observable.create(on_subscribe))
def create():
    """Fixture: emits 1, 2 synchronously, then schedules 3..6 at relative
    times 600/700/900/1100; disposal suppresses the scheduled emissions."""

    def subscribe(o):
        is_stopped = [False]
        o.on_next(1)
        o.on_next(2)

        # The four original hand-written actions differed only in value;
        # a factory binds each value without late-binding surprises.
        def make_action(value):
            def action(scheduler, state):
                if not is_stopped[0]:
                    return o.on_next(value)
            return action

        for delay, value in ((600, 3), (700, 4), (900, 5), (1100, 6)):
            scheduler.schedule_relative(delay, make_action(value))

        def dispose():
            is_stopped[0] = True

        return dispose

    return Observable.create(subscribe)
def create_data_stream(name):
    # Build an observable whose observer is stored under ``name`` so the
    # enclosing scope can push data into it later.
    # NOTE(review): ``nonlocal data_observer`` requires this def to be nested
    # inside a function that binds ``data_observer`` — confirm in full file.
    def on_subscribe(o, name):
        nonlocal data_observer
        data_observer[name] = o

    data_observable = Observable.create(lambda o: on_subscribe(o, name))
    return data_observable
def create():
    """Fixture: signals ``ex`` first; all notifications issued afterwards
    (a value, a second error, completion) must be ignored downstream."""

    def subscribe(observer):
        observer.on_error(ex)
        observer.on_next(100)
        observer.on_error('foo')
        observer.on_completed()

        def dispose():
            return None

        return dispose

    return Observable.create(subscribe)
def factory():
    # Counts each subscription attempt and records disposal through
    # closed-over cells; ``count`` and ``disconnected`` are bound in the
    # enclosing (test) scope not visible in this chunk.
    count[0] += 1

    def create(obs):
        def func():
            # Disposal marker checked by the surrounding test.
            disconnected[0] = True
        return func

    return Observable.create(create)
def test_create_observer_throws(self):
    """Callbacks that raise inside a subscriber should surface RxException."""

    def subscribe(o):
        o.on_next(1)
        return lambda: None

    try:
        return Observable.create(subscribe).subscribe(lambda x: _raise('ex'))
    except RxException:
        pass

    def subscribe2(o):
        o.on_error('exception')
        return lambda: None

    try:
        return Observable.create(subscribe2).subscribe(
            on_error=lambda ex: _raise('ex'))
    except RxException:
        pass

    def subscribe3(o):
        o.on_completed()
        return lambda: None

    try:
        # BUG FIX: the completion keyword in RxPY is ``on_completed``;
        # ``on_complete`` raised TypeError before the callback could run.
        return Observable.create(subscribe3).subscribe(
            on_completed=lambda: _raise('ex'))
    except RxException:
        pass
def replicate_many(observables, subjects):
    """Wire each named observable to the same-named subject.

    Emits the aggregate subscription through the returned observable so the
    caller can dispose everything at once; disposing also disposes the
    subjects themselves.
    """

    def create_observer(observer):
        subscription = CompositeDisposable()
        for name in observables.keys():
            # Skip subjects already torn down.
            if not subjects[name].is_disposed:
                subscription.add(
                    observables[name].subscribe(
                        observer=subjects[name], on_error=log_error))
        observer.on_next(subscription)

        def dispose():
            subscription.dispose()
            for name in subjects:
                # BUG FIX: ``hasattr(subjects, x)`` tested the dict object
                # for an attribute named after the key, which is never true,
                # so subjects were never actually disposed.  Dispose each
                # subject that is still live instead.
                if not subjects[name].is_disposed:
                    subjects[name].dispose()

        return dispose

    return Observable.create(create_observer)
def tweets_for(topics):
    """Return a shared observable of raw tweet payloads matching ``topics``."""

    def observe_tweets(observer):
        class TweetListener(StreamListener):
            # Forward each raw payload; returning True keeps the stream open.
            def on_data(self, raw_data):
                observer.on_next(raw_data)
                return True

            def on_error(self, status_code):
                observer.on_error(status_code)

        listener = TweetListener()
        credentials = get_twitter_credentials()
        auth = OAuthHandler(credentials["consumer_key"],
                            credentials["consumer_secret"])
        auth.set_access_token(credentials["access_token"],
                              credentials["access_token_secret"])
        # Stream the live feed filtered down to the requested topics.
        Stream(auth, listener).filter(track=topics)

    return Observable.create(observe_tweets).share()
def test_create_observer_throws(self):
    """Exceptions raised in subscriber callbacks propagate as RxException."""

    def make_source(signal):
        # Build an observable whose subscribe fires one notification.
        def subscribe(o):
            signal(o)
            return lambda: None

        return Observable.create(subscribe)

    with self.assertRaises(RxException):
        make_source(lambda o: o.on_next(1)).subscribe(lambda x: _raise("ex"))

    with self.assertRaises(RxException):
        make_source(lambda o: o.on_error("exception")).subscribe(
            on_error=lambda ex: _raise("ex"))

    with self.assertRaises(RxException):
        make_source(lambda o: o.on_completed()).subscribe(
            on_completed=lambda: _raise("ex"))
""" This is a simple demonstration of a custom Observable implementing on_next, on_error and on_completed callbacks This is used when one wants to have their own callbacks executed when a stream comes in """ from rx import Observable, Observer def push_strings(observer): observer.on_next("Alpha") observer.on_next("Beta") observer.on_next("Gamma") observer.on_next("Delta") observer.on_next("Epsilon") observer.on_completed() class PrintObserver(Observer): def on_next(self, value): print("Received {}".format(value)) def on_error(self, error): print("Error encountered {}".format(error)) def on_completed(self): print("Done") source = Observable.create(push_strings) source.subscribe(PrintObserver())
def test_create_exception(self):
    """Subscribing to a raising subscribe function surfaces RxException."""
    source = Observable.create(lambda o: _raise('ex'))
    try:
        return source.subscribe()
    except RxException:
        pass
def test_create_exception(self):
    """A subscribe function that raises must surface RxException."""
    source = Observable.create(lambda o: _raise("ex"))
    with self.assertRaises(RxException):
        source.subscribe()