class Node(object):
    def __init__(self, node_dict):
        self.data = Subject()
        self.attrs = node_dict
        self._data = None  # last input seen; consulted when the transform is swapped

    def send_to(self, to_node):
        self.data.subscribe(to_node.on_data)

    def compute_output(self, data):
        return self.transform(data, self.attrs.get('transformAttributes', {}))

    def set_transform(self, transform):
        self.transform = transform
        if self._data is not None:
            self.data.on_next(self.compute_output(self._data))

    @staticmethod
    def create(node_dict):
        from .data import DataNode
        from .transform import TransformNode
        from .sink import SinkNode

        node_type_map = {
            'DATA_NODE': DataNode,
            'TRANSFORM_NODE': TransformNode,
            'SINK_NODE': SinkNode
        }
        node_type = node_dict['nodeType']
        return node_type_map[node_type](node_dict)
class WSHandler(WebSocketHandler):
    def open(self):
        print("WebSocket opened")

        # A Subject is both an observable and observer, so we can both subscribe
        # to it and also feed (on_next) it with new values
        self.stream = Subject()

        # Get all distinct key up events from the input and only fire if long enough and distinct
        query = (
            self.stream.map(lambda x: x["term"])
            .filter(lambda text: len(text) > 2)     # Only if the text is longer than 2 characters
            .debounce(0.750, scheduler=scheduler)   # Pause for 750ms
            .distinct_until_changed()               # Only if the value has changed
        )

        searcher = query.flat_map_latest(search_wikipedia)

        def send_response(x):
            self.write_message(x.body)

        def on_error(ex):
            print(ex)

        searcher.subscribe(send_response, on_error)

    def on_message(self, message):
        obj = json_decode(message)
        self.stream.on_next(obj)

    def on_close(self):
        print("WebSocket closed")
def test_accepts_multiple_subscription_fields_defined_in_schema():
    SubscriptionTypeMultiple = GraphQLObjectType(
        name='Subscription',
        fields=OrderedDict([
            ('importantEmail', GraphQLField(EmailEventType)),
            ('nonImportantEmail', GraphQLField(EmailEventType)),
        ])
    )
    test_schema = GraphQLSchema(
        query=QueryType,
        subscription=SubscriptionTypeMultiple
    )

    stream = Subject()
    send_important_email, subscription = create_subscription(stream, test_schema)

    email = Email(
        from_='*****@*****.**',
        subject='Alright',
        message='Tests are good',
        unread=True,
    )
    l = []
    stream.subscribe(l.append)
    send_important_email(email)

    assert l[0][0] == email
class WSHandler(WebSocketHandler):
    def open(self):
        print("WebSocket opened")

        # A Subject is both an observable and observer, so we can both subscribe
        # to it and also feed (on_next) it with new values
        self.subject = Subject()

        # Now we take on our magic glasses and project the stream of bytes into
        # a ...
        query = self.subject.map(
            lambda obj: obj["keycode"]   # 1. stream of keycodes
        ).window_with_count(
            10, 1                        # 2. stream of windows (10 ints long)
        ).select_many(                   # 3. stream of booleans, True or False
            lambda win: win.sequence_equal(codes)
        ).filter(
            lambda equal: equal          # 4. stream of Trues
        )

        # 5. we then subscribe to the Trues, and signal Konami! if we see any
        query.subscribe(lambda x: self.write_message("Konami!"))

    def on_message(self, message):
        obj = json_decode(message)
        self.subject.on_next(obj)

    def on_close(self):
        print("WebSocket closed")
class WSSubject(Observer):
    def __init__(self, web_socket: WebSocketResponse):
        super(WSSubject, self).__init__()
        self._web_socket = web_socket
        self._push_subject = Subject()

    def to_observable(self):
        return self._push_subject

    async def process(self):
        async for msg in self._web_socket:
            self._push_subject.on_next(msg)
        self._push_subject.on_completed()

    def on_next(self, data):
        self._web_socket.send_str(data)

    def on_completed(self):
        # close web socket
        # has to be coroutine to close ws
        pass

    def on_error(self, error):
        # send error and close web socket
        pass
class PausableObservable(ObservableBase):
    def __init__(self, source, pauser=None):
        self.source = source
        self.controller = Subject()

        if pauser and hasattr(pauser, "subscribe"):
            self.pauser = self.controller.merge(pauser)
        else:
            self.pauser = self.controller

        super(PausableObservable, self).__init__()

    def _subscribe_core(self, observer):
        conn = self.source.publish()
        subscription = conn.subscribe(observer)
        connection = [Disposable.empty()]

        def on_next(b):
            if b:
                connection[0] = conn.connect()
            else:
                connection[0].dispose()
                connection[0] = Disposable.empty()

        pausable = self.pauser.distinct_until_changed().subscribe(on_next)
        return CompositeDisposable(subscription, connection[0], pausable)

    def pause(self):
        self.controller.on_next(False)

    def resume(self):
        self.controller.on_next(True)
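# Usage sketch for the pausable pattern above (hypothetical wiring, assuming
# RxPY 1.x where the `pausable` operator is available on observables, as the
# pausable tests further below also show). Values arriving while paused are
# dropped rather than buffered.
from rx.subjects import Subject

source = Subject()
controller = Subject()
source.pausable(controller).subscribe(print)

controller.on_next(True)    # resume: values flow through
source.on_next(1)           # printed
controller.on_next(False)   # pause
source.on_next(2)           # dropped while paused
controller.on_next(True)    # resume
source.on_next(3)           # printed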
def setup_streams(store, debug=False):
    reducer = reduce_action
    if debug:
        reducer = debug_reducer(print_traceback=False)(reducer)

    action_stream = Subject() \
        .merge(*state_mutating_actions) \
        .map(partial(resolve_action, store=store)) \
        .filter(lambda action: action is not None) \
        .map(lambda action: reducer(store.value, action))
    action_stream.subscribe(store)

    changed_notes_stream = store \
        .map(selectors.current_note) \
        .distinct_until_changed() \
        .debounce(1000, scheduler=scheduler) \
        .pausable_buffered(note_ready_stream)
    changed_notes_stream.subscribe(do_save_note)

    ui_state_stream = store \
        .map(selectors.ui_state) \
        .debounce(500, scheduler=scheduler) \
        .distinct_until_changed()
    ui_state_stream.subscribe(save_ui_state)

    actions.quit.subscribe(do_quit)
def get_subject(self):
    subject = Subject()
    subject.subscribe(
        self.on_next,
        self.on_error,
        self.on_completed
    )
    return subject
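# Hypothetical, self-contained illustration of the relay pattern above:
# anything pushed into the returned Subject is forwarded to the owner's
# on_next/on_error/on_completed handlers. The Printer class is made up
# for this sketch; it is not part of the snippet above.
from rx.subjects import Subject

class Printer:
    def on_next(self, value):
        print("got", value)

    def on_error(self, error):
        print("error:", error)

    def on_completed(self):
        print("done")

    def get_subject(self):
        subject = Subject()
        subject.subscribe(self.on_next, self.on_error, self.on_completed)
        return subject

relay = Printer().get_subject()
relay.on_next(42)       # -> "got 42"
relay.on_completed()    # -> "done"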
def build_threads(stream_to_testers, thread_target=_perform_subscribe):
    threads = []
    for stream, testers in stream_to_testers.items():
        subject = Subject()
        for tester in testers:
            subject.subscribe(tester)
        threads.append(_make_thread(thread_target, stream, subject))
    return threads
class Pirs:
    def __init__(self):
        io.setmode(io.BCM)
        self.pir_pin = 4
        io.setup(self.pir_pin, io.IN)
        self.pirStream = Subject()
        io.add_event_detect(self.pir_pin, io.RISING, callback=self.hit_callback)

    def hit_callback(self, channel):
        logger.logger.debug('PIR DETECTION!')
        self.pirStream.on_next(True)
class Window(Gtk.Window):
    def __init__(self):
        super().__init__()
        self.resize(600, 600)
        self.add_events(Gdk.EventMask.POINTER_MOTION_MASK)
        self.connect("motion-notify-event", self.on_mouse_move)
        self.mousemove = Subject()

    def on_mouse_move(self, widget, event):
        self.mousemove.on_next((event.x, event.y))
def on_next_left(value):
    s = Subject()

    with self.lock:
        _id = left_id[0]
        left_id[0] += 1
        left_map[_id] = s

    try:
        result = result_selector(value, add_ref(s, r))
    except Exception as e:
        log.error("*** Exception: %s" % e)
        for left_value in left_map.values():
            left_value.on_error(e)
        observer.on_error(e)
        return

    observer.on_next(result)

    for right_value in right_map.values():
        s.on_next(right_value)

    md = SingleAssignmentDisposable()
    group.add(md)

    def expire():
        if _id in left_map:
            del left_map[_id]
            s.on_completed()
        group.remove(md)

    try:
        duration = left_duration_selector(value)
    except Exception as e:
        for left_value in left_map.values():
            left_value.on_error(e)
        observer.on_error(e)
        return

    def on_error(e):
        for left_value in left_map.values():
            left_value.on_error(e)
        observer.on_error(e)

    md.disposable = duration.take(1).subscribe(nothing, on_error, expire)
def action(scheduler, state=None):
    s = None

    if is_shift:
        s = Subject()
        q.append(s)
        observer.on_next(add_ref(s, ref_count_disposable))

    if is_span:
        s = q.pop(0)
        s.on_completed()

    create_timer()
class Window(QWidget):
    def __init__(self):
        super(QWidget, self).__init__()
        self.setWindowTitle("Rx for Python rocks")
        self.resize(600, 600)
        self.setMouseTracking(True)

        # This Subject is used to transmit mouse moves to labels
        self.mousemove = Subject()

    def mouseMoveEvent(self, event):
        self.mousemove.on_next((event.x(), event.y()))
class Frame(wx.Frame):
    def __init__(self):
        super(Frame, self).__init__(None)
        self.SetTitle("Rx for Python rocks")
        self.SetSize((600, 600))

        # This Subject is used to transmit mouse moves to labels
        self.mousemove = Subject()

        self.Bind(wx.EVT_MOTION, self.OnMotion)

    def OnMotion(self, event):
        self.mousemove.on_next((event.GetX(), event.GetY()))
class DoorListener:
    def __init__(self):
        self.door_message_server = MessageServer(config.door_tag_port)
        self.openDoorStream = Subject()
        self.vibeDoorStream = Subject()

    def on_message(self, data):
        if data == 'door opened':
            self.openDoorStream.on_next(True)
        if data == 'door hit':
            self.vibeDoorStream.on_next(True)

    def listen(self):
        self.door_message_server.listen(self.on_message)
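# Minimal sketch of consuming two event Subjects like the door streams above.
# Standalone: plain Subjects stand in for the listener's streams, and it
# assumes the RxPY 1.x instance operators map/merge used elsewhere in these
# snippets.
from rx.subjects import Subject

open_stream = Subject()
vibe_stream = Subject()

open_stream.map(lambda _: "opened") \
    .merge(vibe_stream.map(lambda _: "hit")) \
    .subscribe(lambda kind: print("door", kind))

open_stream.on_next(True)   # -> "door opened"
vibe_stream.on_next(True)   # -> "door hit"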
def on_next_left(value):
    subject = Subject()

    with left.lock:
        _id = left_id[0]
        left_id[0] += 1
        left_map[_id] = subject

    try:
        result = (value, add_ref(subject, rcd))
    except Exception as e:
        log.error("*** Exception: %s" % e)
        for left_value in left_map.values():
            left_value.on_error(e)
        observer.on_error(e)
        return

    observer.on_next(result)

    for right_value in right_map.values():
        subject.on_next(right_value)

    md = SingleAssignmentDisposable()
    group.add(md)

    def expire():
        if _id in left_map:
            del left_map[_id]
            subject.on_completed()
        group.remove(md)

    try:
        duration = left_duration_mapper(value)
    except Exception as e:
        for left_value in left_map.values():
            left_value.on_error(e)
        observer.on_error(e)
        return

    def on_error(error):
        for left_value in left_map.values():
            left_value.on_error(error)
        observer.on_error(error)

    md.disposable = duration.pipe(ops.take(1)).subscribe_(nothing, on_error, expire, scheduler)
def __init__(self, new_players, exiting_players):
    self._new_players = []
    self._players = []
    self._removed_players = []

    new_players.subscribe(self.add_player)
    exiting_players.subscribe(self.remove_player)

    # streams api
    self.new_players_broadcast = Subject()
    self.removed_players_broadcast = Subject()
    self.collisions = Subject()
    self.players = BehaviorSubject([])
    self.players_count = self.players \
        .map(lambda ps: len(ps))
def open(self):
    scheduler = AsyncIOScheduler()

    print("WebSocket opened")

    # A Subject is both an observable and observer, so we can both subscribe
    # to it and also feed (send) it with new values
    self.subject = Subject()

    # Get all distinct key up events from the input and only fire if long enough and distinct
    searcher = self.subject.pipe(
        ops.map(lambda x: x["term"]),
        ops.filter(lambda text: len(text) > 2),  # Only if the text is longer than 2 characters
        ops.debounce(0.750),                     # Pause for 750ms
        ops.distinct_until_changed(),            # Only if the value has changed
        ops.flat_map_latest(search_wikipedia)
    )

    def send_response(x):
        self.write_message(x.body)

    def on_error(ex):
        print(ex)

    searcher.subscribe(send_response, on_error, scheduler=scheduler)
def test_subject_create():
    _x = [None]
    _ex = [None]
    done = False

    def on_next(x):
        _x[0] = x

    def on_error(ex):
        _ex[0] = ex

    def on_completed():
        done = True

    v = AnonymousObserver(on_next, on_error, on_completed)
    o = rx.return_value(42)

    s = Subject.create(v, o)

    def on_next2(x):
        _x[0] = x

    s.subscribe(on_next2)
    assert(42 == _x[0])

    s.on_next(21)

    e = 'ex'
    s.on_error(e)
    assert(e == _ex[0])

    s.on_completed()
    assert(not done)
def __init__(self, name=None, keys=None, desc=None, missing_value=np.nan,
             data_list=None, use_col_np=False):
    """
    :param name:
    :param keys:
    :param desc:
    :param missing_value:
    :param data_list:
    :param use_col_np: If True, column-based storage of the list is used when passing to numpy functions
    """
    super(DataSeries, self).__init__()
    self.name = name
    self.keys = self._get_key(keys, None)
    self.desc = desc if desc else name
    self.missing_value = missing_value
    self.data_list = []
    self.time_list = []
    self.data_time_dict = {}
    self.use_col_np = use_col_np
    self.subject = Subject()

    if data_list:
        for data in data_list:
            self.add(data)
def open(self):
    print("WebSocket opened")
    self.write_message("connection opened")

    def send_response(x):
        self.write_message(json.dumps(x))

    def on_error(ex):
        print(ex)

    self.subject = Subject()

    user_input = self.subject.throttle_last(
        1000  # Take the last value within the given time interval
    ).start_with(
        ''    # Immediately after subscription, emit the default value
    ).filter(
        lambda text: not text or len(text) > 2
    )

    interval_obs = Observable.interval(
        60000  # refresh the value every 60 seconds for periodic updates
    ).start_with(0)

    self.combine_latest_sbs = user_input.combine_latest(
        interval_obs,
        lambda input_val, i: input_val
    ).do_action(
        lambda x: send_response('clear')
    ).flat_map(
        self.get_data
    ).subscribe(send_response, on_error)
def open(self):
    print("WebSocket opened")

    # A Subject is both an observable and an observer
    self.subject = Subject()

    def send_response(x):
        self.write_message(json.dumps(x))

    def on_error(ex):
        print(ex)

    user_input = self.subject.throttle_last(
        1000  # Take the last value within the given time interval
    ).start_with(
        ''    # Immediately after subscription, emit the default value
    ).filter(
        lambda text: not text or len(text) > 2
    )

    interval_obs = Observable.interval(
        60000  # Emits a value every 60 s (for periodic refresh)
    ).start_with(0)

    # combine_latest combines the two streams (user queries and timer ticks)
    # and fires on any message from either stream
    self.combine_latest_sbs = user_input.combine_latest(
        interval_obs,
        lambda input_val, i: input_val
    ).do_action(
        # Fires for every emitted item
        # Sends a message to the front end to clear the list
        lambda x: send_response('clear')
    ).flat_map(
        # Chains in an observable that fetches the list
        self.get_data
    ).subscribe(send_response, on_error)
def open(self):
    print("WebSocket opened")

    # A Subject is both an observable and observer, so we can both subscribe
    # to it and also feed (on_next) it with new values
    self.subject = Subject()

    # Get all distinct key up events from the input and only fire if long enough and distinct
    query = self.subject.select(
        lambda x: x["term"]
    ).filter(
        lambda text: len(text) > 2  # Only if the text is longer than 2 characters
    ).throttle(
        0.750,  # Pause for 750ms
        scheduler=scheduler
    ).distinct_until_changed()  # Only if the value has changed

    searcher = query.flat_map_latest(search_wikipedia)

    def send_response(x):
        self.write_message(x.body)

    def on_error(ex):
        print(ex)

    searcher.subscribe(send_response, on_error)
class WSHandlerSubject(Observable):
    def __init__(self):
        self._subject = Subject()
        super(WSHandlerSubject, self).__init__(self._subject.subscribe)

    async def __call__(self, *args, **kwargs):
        request = args[0]
        web_socket = WebSocketResponse()
        await web_socket.prepare(request)

        ws_subject = WSSubject(web_socket)
        self._subject.on_next(ws_subject)

        await ws_subject.process()
        return web_socket
def __init__(self):
    super(QWidget, self).__init__()
    self.setWindowTitle("Rx for Python rocks")
    self.resize(600, 600)
    self.setMouseTracking(True)

    # This Subject is used to transmit mouse moves to labels
    self.mousemove = Subject()
def __init__(self):
    super().__init__()
    self.resize(600, 600)
    self.add_events(Gdk.EventMask.POINTER_MOTION_MASK)
    self.connect('motion-notify-event', self.on_mouse_move)
    self.mousemove = Subject()
def __init__(self):
    super(Frame, self).__init__(None)
    self.SetTitle("Rx for Python rocks")
    self.SetSize((600, 600))

    # This Subject is used to transmit mouse moves to labels
    self.mousemove = Subject()

    self.Bind(wx.EVT_MOTION, self.OnMotion)
def __init__(self, source, pauser=None):
    self.source = source
    self.controller = Subject()

    if pauser and hasattr(pauser, "subscribe"):
        self.pauser = self.controller.merge(pauser)
    else:
        self.pauser = self.controller

    super(PausableBufferedObservable, self).__init__(self._subscribe)
def __init__(self, enable_queue=True, scheduler=None):
    super(ControlledSubject, self).__init__(self._subscribe)

    self.subject = Subject()
    self.enable_queue = enable_queue
    self.queue = [] if enable_queue else None
    self.requested_count = 0
    self.requested_disposable = Disposable.empty()
    self.error = None
    self.has_failed = False
    self.has_completed = False
    self.scheduler = scheduler or current_thread_scheduler
def on_next(x):
    nonlocal s, n, window_id

    new_window = False
    new_id = 0

    s.on_next(x)
    n += 1
    if n == count:
        new_window = True
        n = 0
        window_id += 1
        new_id = window_id
        s.on_completed()
        s = Subject()
        observer.on_next(add_ref(s, ref_count_disposable))

    if new_window:
        create_timer(new_id)
def test_stop_default_loop_on_item(self):
    stopped = False
    stop_control = Subject()

    def stop_loop():
        nonlocal stopped
        stopped = True
        stop_control.on_next(True)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    loop.call_soon(stop_loop)

    driver = stop.make_driver()
    driver.call(stop.Sink(control=stop_control))

    loop.run_forever()
    loop.close()

    self.assertEqual(True, stopped)
def __init__(self, id, local_setvelocity_publisher, xoffset, yoffset, leader):
    self.local_setvelocity_publisher = local_setvelocity_publisher
    self.id = id
    self.xoffset = xoffset
    self.yoffset = yoffset
    self.leader = leader
    self.started = False
    self.ros_subscriptions = []

    self.flock_coordinates = {}
    self.leaderposition_subject = Subject()
    self.selfposition_subject = Subject()
    self.leadervelocity_subject = Subject()
    self.selfvelocity_subject = Subject()
    self.flock_repulsion = Subject()

    formation_position_attraction = Observable.combine_latest(
        self.leaderposition_subject,
        self.selfposition_subject,
        lambda leaderposition, selfposition: self.formation_position(leaderposition, selfposition))

    leaderVelocity = self.leadervelocity_subject \
        .map(lambda twist: self.format_velocities(twist))

    leaderVelocityDampening = Observable.combine_latest(
        self.leadervelocity_subject,
        self.selfvelocity_subject,
        lambda leadertwist, selftwist: self.velocity_dampening(leadertwist, selftwist))

    # self.navigate_subscription = Observable.combine_latest([leaderVelocity, formation_position_attraction, self.flock_repulsion], lambda vectors: self.navigate(vectors))

    self.navigate_subscription = Observable.combine_latest(
        [leaderVelocity, leaderVelocityDampening, formation_position_attraction, self.flock_repulsion],
        lambda *positions: positions) \
        .sample(self.SAMPLE_RATE) \
        .subscribe(lambda vectors: self.navigate(vectors))

    # self.navigate_subscription = self.flock_repulsion \
    #     .subscribe(lambda vectors: self.navigate([vectors]))

    self.flockSubscription = Observable.empty().subscribe()
def test_event_order_is_correct_for_multiple_publishes():
    # type: () -> None
    stream = Subject()

    send_important_email, subscription = create_subscription(stream)
    payload = []

    subscription.subscribe(payload.append)
    send_important_email(
        Email(
            from_="*****@*****.**",
            subject="Message",
            message="Tests are good",
            unread=True,
        )
    )
    send_important_email(
        Email(
            from_="*****@*****.**",
            subject="Message 2",
            message="Tests are good 2",
            unread=True,
        )
    )

    expected_payload1 = {
        "importantEmail": {
            "email": {"from": "*****@*****.**", "subject": "Message"},
            "inbox": {"unread": 1, "total": 2},
        }
    }
    expected_payload2 = {
        "importantEmail": {
            "email": {"from": "*****@*****.**", "subject": "Message 2"},
            "inbox": {"unread": 2, "total": 3},
        }
    }

    assert len(payload) == 2
    assert payload[0].data == expected_payload1
    assert payload[1].data == expected_payload2
def subscribe(observer):
    m = SerialDisposable()
    d = CompositeDisposable(m)
    r = RefCountDisposable(d)
    window = [Subject()]

    observer.on_next(add_ref(window[0], r))

    def on_next(x):
        window[0].on_next(x)

    def on_error(ex):
        window[0].on_error(ex)
        observer.on_error(ex)

    def on_completed():
        window[0].on_completed()
        observer.on_completed()

    d.add(source.subscribe(on_next, on_error, on_completed))

    def create_window_close():
        try:
            window_close = window_closing_selector()
        except Exception as exception:
            log.error("*** Exception: %s" % exception)
            observer.on_error(exception)
            return

        def on_completed():
            window[0].on_completed()
            window[0] = Subject()
            observer.on_next(add_ref(window[0], r))
            create_window_close()

        m1 = SingleAssignmentDisposable()
        m.disposable = m1
        m1.disposable = window_close.take(1).subscribe(noop, on_error, on_completed)

    create_window_close()
    return r
def test_paused_skips(self):
    subscription = [None]
    scheduler = TestScheduler()
    controller = Subject()
    results = scheduler.create_observer()

    xs = scheduler.create_hot_observable(
        on_next(150, 1),
        on_next(210, 2),
        on_next(230, 3),
        on_next(301, 4),
        on_next(350, 5),
        on_next(399, 6),
        on_completed(500)
    )

    def action0(scheduler, state):
        subscription[0] = xs.pausable(controller).subscribe(results)
        controller.on_next(True)
    scheduler.schedule_absolute(200, action0)

    def action1(scheduler, state):
        controller.on_next(False)
    scheduler.schedule_absolute(300, action1)

    def action2(scheduler, state):
        controller.on_next(True)
    scheduler.schedule_absolute(400, action2)

    def action3(scheduler, state):
        subscription[0].dispose()
    scheduler.schedule_absolute(1000, action3)

    scheduler.start()

    assert results.messages == [
        on_next(210, 2),
        on_next(230, 3),
        on_completed(500)
    ]
class Client:
    def __init__(self, host='localhost', port=8888):
        self.url = 'ws://{}:{}/exchange'.format(host, port)
        self.conn = None
        self.opened = Subject()
        self.messages = Subject()

    def write_message(self, message):
        self.conn.write_message(message)

    def connect(self):
        def on_message_callback(message):
            self.messages.on_next(message)

        def on_connect(connection):
            self.conn = connection
            self.opened.on_next(connection)
            self.opened.on_completed()
            self.opened.dispose()

        future = websocket_connect(self.url, on_message_callback=on_message_callback)
        Observable.from_future(future).subscribe(on_connect)
def subscribe(observer, scheduler=None):
    m = SerialDisposable()
    d = CompositeDisposable(m)
    r = RefCountDisposable(d)
    window = [Subject()]

    observer.on_next(add_ref(window[0], r))

    def on_next(value):
        window[0].on_next(value)

    def on_error(error):
        window[0].on_error(error)
        observer.on_error(error)

    def on_completed():
        window[0].on_completed()
        observer.on_completed()

    d.add(source.subscribe_(on_next, on_error, on_completed, scheduler))

    def create_window_on_completed():
        try:
            window_close = window_closing_mapper()
        except Exception as exception:
            observer.on_error(exception)
            return

        def on_completed():
            window[0].on_completed()
            window[0] = Subject()
            observer.on_next(add_ref(window[0], r))
            create_window_on_completed()

        m1 = SingleAssignmentDisposable()
        m.disposable = m1
        m1.disposable = window_close.pipe(ops.take(1)).subscribe_(noop, on_error, on_completed, scheduler)

    create_window_on_completed()
    return r
def test_multicast_hot_1(self):
    scheduler = TestScheduler()
    s = Subject()

    xs = scheduler.create_hot_observable(
        on_next(40, 0),
        on_next(90, 1),
        on_next(150, 2),
        on_next(210, 3),
        on_next(240, 4),
        on_next(270, 5),
        on_next(330, 6),
        on_next(340, 7),
        on_completed(390)
    )

    o = scheduler.create_observer()
    d1 = [None]
    d2 = [None]
    c = [None]

    def action(scheduler, state):
        c[0] = xs.multicast(s)
    scheduler.schedule_absolute(50, action)

    def action0(scheduler, state):
        d1[0] = c[0].subscribe(o)
    scheduler.schedule_absolute(100, action0)

    def action1(scheduler, state):
        d2[0] = c[0].connect()
    scheduler.schedule_absolute(200, action1)

    def action2(scheduler, state):
        d1[0].dispose()
    scheduler.schedule_absolute(300, action2)

    scheduler.start()

    o.messages.assert_equal(
        on_next(210, 3),
        on_next(240, 4),
        on_next(270, 5))

    xs.subscriptions.assert_equal(subscribe(200, 390))
class AsyncConnection(asyncio.Protocol, Connection):
    def __init__(self, on_connect):
        super().__init__()
        self.on_connect = on_connect

    def connection_made(self, transport: asyncio.Transport):
        peername = transport.get_extra_info('peername')
        logger.info('Connection from %s', peername)
        self.transport = transport
        self.data_in = Subject()
        self.data_out = AnonymousObserver(self.on_data_out_next,
                                          self.on_data_out_error,
                                          self.on_data_out_completed)
        self.on_connect(self)

    def eof_received(self):
        logger.debug('data eof received')
        self.data_in.on_completed()

    def resume_writing(self):
        logger.debug('data resume')

    def pause_writing(self):
        logger.debug('data pause')

    def connection_lost(self, exc):
        logger.debug('data connection lost')
        self.data_in.on_error(exc)

    def data_received(self, data):
        logger.debug('data received: %s', data)
        self.data_in.on_next(data)

    def on_data_out_next(self, data):
        logger.debug('sending: %s', data)
        self.transport.write(data)

    def on_data_out_error(self, exception):
        logger.exception('data_out error: %r', exception)
        self.close()

    def on_data_out_completed(self):
        logger.info('data_out completed')
        self.close()

    def close(self):
        self.transport.close()
def adapter(source):
    sink_request = Subject()

    def walk(top, recursive=False):
        def on_subscribe(observer):
            response = (source.filter(lambda i: i.id is response_observable)
                        .take(1)
                        .map(lambda i: i.content))
            dispose = response.subscribe(observer)
            sink_request.on_next(Walk(
                id=response_observable,
                top=top,
                recursive=recursive,
            ))
            return dispose

        response_observable = Observable.create(on_subscribe)
        return response_observable

    return Adapter(sink=sink_request, api=Api(walk=walk))
def test_paused_with_state_change_in_subscriber(self):
    scheduler = TestScheduler()
    results = scheduler.create_observer()

    xs = scheduler.create_hot_observable(
        on_next(150, 1),
        on_next(210, 2),
        on_next(250, 3),
        on_next(270, 4),
        on_next(330, 5),
        on_completed(500)
    )

    controller = Subject()
    pausable_buffered = xs.pausable_buffered(controller)

    def action1(scheduler, state):
        def on_next(value):
            results.on_next(value)
            controller.on_next(False)

            def action2(scheduler, state):
                controller.on_next(True)
            scheduler.schedule_relative(100, action2)

        subscription = pausable_buffered.subscribe_(on_next, results.on_error, results.on_completed)
        controller.on_next(True)
    scheduler.schedule_absolute(200, action1)

    scheduler.start()

    assert results.messages == [
        on_next(210, 2),
        on_next(310, 3),
        on_next(310, 4),
        on_next(410, 5),
        on_completed(500)
    ]
def test_multicast_hot_2(self):
    c = [None]
    d1 = [None]
    d2 = [None]
    scheduler = TestScheduler()

    xs = scheduler.create_hot_observable(
        on_next(40, 0),
        on_next(90, 1),
        on_next(150, 2),
        on_next(210, 3),
        on_next(240, 4),
        on_next(270, 5),
        on_next(330, 6),
        on_next(340, 7),
        on_completed(390)
    )

    s = Subject()
    o = scheduler.create_observer()

    def action0(scheduler, state):
        c[0] = xs.multicast(s)
    scheduler.schedule_absolute(50, action0)

    def action1(scheduler, state):
        d2[0] = c[0].connect(scheduler)
    scheduler.schedule_absolute(100, action1)

    def action2(scheduler, state):
        d1[0] = c[0].subscribe(o, scheduler)
    scheduler.schedule_absolute(200, action2)

    def action3(scheduler, state):
        return d1[0].dispose()
    scheduler.schedule_absolute(300, action3)

    scheduler.start()

    assert o.messages == [
        on_next(210, 3),
        on_next(240, 4),
        on_next(270, 5)
    ]

    assert xs.subscriptions == [subscribe(100, 390)]
def __init__(self, tupleSelector: TupleSelector, cacheEnabled: bool = True) -> None:
    if _CachedSubscribedData.__memoryLoggingEnabled:
        _CachedSubscribedData.__memoryLoggingRefs.append(weakref.ref(self))

    self.tupleSelector: TupleSelector = tupleSelector
    self.vortexUuids: Set[str] = set()
    self.tearDownDate: Optional[datetime] = None
    self.encodedPayload: Optional[bytes] = None

    #: Is the cache enabled
    # HINT: Once it's turned off for a tupleSelector, it remains off.
    # Disabling it can also be done from VortexJS
    self.cacheEnabled = cacheEnabled

    #: Last Server Payload Date
    # If the server has responded with a payload, this is the date in the payload
    # @type {Date | null}
    self.lastServerPayloadDate: Optional[datetime] = None

    self.subject = Subject()
def main():
    root = Tk()
    root.title("Rx for Python rocks")
    scheduler = TkinterScheduler(root)

    mousemove = Subject()

    frame = Frame(root, width=600, height=600)
    frame.bind("<Motion>", mousemove.on_next)

    text = 'TIME FLIES LIKE AN ARROW'

    def on_next(info):
        label, ev, i = info
        label.place(x=ev.x + i*12 + 15, y=ev.y)

    def handle_label(label, i):
        label.config(dict(borderwidth=0, padx=0, pady=0))
        mapper = ops.map(lambda ev: (label, ev, i))
        delayer = ops.delay(i*0.1)

        return mousemove.pipe(
            delayer,
            mapper
        )

    labeler = ops.flat_map_indexed(handle_label)
    mapper = ops.map(lambda c: Label(frame, text=c))

    rx.from_(text).pipe(
        mapper,
        labeler
    ).subscribe(on_next, on_error=print, scheduler=scheduler)

    frame.pack()
    root.mainloop()
def test_subject_create():
    _x = None
    _ex = None
    done = False

    def on_next(x):
        nonlocal _x
        _x = x

    def on_error(ex):
        nonlocal _ex
        _ex = ex

    def on_completed():
        done = True

    v = Observer(on_next, on_error, on_completed)
    o = Observable.return_value(42)

    s = Subject.create(v, o)

    def on_next2(x):
        nonlocal _x
        _x = x

    s.subscribe(on_next2)
    assert (42 == _x)

    s.on_next(21)

    e = 'ex'
    s.on_error(e)
    assert (e == _ex)

    s.on_completed()
    assert (not done)
def make_sink_proxies(drivers):
    ''' Build a list of sink proxies.

    Sink proxies are a two-level ordered dictionary. The first level contains
    the list of drivers, and the second level contains the list of sink
    proxies for each driver:

        drv1-->sink1
        |       |->sink2
        |
        drv2-->sink1
                |->sink2
    '''
    sink_proxies = OrderedDict()
    if drivers is not None:
        for driver_name in drivers._fields:
            driver = getattr(drivers, driver_name)
            driver_sink = getattr(driver, 'input')
            if driver_sink is not None:
                driver_sink_proxies = OrderedDict()
                for name in driver_sink._fields:
                    driver_sink_proxies[name] = Subject()
                sink_proxies[driver_name] = driver.input(**driver_sink_proxies)
    return sink_proxies
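# Hypothetical drivers structure to exercise make_sink_proxies above; the
# Sink/Driver/Drivers namedtuples are made up for this sketch and only mimic
# the shape the docstring describes.
from collections import namedtuple

Sink = namedtuple('Sink', ['control', 'request'])
Driver = namedtuple('Driver', ['input'])      # 'input' holds the Sink type
Drivers = namedtuple('Drivers', ['httpd'])

proxies = make_sink_proxies(Drivers(httpd=Driver(input=Sink)))
print(proxies['httpd'])   # Sink(control=<Subject ...>, request=<Subject ...>)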
def __init__(self, chr_contr, executor, gen0=None, opts=default_options):
    self.gen0 = gen0
    self.chr_contr = chr_contr
    self.pauser = Pauser()
    self.executor = executor
    self.executor.kpp_fun = self.pauser.kpp
    self.sub_chr_best = ReplaySubject()
    self.sub_gen_solved = Subject()
    self.sub_gen_unsolved = Subject()
    self.subj_done = Subject()
    self.su_genetic = StepUpGenetic(chr_contr)
    self.opts = dict(default_options)
    self.init_opts(opts)
    self.stop_flag = True
    self.gen_flag = True
    self.init_logic()
def test_add_route(self):
    routes = [
        httpd.AddRoute(methods=['GET'], path='/foo', id='foo'),
        httpd.AddRoute(methods=['POST'], path='/bar', id='bar'),
        httpd.AddRoute(methods=['PUT'], path='/biz', id='biz'),
    ]
    actual_routes = []

    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    asyncio.set_event_loop(loop)

    sink = httpd.Sink(control=Subject())

    def setup(sink):
        sink.control.on_next(httpd.Initialize())
        sink.control.on_next(httpd.StartServer(host='localhost', port=9999))
        for route in routes:
            sink.control.on_next(route)

    def on_route_item(i):
        if type(i) is httpd.RouteAdded:
            actual_routes.append(i)
            # stop mainloop when last route is created
            if i.id == routes[-1].id:
                asyncio.get_event_loop().stop()

    loop.call_soon(setup, sink)
    source = httpd.make_driver(loop).call(sink)
    source.route.subscribe(on_route_item)
    loop.run_forever()
    loop.close()

    self.assertEqual(len(routes), len(actual_routes))
    for index, route in enumerate(actual_routes):
        self.assertEqual(routes[index].path, route.path)
        self.assertEqual(routes[index].id, route.id)
        self.assertIsInstance(route.request, Observable)
def __init__(self):
    scheduler = IOLoopScheduler(IOLoop.current())

    self._app = Application([
        (r'/exchange', ExchangeHandler),
    ])

    self._servers = [
        ['localhost', '8888'],
        ['localhost', '7777'],
    ]
    self._current_server = 0

    self.messages = Subject()

    def passthrough_to_matching_server(msg):
        if self.matching_server_connection is None:
            print('server is DOWN')
            return
        self.matching_server_connection.write_message(msg)

    self.only_messages = self.messages \
        .filter(lambda msg: msg[0] == 'message') \
        .map(lambda msg: msg[1]) \
        .subscribe(passthrough_to_matching_server)
def subject_selector(): return Subject()
def subject_factory(scheduler): return Subject()
def get_config():
    config_file = open(os.path.join(DIRNAME, '..', 'config.json'), 'r')
    config = json.load(config_file)
    config_file.close()
    return config

CONFIG = get_config()
VARS = {
    'endpoint_id': None,
    'message_queue': ReplaySubject(),
    'message_id': 0,
    'sock': None,
    'message_buffer': []
}
EVENT_SOURCE = Subject()

def get_message_id():
    """get unique message id"""
    num = VARS['message_id']
    VARS['message_id'] = VARS['message_id'] + 1
    return '{!s}:{!s}'.format(VARS['endpoint_id'], num)

def _on_message(msg):
    if msg is None:
        # disconnected
        on_disconnect()
        reconnect()
    else:
class HttpParser(Observer):
    def __init__(self, conn):
        super(HttpParser, self).__init__()
        self.conn = conn
        self.buf = StringIO()
        self.requests_in = Subject()
        self.responses_out = HttpWriter(conn)
        self.keep_alive_timeout_dispose = Disposable.empty()
        self.read_timeout_dispose = Disposable.empty()
        self.keep_alive_timer_on()

    def schedule_timeout(self, seconds):
        def action(scheduler, state=None):
            print 'timeout', seconds
            self.requests_in.on_error(HttpResponse(408, 'Request Timeout'))
        return scheduler.schedule_relative(timedelta(seconds=seconds), action)

    def clear_timeout(self, disposable):
        try:
            disposable.dispose()
        except:
            # Twisted sometimes complains when we try to cancel timeout after it has already fired
            pass

    def keep_alive_timer_on(self):
        self.keep_alive_timer_off()
        self.keep_alive_timeout_dispose = self.schedule_timeout(KEEP_ALIVE_TIMEOUT)

    def keep_alive_timer_off(self):
        self.clear_timeout(self.keep_alive_timeout_dispose)

    def read_timer_on(self):
        self.read_timer_off()
        self.read_timeout_dispose = self.schedule_timeout(READ_TIMEOUT)

    def read_timer_off(self):
        self.clear_timeout(self.read_timeout_dispose)

    def parse_request(self, buf):
        lines = buf.split('\r\n')
        first_line = lines[0].split()
        if len(first_line) == 3:
            self.requests_in.on_next(HttpRequest(self.conn, first_line[0], first_line[1]))
        else:
            self.requests_in.on_error(HttpResponse(400, 'Bad Request'))

    def on_next(self, data):
        self.keep_alive_timer_off()
        self.read_timer_on()

        self.buf.write(data)          # append new data
        buf = self.buf.getvalue()
        eor = buf.find('\r\n\r\n')    # check we've got full request
        if eor >= 0:
            self.buf = StringIO()
            self.buf.write(buf[eor + 4:])  # leave remainder in buf
            self.parse_request(buf[:eor])
            self.read_timer_off()
            self.keep_alive_timer_on()

    def on_error(self, e):
        print 'parser got error', e
        self.keep_alive_timer_off()
        self.read_timer_off()
        self.requests_in.on_error(HttpResponse(500, 'Internal Server Error'))

    def on_completed(self):
        print 'parser completed'
        self.keep_alive_timer_off()
        self.read_timer_off()
        self.requests_in.on_completed()
def connectionMade(self):
    self.data_in = Subject()
    self.listener.on_next(self)
class WorkspaceProcessContext:
    """
    This class is a process-scoped object that is initialized using the repository handles from a
    Workspace.

    The responsibility of this class is to:

    1. Maintain an up-to-date dictionary of repository locations
    2. Create `WorkspaceRequestContexts` whenever a request is made
    3. Run watch thread processes that monitor repository locations

    In most cases, you will want to create a `WorkspaceRequestContext` to make use of this class.
    """

    def __init__(self, instance, workspace, version=None):
        # lazy import for perf
        from rx.subjects import Subject

        self._instance = check.inst_param(instance, "instance", DagsterInstance)
        self._workspace = workspace

        self._location_state_events = Subject()
        self._location_state_subscriber = LocationStateSubscriber(
            self._location_state_events_handler
        )

        self.version = version

        self._initialize_repository_locations()

    def _initialize_repository_locations(self):
        self._repository_locations = {}

        for location in self._workspace.repository_locations:
            check.invariant(
                self._repository_locations.get(location.name) is None,
                'Cannot have multiple locations with the same name, got multiple "{name}"'.format(
                    name=location.name,
                ),
            )
            location.add_state_subscriber(self._location_state_subscriber)
            self._repository_locations[location.name] = location

    def create_request_context(self):
        return WorkspaceRequestContext(
            instance=self.instance,
            workspace_snapshot=self._workspace.create_snapshot(),
            repository_locations_dict=self._repository_locations.copy(),
            process_context=self,
            version=self.version,
        )

    @property
    def instance(self):
        return self._instance

    @property
    def repository_locations(self):
        return list(self._repository_locations.values())

    @property
    def location_state_events(self):
        return self._location_state_events

    def _location_state_events_handler(self, event):
        # If the server was updated or we were not able to reconnect, we immediately reload the
        # location handle
        if event.event_type in (
            LocationStateChangeEventType.LOCATION_UPDATED,
            LocationStateChangeEventType.LOCATION_ERROR,
        ):
            # In case of an updated location, reload the handle to get updated repository data and
            # re-attach a subscriber
            # In case of a location error, just reload the handle in order to update the workspace
            # with the correct error messages
            self.reload_repository_location(event.location_name)

        self._location_state_events.on_next(event)

    def reload_repository_location(self, name):
        self._workspace.reload_repository_location(name)

        if self._workspace.has_repository_location(name):
            new_location = self._workspace.get_repository_location(name)
            new_location.add_state_subscriber(self._location_state_subscriber)
            check.invariant(new_location.name == name)
            self._repository_locations[name] = new_location
        elif name in self._repository_locations:
            del self._repository_locations[name]

        return self

    def reload_workspace(self):
        self._workspace.reload_workspace()
        self._initialize_repository_locations()
def __init__(self, width, height, path, plist, target, is_workspace,
             is_release, is_http, fir='', store_name='', store_pwd=''):
    """
    This window has a fixed width and height; it cannot be resized.
    :param width: window width
    :param height: window height
    Public attributes:
        window, the main window
        source, the Rx subject; subscribe via source.subscribe(... to receive callbacks
    """
    TkBase.__init__(self, width, height)

    self.project_path = tk.StringVar()
    self.plist_path = tk.StringVar()
    self.target = tk.StringVar()
    self.project_path.set(os.path.abspath('..'))  # default: parent directory
    self.plist_path.set(os.path.abspath('.') + '/ipaAppStore.plist')  # default: current directory
    self.record_info = tk.StringVar()
    self.record_info.set('AutomationIpaInfo')

    self.is_workspace = tk.BooleanVar()
    self.is_workspace.set(True)
    self.is_release = tk.BooleanVar()
    self.is_release.set(True)
    self.is_git = tk.BooleanVar()
    self.is_git.set(True)
    self.is_http = tk.BooleanVar()
    self.is_http.set(False)

    self.fir_token = tk.StringVar()
    self.fir_token.set('')
    self.is_fir = tk.BooleanVar()
    self.is_fir.set(True)

    self.store_name = tk.StringVar()
    self.store_name.set('')
    self.store_pwd = tk.StringVar()
    self.store_pwd.set('')
    self.is_store = tk.BooleanVar()
    self.is_store.set(True)

    self.__make_var()
    self.__make_widgets()

    self.source = Subject()

    self.project_path.set(path)
    self.plist_path.set(plist)
    self.target.set(target)
    self.is_workspace.set(is_workspace)
    self.is_release.set(is_release)
    self.is_fir.set(not is_release)
    self.is_store.set(is_release)
    self.is_http.set(is_http)
    self.fir_token.set(fir)
    self.store_name.set(store_name)
    self.store_pwd.set(store_pwd)
class TkSelect(TkBase):
    def __init__(self, width, height, path, plist, target, is_workspace,
                 is_release, is_http, fir='', store_name='', store_pwd=''):
        """
        This window has a fixed width and height; it cannot be resized.
        :param width: window width
        :param height: window height
        Public attributes:
            window, the main window
            source, the Rx subject; subscribe via source.subscribe(... to receive callbacks
        """
        TkBase.__init__(self, width, height)

        self.project_path = tk.StringVar()
        self.plist_path = tk.StringVar()
        self.target = tk.StringVar()
        self.project_path.set(os.path.abspath('..'))  # default: parent directory
        self.plist_path.set(os.path.abspath('.') + '/ipaAppStore.plist')  # default: current directory
        self.record_info = tk.StringVar()
        self.record_info.set('AutomationIpaInfo')

        self.is_workspace = tk.BooleanVar()
        self.is_workspace.set(True)
        self.is_release = tk.BooleanVar()
        self.is_release.set(True)
        self.is_git = tk.BooleanVar()
        self.is_git.set(True)
        self.is_http = tk.BooleanVar()
        self.is_http.set(False)

        self.fir_token = tk.StringVar()
        self.fir_token.set('')
        self.is_fir = tk.BooleanVar()
        self.is_fir.set(True)

        self.store_name = tk.StringVar()
        self.store_name.set('')
        self.store_pwd = tk.StringVar()
        self.store_pwd.set('')
        self.is_store = tk.BooleanVar()
        self.is_store.set(True)

        self.__make_var()
        self.__make_widgets()

        self.source = Subject()

        self.project_path.set(path)
        self.plist_path.set(plist)
        self.target.set(target)
        self.is_workspace.set(is_workspace)
        self.is_release.set(is_release)
        self.is_fir.set(not is_release)
        self.is_store.set(is_release)
        self.is_http.set(is_http)
        self.fir_token.set(fir)
        self.store_name.set(store_name)
        self.store_pwd.set(store_pwd)

    def __make_var(self):
        path = os.path.abspath('..')  # current parent directory
        self.project_path.set(path)   # default: parent directory
        # scan all files in the parent directory
        for name in os.listdir(path):
            if name.endswith('.xcodeproj'):
                self.target.set(name.split('.')[0])
                self.is_workspace.set(False)
            if name.endswith('.xcworkspace'):
                self.target.set(name.split('.')[0])
                self.is_workspace.set(True)

    def run(self):
        self.window.mainloop()

    def quit(self):
        self.window.quit()
        self.window.destroy()

    def __make_widgets(self):
        # bottom-level containers
        frame0 = tk.Frame(self.window)
        frame0.grid(row=0, column=0)
        frame1 = tk.Frame(self.window)
        frame1.grid(row=1, column=0)
        frame2 = tk.Frame(self.window)
        frame2.grid(row=2, column=0)

        # frame0 container
        self.__make_title_empty(frame0, 0, 0)
        f1 = tk.Frame(frame0)
        f1.grid(row=1, column=0)
        f2 = tk.Frame(frame0)
        f2.grid(row=1, column=1)

        # frame1 container
        self.__make_title_empty(frame1, 0, 0)
        self.__make_select_confirm(frame1, 1, 0)

        # frame2 container
        self.__make_title_info(frame2, 0, 0)

        # f1 container
        self.__make_title_empty(f1, 0, 0)
        self.__make_title(f1, 0, 1, '项目目录:(选择.xcworkspace or .xcodeproj文件)')
        self.__make_title_empty(f1, 1, 0)
        self.__make_select_text(f1, 1, 1, 1, self.project_path)
        self.__make_title_empty(f1, 2, 0)
        self.__make_title(f1, 2, 1, '配置.plist 路径:(对应是否Release,不可单独修改)')
        self.__make_title_empty(f1, 3, 0)
        self.__make_select_text(f1, 3, 1, 2, self.plist_path)
        self.__make_title_empty(f1, 4, 0)
        self.__make_title(f1, 4, 1, '项目 Target:')
        self.__make_title_empty(f1, 5, 0)
        self.__make_select_text(f1, 5, 1, 2, self.target, False)
        """
        self.__make_title_empty(f1, 6, 0)
        self.__make_title(f1, 6, 1, '配置存储文件名')
        self.__make_title_empty(f1, 7, 0)
        self.__make_select_text(f1, 7, 1, 2, self.record_info, False)
        """
        self.__make_title_empty(f1, 8, 0)
        self.__make_title(f1, 8, 1, 'Fir Token:')
        self.__make_title_empty(f1, 9, 0)
        self.__make_select_text(f1, 9, 1, 2, self.fir_token, False)
        self.__make_title_empty(f1, 10, 0)
        self.__make_title(f1, 10, 1, 'App Store 账号:')
        self.__make_title_empty(f1, 11, 0)
        self.__make_select_text(f1, 11, 1, 2, self.store_name, False)
        self.__make_title_empty(f1, 12, 0)
        self.__make_title(f1, 12, 1, 'App Store 密码:')
        self.__make_title_empty(f1, 13, 0)
        self.__make_select_text(f1, 13, 1, 2, self.store_pwd, False)

        # f2 container
        self.__make_title(f2, 0, 0, '是否.workspace:', 15, 'e')
        self.__make_radio_button(f2, 0, 1, '是', True, self.is_workspace)
        self.__make_radio_button(f2, 0, 2, '否', False, self.is_workspace)
        self.__make_title_empty(f2, 1, 0)
        self.__make_title(f2, 2, 0, '是否 Release:', 15, 'e')
        self.__make_radio_button(f2, 2, 1, '是', True, self.is_release)
        self.__make_radio_button(f2, 2, 2, '否', False, self.is_release)
        self.__make_title_empty(f2, 3, 0)
        self.__make_title(f2, 4, 0, 'Git or SVN:', 15, 'e')
        self.__make_radio_button(f2, 4, 1, 'Git', True, self.is_git)
        self.__make_radio_button(f2, 4, 2, 'SVN', False, self.is_git)
        self.__make_title_empty(f2, 5, 0)
        self.__make_title(f2, 6, 0, '开启http服务:', 15, 'e')
        self.__make_radio_button(f2, 6, 1, '开', True, self.is_http)
        self.__make_radio_button(f2, 6, 2, '关', False, self.is_http)
        self.__make_title_empty(f2, 7, 0)
        self.__make_title(f2, 8, 0, '上传到fir.im:', 15, 'e')
        self.__make_radio_button(f2, 8, 1, '是', True, self.is_fir)
        self.__make_radio_button(f2, 8, 2, '否', False, self.is_fir)
        self.__make_title_empty(f2, 9, 0)
        self.__make_title(f2, 10, 0, '上传到App Store:', 15, 'e')
        self.__make_radio_button(f2, 10, 1, '是', True, self.is_store)
        self.__make_radio_button(f2, 10, 2, '否', False, self.is_store)
        self.__make_title_empty(f2, 11, 0)

    def __make_title_empty(self, frame, row, column):
        tk.Label(frame, text='', width=1, anchor='w').grid(row=row, column=column)

    def __make_title_info(self, frame, row, column):
        tk.Label(frame, text=info, font=('Arial', 10), fg='gray', width=90,
                 justify='left', anchor='w').grid(row=row, column=column)

    def __make_title(self, frame, row, column, title=' ', width=35, anchor='w'):
        tk.Label(frame, text=title, width=width, anchor=anchor).grid(row=row, column=column)

    def __make_select_text(self, frame, row, column, tag, text, sele=True):
        entry = tk.Entry(frame, width=35, textvariable=text)
        entry.grid(row=row, column=column)
        if sele:
            button = tk.Button(frame, text='选择')
            button.grid(row=row, column=column + 1)
            button['command'] = (lambda: self.__click_select(entry, tag))

    def __make_radio_button(self, frame, row, column, text, val, var):
        tk.Radiobutton(frame, text=text, width=5, variable=var, value=val).grid(row=row, column=column)

    def __print_selection(self, tag):
        pass

    def __click_select(self, entry, tag):
        if tag == 1:
            path = filedialog.askopenfilename()
            if len(path) > 0:
                t = '/'
                list = path.split('/')
                pro = list.pop()
                path = t.join(list)
                self.is_workspace.set('.xcworkspace' in pro)
                tag = pro.split('.')[0]
                self.project_path.set(path)
                self.target.set(tag)
        else:
            path = filedialog.askopenfilename()
            if len(path) > 0:
                t = '/'
                self.plist_path.set(path)
                list = path.split('/')
                pro = list.pop()
                self.is_release.set('AppStore' in pro)

    def __select_excel_input(self, entry_box):
        path = filedialog.askopenfilename()
        entry_box.delete(0, len(entry_box.get()))
        entry_box.insert(0, path)
        self.project_path = path
        # If a method never uses self, the IDE hints "Method '__select_excel_input' may be 'static'"
        # and suggests either adding @staticmethod or moving it outside the class

    def __select_excel_output(self, entry_box):
        path = askdirectory()
        entry_box.delete(0, len(entry_box.get()))
        entry_box.insert(0, path)
        return path

    def __make_select_confirm(self, frame, row, column):
        button = tk.Button(frame, width=15, text='开启自动打包')
        button.grid(row=row, column=column)
        button['command'] = (lambda: self.__click_browser())

    def __click_browser(self):
        if len(self.project_path.get()) > 0 and len(
                self.plist_path.get()) > 0 and len(self.target.get()) > 0:
            self.quit()
            # The plist entry box was removed; reconfigure here based on the selection.
            """
            if self.is_release.get():
                self.plist_path.set(os.path.abspath('.') + '/ipaAppStore.plist')
            else:
                self.plist_path.set(os.path.abspath('.') + '/ipaAdHoc.plist')
            """
            self.source.on_next(
                (self.project_path.get(), self.plist_path.get(),
                 self.target.get(), self.is_workspace.get(),
                 self.is_release.get(), self.is_http.get(),
                 self.fir_token.get(), self.store_name.get(),
                 self.store_pwd.get()))
def create_window():
    s = Subject()
    q.append(s)
    observer.on_next(add_ref(s, refCountDisposable))
class ControlledSubject(ObservableBase, Observer):
    def __init__(self, enable_queue=True, scheduler=None):
        super(ControlledSubject, self).__init__()

        self.subject = Subject()
        self.enable_queue = enable_queue
        self.queue = [] if enable_queue else None
        self.requested_count = 0
        self.requested_disposable = Disposable.empty()
        self.error = None
        self.has_failed = False
        self.has_completed = False
        self.scheduler = scheduler or current_thread_scheduler

    def _subscribe_core(self, observer):
        return self.subject.subscribe(observer)

    def on_completed(self):
        self.has_completed = True

        if not self.enable_queue or len(self.queue) == 0:
            self.subject.on_completed()
            self.dispose_current_request()
        else:
            self.queue.append(OnCompleted())

    def on_error(self, error):
        self.has_failed = True
        self.error = error

        if not self.enable_queue or len(self.queue) == 0:
            self.subject.on_error(error)
            self.dispose_current_request()
        else:
            self.queue.append(OnError(error))

    def on_next(self, value):
        if self.requested_count <= 0:
            self.enable_queue and self.queue.append(OnNext(value))
        else:
            self.requested_count -= 1
            if self.requested_count == 0:
                self.dispose_current_request()
            self.subject.on_next(value)

    def _process_request(self, number_of_items):
        if self.enable_queue:
            while len(self.queue) > 0 and (number_of_items > 0 or self.queue[0].kind != 'N'):
                first = self.queue.pop(0)
                first.accept(self.subject)
                if first.kind == 'N':
                    number_of_items -= 1
                else:
                    self.dispose_current_request()
                    self.queue = []

        return number_of_items

    def request(self, number):
        self.dispose_current_request()

        def action(scheduler, i):
            remaining = self._process_request(i)
            stopped = self.has_completed and self.has_failed
            if not stopped and remaining > 0:
                self.requested_count = remaining

                def dispose():
                    self.requested_count = 0
                return AnonymousDisposable(dispose)

        # Scheduled item is still in progress. Return a new
        # disposable to allow the request to be interrupted
        # via dispose.
        self.requested_disposable = self.scheduler.schedule(action, state=number)
        return self.requested_disposable

    def dispose_current_request(self):
        if self.requested_disposable:
            self.requested_disposable.dispose()
            self.requested_disposable = None