def light_state_equals(light_id: str, light_final_state: str,
                       test_context: Data.TestContext,
                       loop: asyncio.AbstractEventLoop,
                       awaitables: List[RxObservable]):
    """Build an observable that completes once the given light reaches
    ``light_final_state`` (or the 10s timeout fires), and register it
    in ``awaitables`` for the caller to await later.
    """
    assert isinstance(light_id, str)
    assert isinstance(light_final_state, str)

    def take_while_state(payload: Structs.s_lights_state) -> bool:
        # Keep taking items while the light has NOT reached the target
        # state; inclusive=True below also emits the final matching item.
        return payload.newState != light_final_state

    timeout_sec = 10.0
    lightbulb: Data.Lightbulb = test_context.lightbulbs[light_id]
    observable: RxObservable = lightbulb.light_state.pipe(
        RxOp.timeout(timeout_sec),
        RxOp.observe_on(scheduler=AsyncIOScheduler(loop)),
        RxOp.take_while(take_while_state, inclusive=True),
    )
    # Debug subscription: trace every notification on the asyncio loop.
    observable.subscribe(
        on_next=lambda i: print(f"on_next: {i}"),
        on_error=lambda e: print(f"on_error: {e}"),
        on_completed=lambda: print("on_completed"),
        scheduler=AsyncIOScheduler(loop))
    awaitables.append(observable)
async def go():
    """An immediately scheduled action must run on the asyncio scheduler."""
    scheduler = AsyncIOScheduler(loop)
    invoked = False

    def action(sched, state):
        nonlocal invoked
        invoked = True

    scheduler.schedule(action)
    # Give the loop a moment to execute the scheduled callback.
    await asyncio.sleep(0.1)
    assert invoked is True
async def go():
    """An immediately scheduled action must run on the asyncio scheduler.

    Rewritten from a generator-based coroutine: ``asyncio.sleep(...,
    loop=...)`` was deprecated in Python 3.8 and removed in 3.10, and
    ``yield from``-style coroutines are likewise gone, so use
    async/await and let ``sleep`` pick up the running loop.
    """
    scheduler = AsyncIOScheduler(loop)
    ran = False

    def action(scheduler, state):
        nonlocal ran
        ran = True

    scheduler.schedule(action)
    await asyncio.sleep(0.1)
    assert ran is True
async def go():
    """A relative-scheduled action must not fire after being disposed."""
    fired = False
    scheduler = AsyncIOScheduler(loop)

    def action(sched, state):
        nonlocal fired
        fired = True

    disposable = scheduler.schedule_relative(0.05, action)
    disposable.dispose()
    # Sleep well past the 0.05s schedule point to prove cancellation.
    await asyncio.sleep(0.3)
    assert fired is False
async def go():
    """schedule_relative must delay the action by roughly the requested time."""
    scheduler = AsyncIOScheduler(loop)
    started = loop.time()
    finished = None

    def action(sched, state):
        nonlocal finished
        finished = loop.time()

    scheduler.schedule_relative(0.2, action)
    await asyncio.sleep(0.3)
    assert finished is not None
    # Allow a little scheduling jitter below the nominal 0.2s delay.
    assert finished - started > 0.18
async def to_agen(obs, loop, get_feedback_observer):
    """Bridge an rx observable to an async generator.

    Notifications are materialized (OnNext/OnError/OnCompleted) into an
    asyncio queue; every 500 OnNext items the feedback observer (if one
    is available) receives ``(item key, queue depth)`` so a producer can
    observe backpressure.
    """
    queue = asyncio.Queue()
    index = 0

    def on_next(i):
        nonlocal index
        queue.put_nowait(i)
        if isinstance(i, OnNext):
            index += 1
            if index == 500:
                index = 0
                obv = get_feedback_observer()
                if obv is not None:
                    obv.on_next((i.value[0], queue.qsize()))  # todo: mapper

    disposable = obs.pipe(ops.materialize()).subscribe(
        on_next=on_next,
        on_error=lambda e: print("to_agen error: {}".format(e)),
        scheduler=AsyncIOScheduler(loop=loop))

    while True:
        # Fast path: drain synchronously when items are ready, otherwise
        # await the next one.
        try:
            i = queue.get_nowait()
        except asyncio.QueueEmpty:
            i = await queue.get()
        if isinstance(i, OnNext):
            yield i.value
            queue.task_done()
        elif isinstance(i, OnError):
            disposable.dispose()
            raise (Exception(i.exception))
        else:
            # OnCompleted: stop feeding and end the generator.
            disposable.dispose()
            break
def observable_to_async_queue(obs, loop):
    """Subscribe to ``obs`` and funnel its items into an asyncio.Queue.

    Returns ``(queue, disposable)``; dispose to stop feeding the queue.
    """
    queue = asyncio.Queue()

    def enqueue(item):
        # Hop onto the event loop thread before touching the queue.
        loop.call_soon_threadsafe(queue.put_nowait, item)

    disposable = obs.subscribe(
        on_next=enqueue,
        scheduler=AsyncIOScheduler(loop=loop),
    )
    return queue, disposable
def app(loop, config, store, jwt_key, cookie_key):
    """Create and configure the aiohttp Application.

    Wires the session middleware, jinja2 templates, the rx scheduler and
    a deprecation-warning proxy around the legacy ``store`` object.

    Fix: the warning message misspelled "deprecated" as "depricated".
    """
    logger.debug('Creating application')

    from os.path import dirname, normpath
    from warnings import warn

    from aiohttp.web import Application
    from aiohttp_jinja2 import setup as jinja_setup
    from jinja2 import FileSystemLoader
    from rx.scheduler.eventloop import AsyncIOScheduler

    from bigur.auth.middlewares import session

    app = Application(middlewares=[session])
    app['config'] = config
    app['jwt_keys'] = [jwt_key]
    app['cookie_key'] = cookie_key
    app['scheduler'] = AsyncIOScheduler(loop)
    app['provider'] = {}

    # Templates live next to the package, one level above this module.
    templates = normpath(dirname(__file__) + '/../templates')
    jinja_setup(app, loader=FileSystemLoader(templates))

    class WarnWrapper:
        # Proxies attribute access to ``store`` while warning callers
        # that app['store'] is deprecated.
        def __getattr__(self, name):
            warn('Use of app[\'store\'] is deprecated',
                 DeprecationWarning,
                 stacklevel=2)
            return getattr(store, name)

        def __setattr__(self, name, value):
            warn('Use of app[\'store\'] is deprecated',
                 DeprecationWarning,
                 stacklevel=2)
            return setattr(store, name, value)

    app['store'] = WarnWrapper()

    return app
def run(main_f, drivers):
    """Wire cycle-style drivers to the main function via proxy subjects."""
    proxies = {name: Subject() for name in drivers}
    sources = {name: driver(proxies[name]) for name, driver in drivers.items()}
    sinks = main_f(sources)
    # Close the loop: feed each produced sink back into its proxy subject.
    for name, proxy in proxies.items():
        if name in sinks:
            sinks[name].subscribe(proxy, scheduler=AsyncIOScheduler(loop))
def test_asyncio_schedule_now_units(self):
    """scheduler.now must advance in step with wall-clock time."""
    loop = asyncio.get_event_loop()
    scheduler = AsyncIOScheduler(loop)
    diff = scheduler.now
    # NOTE(review): generator-style coroutine; presumably driven by a
    # legacy yield-from test runner — confirm before porting to async def.
    yield from asyncio.sleep(0.1)
    diff = scheduler.now - diff
    # 0.1s sleep should register between 80ms and 180ms on scheduler.now.
    assert timedelta(milliseconds=80) < diff < timedelta(milliseconds=180)
def open(self):
    """WebSocket open handler: build the debounced wikipedia search pipeline."""
    scheduler = AsyncIOScheduler(asyncio.get_event_loop())
    print("WebSocket opened")

    # A Subject is both an observable and observer, so we can both subscribe
    # to it and also feed (send) it with new values
    self.subject = Subject()

    # Get all distinct key up events from the input and only fire if long enough and distinct
    searcher = self.subject.pipe(
        ops.map(lambda x: x["term"]),
        ops.filter(lambda text: len(text) > 2),  # Only if the text is longer than 2 characters
        ops.debounce(0.750),  # Pause for 750ms
        ops.distinct_until_changed(),  # Only if the value has changed
        ops.flat_map_latest(search_wikipedia)
    )

    def send_response(x):
        # Forward the HTTP response body to the websocket client.
        self.write_message(x.body)

    def on_error(ex):
        print(ex)

    searcher.subscribe(send_response, on_error, scheduler=scheduler)
async def main(loop):
    """Poll ``foo`` every 5 seconds and forward each reading to on_next_test."""
    readings = intervalRead(5, foo)
    readings.subscribe(
        on_next=on_next_test,
        scheduler=AsyncIOScheduler(loop),
    )
def __await__(self) -> Any:
    """Awaits the given observable.

    Returns:
        The last item of the observable sequence.
    """
    from ..operators.tofuture import _to_future
    loop = asyncio.get_event_loop()
    # Convert the sequence to a future scheduled on the current loop and
    # delegate to that future's awaiter.
    return iter(self.pipe(_to_future(scheduler=AsyncIOScheduler(loop=loop))))
def __init__(self, event_loop, timeout_msec=-1):
    """Initialize event plumbing on the given asyncio loop.

    Args:
        event_loop: asyncio loop used for rx scheduling and the future.
        timeout_msec: timeout in milliseconds; -1 presumably means
            "no timeout" — TODO confirm against callers.
    """
    self._timeout_msec = timeout_msec
    self._event = Subject()  # inbound event stream
    self._wait_exit = False
    self.result_ob = None
    self._event_loop = event_loop
    self._timer = None
    self.scheduler = AsyncIOScheduler(loop=event_loop)
    self.done = asyncio.Future()  # resolved when processing finishes
    print('done init')
def main():
    """Entry point: run the model publisher component with its drivers."""
    loop = asyncio.get_event_loop()
    scheduler = AsyncIOScheduler(loop=loop)
    drivers = ModelPublisherDrivers(
        kafka=kafka.make_driver(),
        http=http.make_driver(),
        file=file.make_driver(),
        argv=argv.make_driver(),
    )
    run(
        Component(
            call=functools.partial(model_publisher, scheduler),
            input=ModelPublisherSource),
        drivers,
    )
def main():
    """Subscribe an AxsisObserver to 'axsis'-targeted magix events and run."""
    loop = asyncio.get_event_loop()
    client = MagixHttpClient(kMagixHost)
    observer = AxsisObserver(client)

    def is_axsis(event):
        return json.loads(event.data).get('target') == 'axsis'

    def to_message(event):
        # TODO proxy object or optimize somehow
        return Message.from_json(event.data, payload_cls=AxsisMessage)

    stream = client.observe(channel=kChannel).pipe(
        ops.filter(is_axsis),
        ops.map(to_message),
    )
    stream.subscribe(observer, scheduler=AsyncIOScheduler(loop))
    loop.run_forever()
def go(loop):
    """Consume a range observable through an async-generator bridge."""
    scheduler = AsyncIOScheduler(loop)
    xs = rx.from_([x for x in range(10)], scheduler=scheduler)
    gen = xs.pipe(to_async_generator())

    # Wish we could write something like:
    # ys = (x for x in yield from gen())
    while True:
        # gen() yields the next item, or None once the sequence is done.
        x = yield from gen()
        if x is None:
            break
        print(x)
def main():
    """Entry point: run the deepspeech server component on asyncio."""
    loop = asyncio.get_event_loop()
    # loop.set_debug(True)
    scheduler = AsyncIOScheduler(loop=loop)
    drivers = DeepspeechDrivers(
        deepspeech=deepspeech.make_driver(),
        httpd=httpd.make_driver(),
        argv=argv.make_driver(),
        logging=logging.make_driver(),
        file=file.make_driver(),
    )
    run(
        Component(call=partial(deepspeech_server, scheduler),
                  input=DeepspeechSource),
        drivers,
        loop=loop,
    )
def main():
    """Entry point: run the makinage component with its drivers."""
    loop = asyncio.get_event_loop()
    # loop.set_debug(True)
    scheduler = AsyncIOScheduler(loop=loop)
    component = Component(
        call=partial(makinage, scheduler),
        input=MakiNageSource)
    drivers = MakiNageDrivers(
        kafka=kafka.make_driver(),
        http=http.make_driver(),
        file=file.make_driver(),
        argv=argv.make_driver(),
    )
    run(component, drivers, loop=loop)
async def observable_to_async_iterable(obs, loop):
    """Yield the items of ``obs`` as an async iterator.

    Completion ends the iteration; an OnError notification is re-raised
    as an Exception after disposing the subscription.
    """
    queue = asyncio.Queue()

    def push(item):
        # Cross thread boundaries safely before touching the queue.
        loop.call_soon_threadsafe(queue.put_nowait, item)

    disposable = obs.pipe(ops.materialize()).subscribe(
        on_next=push,
        scheduler=AsyncIOScheduler(loop=loop),
    )

    while True:
        x = await queue.get()
        if isinstance(x, OnNext):
            yield x.value
            queue.task_done()
        elif isinstance(x, OnError):
            disposable.dispose()
            raise Exception(f"Observable OnError: {x.value}")
        else:
            # OnCompleted terminates the iteration.
            disposable.dispose()
            break
async def test_buffered_aio():
    """Backpressure buffer must serialize items through the async block.

    interval(0.05) produces ~22 items in 1.1s, but ``block`` takes 0.5s
    per item, so only the first few are processed.
    """
    sched = AsyncIOScheduler(asyncio.get_running_loop())
    events = []
    outs = []

    @rxpy_backpress.wrap_aio
    async def block(x):
        events.append(x)
        await asyncio.sleep(0.5)
        return x

    rx.interval(0.05, scheduler=sched).pipe(rxpy_backpress.BackpressBuffered(),
                                            ops.flat_map(block)).subscribe(
        on_next=outs.append, scheduler=sched)

    await asyncio.sleep(1.1)
    # Three items entered the block (starts at ~0.0s, 0.5s, 1.0s); only
    # the first two completed within the 1.1s window.
    assert events == [0, 1, 2]
    assert outs == [0, 1]
async def main(loop):
    """Read search terms from stdin and pretty-print wikipedia results."""
    scheduler = AsyncIOScheduler(loop)
    finder = WikipediaFinder(loop)
    stream = Subject()

    def task(term):
        # Wrap the search coroutine in a task and expose it as an observable.
        t = loop.create_task(finder.search(term))
        return rx.from_future(t)

    def pretty(result):
        parsed = json.loads(result)
        print(json.dumps(parsed, sort_keys=True, indent=2))

    stream.pipe(
        ops.debounce(0.750),       # wait for typing to settle
        ops.distinct(),            # skip terms already searched
        ops.flat_map_latest(task)  # cancel stale in-flight searches
    ).subscribe(pretty, scheduler=scheduler)

    def reader():
        line = sys.stdin.readline().strip()
        stream.on_next(line)

    loop.add_reader(sys.stdin.fileno(), reader)
def test_flat_map_async(self):
    """flat_map must resolve futures produced by the mapper."""
    actual_next = None
    loop = asyncio.get_event_loop()
    scheduler = AsyncIOScheduler(loop=loop)

    def mapper(i):
        async def _mapper(i):
            return i + 1
        # Return a future so flat_map awaits the mapped value.
        return asyncio.ensure_future(_mapper(i))

    def on_next(i):
        nonlocal actual_next
        actual_next = i

    async def test_flat_map():
        x = Subject()
        x.pipe(ops.flat_map(mapper)).subscribe(on_next, scheduler=scheduler)
        x.on_next(10)
        # Give the loop time to run the scheduled emission.
        await asyncio.sleep(0.1)

    loop.run_until_complete(test_flat_map())
    assert actual_next == 11
"""Demo: run a function on the asyncio scheduler via rx.start.

Fix: the final status message misspelled "starting" as "staring".
"""
import asyncio
import threading

import rx
from rx.scheduler.eventloop import AsyncIOScheduler


def foo():
    # Shows which thread executes the work item.
    print("foo from {}".format(threading.get_ident()))
    return 2


loop = asyncio.get_event_loop()
done = loop.create_future()
scheduler = AsyncIOScheduler(loop=loop)

# rx.start schedules foo once and emits its return value.
number = rx.start(foo, scheduler=scheduler)

print("subscribing...")
number.subscribe(
    lambda i: print("on_next: {} from {}".format(i, threading.get_ident())),
    lambda e: print("on_error: {}".format(e)),
    lambda: done.set_result(0)
)

print("starting mainloop from {}".format(threading.get_ident()))
loop.run_until_complete(done)
loop.close()
def open(self):
    """WebSocket open handler: start streaming transmissions to the client."""
    print('Connection opened')
    loop = asyncio.get_event_loop()
    scheduler = AsyncIOScheduler(loop=loop)
    transmissions = self.api.transmissions()
    transmissions.subscribe(self.send_transmission, scheduler=scheduler)
def main(loop):
    """Run a quadrupling pipeline over a ticking async iterable and print it."""
    source = async_iterable_to_observable(ticker(0.2, 10), loop)
    pipeline = Pipeline.map(lambda x: 2 * x).map(lambda x: x * 2)
    pipeline.to_observable(source).subscribe(
        print,
        scheduler=AsyncIOScheduler(loop=loop),
    )
def test_asyncio_schedule_now(self):
    """scheduler.now must track the event loop's clock."""
    loop = asyncio.get_event_loop()
    scheduler = AsyncIOScheduler(loop)
    # NOTE(review): utcfromtimestamp is deprecated since Python 3.12;
    # both operands here are naive datetimes, so the subtraction is valid.
    diff = scheduler.now - datetime.utcfromtimestamp(loop.time())
    assert abs(diff) < timedelta(milliseconds=1)
async def go(loop):
    """Print 0..9 by consuming a range observable as an async iterable."""
    scheduler = AsyncIOScheduler(loop)
    numbers = rx.range(0, 10, scheduler=scheduler).pipe(to_async_iterable())
    async for value in numbers:
        print("got %s" % value)
import cyclotron_std.io.file as file

import audio_encode_server.encoder as encoder
import audio_encode_server.s3 as s3
import audio_encode_server.inotify as inotify
import aionotify

# Driver/source/sink wiring for the cyclotron component graph.
Drivers = namedtuple('Drivers', ['encoder', 'httpd', 's3', 'file', 'inotify', 'argv'])
Source = namedtuple('Source', ['encoder', 'httpd', 's3', 'file', 'inotify', 'argv'])
Sink = namedtuple('Sink', ['encoder', 'httpd', 's3', 'file', 'inotify'])

# Dedicated schedulers: serialized s3 uploads, up to four parallel
# encodes, and the asyncio loop for driver I/O.
s3_scheduler = ThreadPoolScheduler(max_workers=1)
encode_scheduler = ThreadPoolScheduler(max_workers=4)
aio_scheduler = AsyncIOScheduler(asyncio.get_event_loop())


def parse_config(file_data):
    """Parse JSON config file content into nested namedtuples (shared)."""
    return file_data.pipe(
        ops.map(lambda i: json.loads(
            i, object_hook=lambda d: namedtuple('x', d.keys())(*d.values()))),
        ops.share(),
    )


def create_arg_parser():
    """Build the command line parser (requires --config)."""
    parser = argparse.ArgumentParser("audio encode server")
    parser.add_argument('--config', required=True,
                        help="Path of the server configuration file")