class MergerEndCondition:
    def __init__(self, count, grace_period):
        self._count = count
        self._grace_period = grace_period
        self._force_end = Event()

    async def wait_for_end(self):
        await self._wait_for_any(self._no_connections_for_a_while(),
                                 self._force_end.wait())

    async def _no_connections_for_a_while(self):
        while True:
            await self._count.wait_until_empty()
            if (await self._grace_period_without_connections()):
                return

    async def _grace_period_without_connections(self):
        try:
            await asyncio.wait_for(self._count.wait_until_not_empty(),
                                   timeout=self._grace_period)
            return False
        except asyncio.TimeoutError:
            return True

    async def _wait_for_any(self, *coros):
        _, p = await asyncio.wait(coros, return_when=asyncio.FIRST_COMPLETED)
        for task in p:
            task.cancel()

    def force_end(self):
        self._force_end.set()
class AsyncDict(MutableMapping, EmptyWaitMixin):
    "Tiny dict wrapper that lets us await until it's empty."

    def __init__(self):
        EmptyWaitMixin.__init__(self)
        self._dict = {}
        self._added = Event()

    def __getitem__(self, key):
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value
        self._is_not_empty()
        self._added.set()
        self._added.clear()

    def __delitem__(self, key):
        del self._dict[key]
        if not self._dict:
            self._is_empty()

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        return len(self._dict)

    async def wait_for_key(self, key):
        while True:
            if key in self:
                return self[key]
            await self._added.wait()
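# A minimal usage sketch (assumed, not part of the original module): one
# coroutine awaits a key, another publishes it, and removing the key releases
# anyone waiting on the EmptyWaitMixin API above.
async def _async_dict_example():
    replays = AsyncDict()

    async def consumer():
        # Blocks until someone stores a value under "game-1".
        replay = await replays.wait_for_key("game-1")
        print("got", replay)
        del replays["game-1"]

    task = asyncio.ensure_future(consumer())
    replays["game-1"] = object()
    await replays.wait_until_empty()  # resolves once the consumer deletes the key
    await task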
class Merger:
    def __init__(self, stream_builder, grace_period_time, merge_strategy,
                 canonical_stream):
        self._stream_builder = stream_builder
        self._stream_count = AsyncCounter()
        self._merge_strategy = merge_strategy
        self.canonical_stream = canonical_stream
        self._closing = False
        self._ended = Event()
        self._end_condition = MergerEndCondition(self._stream_count,
                                                 grace_period_time)
        asyncio.ensure_future(self._lifetime())

    @classmethod
    def build(cls, *, config_merger_grace_period_time,
              config_replay_merge_strategy, **kwargs):
        canonical_replay = OutsideSourceReplayStream()
        merge_strategy = config_replay_merge_strategy.build(
            canonical_replay, **kwargs)
        stream_builder = ConnectionReplayStream.build
        return cls(stream_builder, config_merger_grace_period_time,
                   merge_strategy, canonical_replay)

    @contextmanager
    def _stream_tracking(self, connection):
        stream = self._stream_builder(connection)
        self._merge_strategy.stream_added(stream)
        self._stream_count.inc()
        try:
            yield stream
        finally:
            self._merge_strategy.stream_removed(stream)
            self._stream_count.dec()

    async def handle_connection(self, connection):
        if self._closing:
            raise CannotAcceptConnectionError(
                "Writer connection arrived after replay writing finished")
        with self._stream_tracking(connection) as stream:
            await stream.read_header()
            self._merge_strategy.new_header(stream)
            while not stream.ended():
                await stream.read()
                self._merge_strategy.new_data(stream)

    def close(self):
        self._end_condition.force_end()

    async def _lifetime(self):
        await self._end_condition.wait_for_end()
        self._closing = True
        await self._stream_count.wait_until_empty()
        self._merge_strategy.finalize()
        self.canonical_stream.finish()
        self._ended.set()

    async def wait_for_ended(self):
        await self._ended.wait()
def __init__(self, merger, sender, bookkeeper, timeout, game_id):
    self.merger = merger
    self.sender = sender
    self.bookkeeper = bookkeeper
    self._game_id = game_id
    self._connections = set()
    self._timeout = timeout
    self._ended = Event()
    asyncio.ensure_future(self._lifetime())
    self._force_close = asyncio.ensure_future(self._timeout_force_close())
class Sender:
    def __init__(self, delayed_stream):
        self._stream = delayed_stream
        self._conn_count = AsyncCounter()
        self._ended = Event()
        asyncio.ensure_future(self._lifetime())

    @classmethod
    def build(cls, stream, **kwargs):
        delayed_stream = DelayedReplayStream.build(stream, **kwargs)
        return cls(delayed_stream)

    @contextmanager
    def _connection_count(self):
        self._conn_count.inc()
        try:
            yield
        finally:
            self._conn_count.dec()

    async def handle_connection(self, connection):
        if self._stream.ended():
            raise CannotAcceptConnectionError(
                "Reader connection arrived after replay ended")
        with self._connection_count():
            await self._write_header(connection)
            await self._write_replay(connection)

    async def _write_header(self, connection):
        header = await self._stream.wait_for_header()
        if header is None:
            raise MalformedDataError("Malformed replay header")
        await connection.write(header.data)

    async def _write_replay(self, connection):
        position = 0
        while True:
            data = await self._stream.wait_for_data(position)
            if not data:
                break
            position += len(data)
            conn_open = await connection.write(data)
            if not conn_open:
                break

    def close(self):
        pass

    async def _lifetime(self):
        await self._stream.wait_for_ended()
        await self._conn_count.wait_until_empty()
        self._ended.set()

    async def wait_for_ended(self):
        await self._ended.wait()
class HeaderEventMixin:
    """
    Useful when the class adds a header via a coroutine.
    """

    def __init__(self):
        self._header_read_or_ended = Event()

    def _signal_header_read_or_ended(self):
        self._header_read_or_ended.set()

    async def wait_for_header(self):
        await self._header_read_or_ended.wait()
        return self.header
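# Hypothetical sketch (class and method names are assumptions, not from the
# original code) of a stream mixing in HeaderEventMixin: set self.header, then
# signal, so readers blocked in wait_for_header() are released. Signalling
# without a header makes wait_for_header() return None, which callers such as
# Sender._write_header treat as an error.
class SimpleHeaderStream(HeaderEventMixin):
    def __init__(self):
        HeaderEventMixin.__init__(self)
        self.header = None

    def feed_header(self, header):
        self.header = header
        self._signal_header_read_or_ended()

    def end_without_header(self):
        self._signal_header_read_or_ended()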
class EndedEventMixin:
    def __init__(self):
        self._ended = Event()

    def _end(self):
        self._ended.set()

    def ended(self):
        return self._ended.is_set()

    async def wait_for_ended(self):
        await self._ended.wait()
def __init__(self, merger, sender, bookkeeper, config, game_id):
    self.merger = merger
    self.sender = sender
    self.bookkeeper = bookkeeper
    self._game_id = game_id
    self._connections = set()
    self._ended = Event()
    self._lifetime_coroutines = [
        asyncio.ensure_future(self._force_closing(config.forced_end_time)),
        asyncio.ensure_future(self._write_phase(config.grace_period))
    ]
    asyncio.ensure_future(self._lifetime())
class EmptyWaitMixin:
    def __init__(self):
        self._empty = Event()
        self._not_empty = Event()
        self._is_empty()

    async def wait_until_empty(self):
        await self._empty.wait()

    async def wait_until_not_empty(self):
        await self._not_empty.wait()

    async def wait_until_empty_for(self, period):
        while True:
            await self.wait_until_empty()
            if await self._empty_for(period):
                return

    async def _empty_for(self, period):
        try:
            await asyncio.wait_for(self.wait_until_not_empty(), period)
            return False
        except asyncio.TimeoutError:
            return True

    def _is_empty(self):
        self._empty.set()
        self._not_empty.clear()

    def _is_not_empty(self):
        self._not_empty.set()
        self._empty.clear()
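# The AsyncCounter used by Merger, Sender and ServesConnections is not shown in
# this section. A minimal sketch of how such a counter could sit on top of
# EmptyWaitMixin, assuming only the inc()/dec()/wait_until_empty() calls seen
# above (the name AsyncCounterSketch is ours, not the original class):
class AsyncCounterSketch(EmptyWaitMixin):
    def __init__(self):
        EmptyWaitMixin.__init__(self)
        self._count = 0

    def inc(self):
        self._count += 1
        self._is_not_empty()

    def dec(self):
        self._count -= 1
        if self._count == 0:
            self._is_empty()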
async def DoLogin(self):
    login_completed = Event()

    async def HandleLogin(username, password):
        result = await self.session.get("/user/login", {
            "email": username,
            "password": password
        })
        login_completed.set()

    async def HandleCreateAccountStep1(username):
        await self.get("/user/register", {"email": username})

    async def HandleCreateAccountStep2(confirmation_code, username, password,
                                       otp_secret):
        await self.get(
            "/user/register_confirm", {
                "confirmation_code": confirmation_code,
                "password": password,
                "otp_secret": otp_secret
            })
        await self.session.get("/user/login", {
            "email": username,
            "password": password
        })
        login_completed.set()

    login_dialog = LoginDialog(
        None,
        HandleLogin=HandleLogin,
        HandleCreateAccountStep1=HandleCreateAccountStep1,
        HandleCreateAccountStep2=HandleCreateAccountStep2)
    login_dialog.Show()
    await login_completed.wait()
    login_dialog.Hide()
async def AsyncShowDialog(dlg):
    closed = Event()

    def end_dialog(return_code):
        dlg.SetReturnCode(return_code)
        dlg.Hide()
        closed.set()

    async def on_button(event):
        # Same code as in wxwidgets:/src/common/dlgcmn.cpp:OnButton
        # to automatically handle OK, CANCEL, APPLY,... buttons
        id = event.GetId()
        if id == dlg.GetAffirmativeId():
            if dlg.Validate() and dlg.TransferDataFromWindow():
                end_dialog(id)
        elif id == wx.ID_APPLY:
            if dlg.Validate():
                dlg.TransferDataFromWindow()
        elif id == dlg.GetEscapeId() or (id == wx.ID_CANCEL
                                         and dlg.GetEscapeId() == wx.ID_ANY):
            end_dialog(wx.ID_CANCEL)
        else:
            event.Skip()

    async def on_close(event):
        closed.set()
        dlg.Hide()

    AsyncBind(wx.EVT_CLOSE, on_close, dlg)
    AsyncBind(wx.EVT_BUTTON, on_button, dlg)
    dlg.Show()
    await closed.wait()
    return dlg.GetReturnCode()
async def test_tasklet_grouping(event_loop: AbstractEventLoop):
    no_tasks = 2
    gates = [Event() for _ in range(no_tasks)]
    statuses = [event_loop.create_future() for _ in range(no_tasks)]

    def on_done(status: Future, task: Task) -> None:
        status.set_result("cancelled" if task.cancelled() else "done")

    def group_by(_, group: int) -> int:
        return group

    @tasklet
    async def suspend(e: Event, group: int):
        await e.wait()

    suspend.group_by = group_by

    (await suspend(gates[0], group=1)).add_done_callback(
        partial(on_done, statuses[0]))
    (await suspend(gates[1], group=2)).add_done_callback(
        partial(on_done, statuses[1]))

    for g in gates:
        g.set()

    await wait_for(gather(*statuses), timeout=1)
    assert statuses[0].result() == "done"
    assert statuses[1].result() == "done"
def get():
    manual_end = Event(loop=event_loop)

    async def manual_wait(*args, **kwargs):
        await manual_end.wait()

    ended_wait_mock = asynctest.CoroutineMock(side_effect=manual_wait)
    return (manual_end, ended_wait_mock)
class ServesConnections:
    """
    Abstract class that represents serving connections. Handles connections
    until told to stop, then refuses to handle more. After that, once all
    connections are over, performs some housekeeping and then reports as
    finished.
    """

    def __init__(self):
        self._denies_connections = Event()
        self._connection_count = AsyncCounter()
        self._ended = Event()
        asyncio.ensure_future(self._lifetime())

    def _accepts_connections(self):
        return not self._denies_connections.is_set()

    def stop_accepting_connections(self):
        self._denies_connections.set()

    async def _wait_until_all_connections_end(self):
        await self._denies_connections.wait()
        await self._connection_count.wait_until_empty()

    async def handle_connection(self, connection):
        if not self._accepts_connections():
            raise CannotAcceptConnectionError(
                f"{self} no longer accepts connections")
        try:
            self._connection_count.inc()
            return await self._handle_connection(connection)
        finally:
            self._connection_count.dec()

    async def _lifetime(self):
        await self._wait_until_all_connections_end()
        await self._after_connections_end()
        self._ended.set()

    async def wait_for_ended(self):
        await self._ended.wait()

    async def _handle_connection(self, connection):
        raise NotImplementedError

    async def _after_connections_end(self):
        raise NotImplementedError
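# A minimal concrete subclass sketch (assumed, not part of the original code),
# showing what the two abstract hooks are expected to do: _handle_connection
# serves one connection, _after_connections_end does the final housekeeping.
# The lifecycle then is: serve connections, call stop_accepting_connections(),
# and await wait_for_ended().
class EchoService(ServesConnections):
    async def _handle_connection(self, connection):
        data = await connection.readexactly(3)
        await connection.write(data)

    async def _after_connections_end(self):
        # e.g. flush buffers, close files, notify a bookkeeper
        pass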
def get():
    manual_end = Event(loop=event_loop)

    async def manual_wait(*args, **kwargs):
        await manual_end.wait()

    blockable = asynctest.CoroutineMock(side_effect=manual_wait,
                                        _lock=manual_end)
    return blockable
def __init__(self, config, type, store, loop=None):
    super(BaseAccessor, self).__init__()
    self.config = config
    self.type = type
    self._store = weakref.ref(store)
    self.loop = loop
    self.logger = logging.getLogger('accessor[{}]'.format(self.type))
    self.connected = False
    self.connecting = False
    self.disconnecting = False
    self._connected_event = Event(loop=self.loop)
    self._host = None
    self._port = None
    self._username = None
    self._password = None
    if self.CHECK_CONFIG:
        self.check_config()
class _WaitGroup(WaitGroup):
    def __init__(self) -> None:
        self._counter = 0
        self._event = Event()
        self._err: Optional[BaseException] = None

    def __bool__(self) -> bool:
        return not self._event.is_set()

    def __len__(self) -> int:
        return self._counter

    def __enter__(self) -> None:
        self._counter += 1
        self._event.clear()

    def __exit__(
        self,
        _: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Literal[True]:
        if self._err is not None:
            pass
        elif exc_value is not None:
            self._err = exc_value
            self._event.set()
        else:
            self._counter -= 1
            if self._counter < 0:
                raise RuntimeError()
            elif len(self) == 0:
                self._event.set()
        return True

    def __call__(self, f: T) -> T:
        if not iscoroutinefunction(f):
            raise ValueError()
        else:
            async def cont(*args: Any, **kwargs: Any) -> None:
                with self:
                    await f(*args, **kwargs)

            return cast(T, cont)

    def maybe_throw(self) -> None:
        if self._err is not None:
            raise self._err

    async def wait(self) -> None:
        await sleep(0)
        if len(self):
            await self._event.wait()
        if self._err is not None:
            raise self._err
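# Usage sketch (assumed, not from the original module): every `with wg:` block
# or decorated coroutine joins the group; wait() resolves once all members have
# exited and re-raises the first error, if any. create_task and sleep come from
# asyncio, as in the surrounding module.
async def _wait_group_example() -> None:
    wg = _WaitGroup()

    @wg
    async def worker(n: int) -> None:
        await sleep(0.01 * n)

    for n in range(3):
        create_task(worker(n))
    await wg.wait()  # returns once all three workers have finished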
async def background_update() -> None:
    async for _ in schedule(Event(), min_time=0, max_time=config.polling_rate):
        await depopulate(conn)
        try:
            async for words in tmux_words(
                max_length=max_length, unifying_chars=unifying_chars
            ):
                await populate(conn, words=words)
        except TmuxError as e:
            message = f"failed to fetch tmux{linesep}{e}"
            log.warn("%s", message)
class DataEventMixin:
    """
    Useful when the class adds data via a coroutine.
    """

    def __init__(self):
        self._new_data_or_ended = Event()

    def _signal_new_data_or_ended(self):
        self._new_data_or_ended.set()
        self._new_data_or_ended.clear()

    def wait_for_data(self, position=None):
        if position is None:
            position = len(self.data)
        return self._wait_for_data(position)

    async def _wait_for_data(self, position=None):
        while position >= len(self.data) and not self.ended():
            await self._new_data_or_ended.wait()
        if position < len(self.data):
            return self.data[position:]
        return b""
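# Consumer-side sketch (names are assumptions): the mixin expects the host
# class to expose `self.data` (a bytes-like buffer) and `ended()`. A reader can
# then tail the buffer until the stream ends, mirroring Sender._write_replay
# above.
async def tail_stream(stream):
    position = 0
    while True:
        chunk = await stream.wait_for_data(position)
        if not chunk:  # an empty result means the stream has ended
            return
        position += len(chunk)
        print(f"received {len(chunk)} bytes")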
class _BaseChan(Chan[T], AsyncIterator[T]):
    def __init__(self) -> None:
        self._onclose = Event()

    def __bool__(self) -> bool:
        return not self._onclose.is_set()

    async def __aenter__(self) -> Chan[T]:
        return self

    async def __aexit__(self, *_: Any) -> None:
        await self.aclose()

    def __aiter__(self) -> AsyncIterator[T]:
        return self

    async def __anext__(self) -> T:
        try:
            return await self.recv()
        except ChanClosed:
            raise StopAsyncIteration()

    def __lt__(self, item: T) -> None:
        self.try_send(item)

    def __gt__(self, _: Any) -> T:
        return self.try_recv()

    async def __lshift__(self, item: T) -> None:
        await self.send(item)

    async def __rlshift__(self, _: Any) -> T:
        return await self.recv()

    async def aclose(self) -> None:
        await sleep(0)
        self._onclose.set()

    async def _on_closed(self) -> Chan[T]:
        await self._onclose.wait()
        return self
async def test_tasklet_cancellation(event_loop: AbstractEventLoop):
    gates = [Event(), Event()]
    statuses = [event_loop.create_future(), event_loop.create_future()]

    def on_done(status: Future, task: Future) -> None:
        status.set_result("cancelled" if task.cancelled() else "done")

    @tasklet
    async def suspend(e: Event):
        await e.wait()

    (await suspend(gates[0])).add_done_callback(partial(on_done, statuses[0]))
    # Next call should cancel the previously scheduled task
    (await suspend(gates[1])).add_done_callback(partial(on_done, statuses[1]))

    # Opening the gate should complete the newly scheduled task
    gates[1].set()

    await wait_for(gather(*statuses), timeout=1)
    assert statuses[0].result() == "cancelled"
    assert statuses[1].result() == "done"
async def merge(*aits: AsyncIterable[_T]) -> AsyncIterator[_T]:
    ev = Event()
    q: Queue = Queue(maxsize=1)
    end = create_task(ev.wait())
    g = gather(*(_merge_helper(q, end=end, ait=ait) for ait in aits))

    try:
        while True:
            fut = create_task(q.get())
            done, _ = await wait((g, fut), return_when=FIRST_COMPLETED)
            if fut in done:
                item = await fut
                yield item
            if g in done:
                break
    except CancelledError:
        await cancel(g)
    finally:
        ev.set()
        await g
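# Usage sketch (assumed): interleave items from two async generators as they
# become available; ordering across sources is not guaranteed.
async def _merge_example() -> None:
    async def numbers() -> AsyncIterator[int]:
        for n in range(3):
            yield n

    async def more_numbers() -> AsyncIterator[int]:
        for n in range(10, 13):
            yield n

    async for item in merge(numbers(), more_numbers()):
        print(item)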
class Timestamp:
    def __init__(self, stream, interval, delay):
        self._stream = stream
        self._interval = interval
        self._delay = delay
        self._ended = False
        # Last item in deque size n+1 is from n intervals ago
        stamp_number = math.ceil(self._delay / self._interval) + 1
        self._stamps = deque([0], maxlen=stamp_number)
        self._new_stamp = Event()
        self._stamp_coro = asyncio.ensure_future(self._periodic_stamp())
        asyncio.ensure_future(self._wait_for_stream_end())

    def _stamp(self, pos):
        self._stamps.append(pos)
        self._new_stamp.set()
        self._new_stamp.clear()

    async def _periodic_stamp(self):
        while True:
            self._stamp(len(self._stream.data))
            await asyncio.sleep(self._interval)

    async def _wait_for_stream_end(self):
        await self._stream.wait_for_ended()
        self._stamp_coro.cancel()
        self._stamps.clear()
        self._stamp(len(self._stream.data))
        self._ended = True

    async def timestamps(self):
        while not self._ended:
            await self._new_stamp.wait()
            yield self._stamps[0]
        yield self._stamps[0]
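# Consumer sketch (assumed; the interval and delay values are illustrative, not
# from the original code). Each yielded value is the stream position as it was
# roughly `delay` seconds ago, i.e. how much a delayed reader may see. `stream`
# is any replay stream exposing `.data` and `wait_for_ended()`.
async def follow_delayed_positions(stream):
    ts = Timestamp(stream, interval=1, delay=300)
    async for position in ts.timestamps():
        print(f"readers may now see bytes up to {position}")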
def __init__(self):
    self.data = ReplayStreamData(self._data_length, self._data_slice,
                                 self._data_bytes, self._data_view)
    self.future_data = ReplayStreamData(self._future_data_length,
                                        self._future_data_slice,
                                        self._future_data_bytes,
                                        self._future_data_view)
    self._ended = Event()
    self._header_read_or_ended = Event()
    self._new_data_or_ended = Event()
async def AsyncShowDialog(dlg):
    if type(dlg) in [
            wx.FileDialog, wx.DirDialog, wx.FontDialog, wx.ColourDialog,
            wx.MessageDialog
    ]:
        raise Exception(
            "This type of dialog cannot be shown modeless; please use "
            "'AsyncShowDialogModal'")
    closed = Event()

    def end_dialog(return_code):
        dlg.SetReturnCode(return_code)
        dlg.Hide()
        closed.set()

    async def on_button(event):
        # Same code as in wxwidgets:/src/common/dlgcmn.cpp:OnButton
        # to automatically handle OK, CANCEL, APPLY,... buttons
        id = event.GetId()
        if id == dlg.GetAffirmativeId():
            if dlg.Validate() and dlg.TransferDataFromWindow():
                end_dialog(id)
        elif id == wx.ID_APPLY:
            if dlg.Validate():
                dlg.TransferDataFromWindow()
        elif id == dlg.GetEscapeId() or (id == wx.ID_CANCEL
                                         and dlg.GetEscapeId() == wx.ID_ANY):
            end_dialog(wx.ID_CANCEL)
        else:
            event.Skip()

    async def on_close(event):
        closed.set()
        dlg.Hide()

    AsyncBind(wx.EVT_CLOSE, on_close, dlg)
    AsyncBind(wx.EVT_BUTTON, on_button, dlg)
    dlg.Show()
    await closed.wait()
    return dlg.GetReturnCode()
class Server:
    def __init__(self, connection_producer, database, connections, replays,
                 bookkeeper, prometheus_port):
        self._connection_producer = connection_producer
        self._database = database
        self._connections = connections
        self._replays = replays
        self._bookkeeper = bookkeeper
        self._prometheus_port = prometheus_port
        self._stopped = Event()
        self._stopped.set()

    @classmethod
    def build(cls, *,
              dep_connection_producer=ConnectionProducer.build,
              dep_database=Database.build,
              config):
        database = dep_database(config.db)
        bookkeeper = Bookkeeper.build(database, config.storage)
        replays = Replays.build(bookkeeper, config.replay)
        conns = Connections.build(
            replays, config.server.connection_header_read_timeout)
        producer = dep_connection_producer(
            conns.handle_connection, config.server.port,
            config.server.connection_linger_time)
        return cls(producer, database, conns, replays, bookkeeper,
                   config.server.prometheus_port)

    async def start(self):
        if self._prometheus_port is not None:
            prometheus_client.start_http_server(self._prometheus_port)
        await self._database.start()
        await self._connection_producer.start()
        self._stopped.clear()

    async def stop(self):
        await self._connection_producer.stop()
        await self._connections.close_all()
        await self._replays.stop_all()
        await self._connections.wait_until_empty()
        await self._database.stop()
        self._stopped.set()

    async def run(self):
        await self.start()
        await self._stopped.wait()
async def test_producer_sanity_check(unused_tcp_port):
    check_ran = Event()

    async def check(connection):
        data_in = await connection.readexactly(3)
        assert data_in == b"foo"
        is_open = await connection.write(b"bar")
        assert is_open
        connection.close()
        await connection.wait_closed()
        check_ran.set()

    prod = ConnectionProducer(check, unused_tcp_port, 0.1)
    await prod.start()

    r, w = await asyncio.open_connection('127.0.0.1', unused_tcp_port)
    w.write(b"foo")
    await w.drain()
    data = await r.read()
    assert data == b"bar"
    w.close()
    await check_ran.wait()
    await prod.stop()
class MicroModule(object):
    """ definition of the basic module with life-cycle methods """

    CONFIGURED = 1
    READY = 2
    STOPPED = 0

    def __init__(self):
        self.name = self.__class__.__name__
        self.state = 0
        self.configured = Event()  # flag is set once configured.
        self.ready = Event()  # allows other modules to depend on this module
        self.stopped = Event()  # set when module is stopped
        self.stopped.set()  # stopped initially
        self.config_dependencies = []
        self.init_dependencies = []
        self.config_task = None
        self.init_task = None
        self.config = ConfigParser()

    def add_config_dependency(self, dep):
        if dep.configured not in self.init_dependencies:
            self.config_dependencies.append(dep.configured)

    def add_init_depencency(self, dep):
        if dep.ready not in self.init_dependencies:
            self.init_dependencies.append(dep.ready)

    def configure(self):
        """
        first step: handles loading default settings and getting a handle
        to the app
        """
        default_settings = os.path.join(__path__[0], self.name.lower() + '.ini')
        logger.debug('default settings: ' + default_settings)
        self.config.read(default_settings)
        if not self.config_task:
            logger.debug('configuring module ' + self.name)
            self.config_task = asyncio.ensure_future(
                trigger_on_deps(self.config_dependencies, self.configured))
        self._configure()
        if not self.configured.is_set():
            self.state = MicroModule.CONFIGURED
            self.configured.set()

    def _configure(self):
        logger.info('_configure: override when subclassing')

    async def init(self):
        """
        second step: do any processing required before being run.
        calls configure if the config event isn't set
        loads any resources, i.e. thread pools, db connections etc...
        """
        if not self.configured.is_set():
            self.configure()
        await self.configured.wait()
        logger.debug('initializing module ' + self.name)
        await self._init()
        self.state = MicroModule.READY
        self.ready.set()
        self.stopped.clear()

    async def _init(self):
        logger.info('_init: override when subclassing')

    async def stop(self):
        """
        freeze the module so that further processing is blocked until
        restarted. This should clean up any resources not involved in
        maintaining state, thus a restart should pick up where it left off
        """
        logger.debug('closing module ' + self.name)
        self.state = MicroModule.STOPPED
        self.ready.clear()
        self.stopped.set()

    def requires_init(self):
        if not self.ready.is_set():
            asyncio.ensure_future(self.init())

        async def init(func):
            # wait until the module is ready, then run the wrapped awaitable
            await self.ready.wait()
            await func

        return init
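# A minimal subclass sketch (assumed, not part of the original): only the two
# protected hooks are overridden, the life-cycle plumbing stays in MicroModule.
# The 'database'/'dsn' config keys are illustrative.
class DatabaseModule(MicroModule):
    def _configure(self):
        self.dsn = self.config.get('database', 'dsn', fallback='sqlite://')

    async def _init(self):
        logger.info('connecting to ' + self.dsn)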
class H2mqTransport:
    def __init__(self, protocol: H2mqProtocol, *, loop=None):
        self._protocol = protocol
        self._listeners = {}
        self._connectors = {}
        self._peers = deque()
        self._peers_not_empty = Event()
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop

    @property
    def loop(self):
        return self._loop

    @property
    def protocol(self):
        return self._protocol

    def connection_made(self, h2_protocol):
        self._peers.append(h2_protocol)
        self._peers_not_empty.set()
        self._loop.call_soon(self._protocol.connection_made, h2_protocol)

    def connection_lost(self, h2_protocol):
        try:
            self._peers.remove(h2_protocol)
        except ValueError:
            pass
        if not self._peers:
            self._peers_not_empty.clear()
        self._protocol.connection_lost(h2_protocol)

    def event_received(self, event, stream=None):
        self._protocol.event_received(event, stream=stream)

    async def bind(self, endpoint: str):
        proto, address = parse_endpoint(endpoint)
        listener = self._listeners[endpoint] = \
            self._protocol.listener_factory(proto)(self, address)
        await listener.open()

    async def unbind(self, endpoint: str):
        listener = self._listeners.pop(endpoint, None)
        if listener is not None:
            await listener.close()

    async def connect(self, endpoint):
        proto, address = parse_endpoint(endpoint)
        connector = self._connectors[endpoint] = \
            self._protocol.connector_factory(proto)(self, address)
        await connector.open()

    async def disconnect(self, endpoint):
        connector = self._connectors.pop(endpoint, None)
        if connector is not None:
            await connector.close()

    async def create_stream(self, headers):
        await self._peers_not_empty.wait()
        peer = self._peers.popleft()
        self._peers.append(peer)
        # TODO: handle NoAvailableStreamIDError here
        return peer.get_stream(headers)