async def test_call_on_cancel_example():
    """`aio.call_on_cancel` fires only when the owning group is cancelled."""
    future = asyncio.Future()
    group = aio.Group()
    group.spawn(aio.call_on_cancel, future.set_result, 123)
    # callback must not have run while the group is still open
    assert not future.done()
    await group.async_close()
    assert future.result() == 123
async def create_backend(path: Path,
                         low_size: int,
                         high_size: int,
                         enable_archive: bool,
                         disable_journal: bool
                         ) -> 'Backend':
    """Create backend

    Args:
        path: database file path
        low_size: low database size limit (presumably entry count -
            verify against the database module)
        high_size: high database size limit (see `low_size`)
        enable_archive: enable archiving of removed entries
        disable_journal: disable database journaling (forwarded to
            `database.create_database`)

    """
    db = await database.create_database(path, disable_journal)
    try:
        first_id = await db.get_first_id()
        last_id = await db.get_last_id()
    except BaseException:
        # close the freshly opened database before propagating any error
        # (including cancellation - hence uncancellable)
        await aio.uncancellable(db.async_close())
        raise
    backend = Backend()
    backend._path = path
    backend._low_size = low_size
    backend._high_size = high_size
    backend._enable_archive = enable_archive
    backend._disable_journal = disable_journal
    backend._db = db
    backend._first_id = first_id
    backend._last_id = last_id
    backend._async_group = aio.Group()
    backend._change_cbs = util.CallbackRegistry()
    backend._msg_queue = aio.Queue(register_queue_size)
    backend._executor = aio.create_executor()
    # database is closed automatically once the backend's group is cancelled
    backend._async_group.spawn(aio.call_on_cancel, db.async_close)
    backend._async_group.spawn(backend._loop)
    mlog.debug('created backend with database %s', path)
    return backend
async def create(conf: json.Data,
                 path: Path,
                 components: typing.List[hat.orchestrator.component.Component]
                 ) -> 'WebServer':
    """Create ui for monitoring and controlling components

    Args:
        conf: configuration defined by
            ``hat://orchestrator.yaml#/definitions/ui``
        path: web ui directory path
        components: components

    """
    server = WebServer()
    server._async_group = aio.Group()
    server._components = components
    server._change_registry = util.CallbackRegistry()

    # one monitoring task per component
    for component in components:
        server._async_group.spawn(server._component_loop, component)

    address = urllib.parse.urlparse(conf['address'])
    juggler_srv = await juggler.listen(
        address.hostname, address.port,
        lambda conn: server._async_group.spawn(server._conn_loop, conn),
        static_dir=path,
        autoflush_delay=autoflush_delay)
    # juggler server is closed together with the web server's group
    server._async_group.spawn(aio.call_on_cancel, juggler_srv.async_close)
    return server
def __init__(self, query_result=None):
    """Initialize mock backend.

    Args:
        query_result (list): result returned by queries; defaults to an
            empty list

    """
    # fix: original used a mutable default argument (`query_result=[]`),
    # which is shared between all instances created without the argument
    self._query_result = [] if query_result is None else query_result
    self._receive_queue = aio.Queue()
    self._register_queue = aio.Queue()
    self._async_group = aio.Group()
    # close both queues on group cancellation so pending readers unblock
    self._async_group.spawn(aio.call_on_cancel, self._receive_queue.close)
    self._async_group.spawn(aio.call_on_cancel, self._register_queue.close)
async def create(conf: json.Data) -> 'Master':
    """Create master

    Args:
        conf: configuration defined by
            ``hat://monitor/main.yaml#/definitions/master``

    """
    algorithms = {group: blessing.Algorithm[name]
                  for group, name in conf['group_algorithms'].items()}

    master = Master()
    master._last_mid = 0
    master._group_algorithms = algorithms
    master._default_algorithm = blessing.Algorithm[conf['default_algorithm']]
    master._components = []
    master._mid_components = {}
    master._change_cbs = util.CallbackRegistry()
    # active subgroup starts closed - activated later on demand
    master._active_subgroup = aio.Group()
    master._active_subgroup.close()
    master._srv = await chatter.listen(sbs_repo=common.sbs_repo,
                                       address=conf['address'],
                                       connection_cb=master._create_slave)
    mlog.debug('master listens slaves on %s', conf['address'])
    return master
async def connect(host: str, port: int) -> 'Connection':
    """Connect to a remote websocket debugger endpoint.

    Fetches ``http://<host>:<port>/json/version`` and opens a websocket to
    the advertised ``webSocketDebuggerUrl`` (presumably the Chrome DevTools
    protocol - confirm against the target service).
    """
    session = aiohttp.ClientSession()
    try:
        res = await session.get(f'http://{host}:{port}/json/version')
        res = await res.json()
        addr = res['webSocketDebuggerUrl']
        # max_msg_size=0 disables aiohttp's websocket message size limit
        ws = await session.ws_connect(addr, max_msg_size=0)
    except BaseException:
        # close the HTTP session before propagating any error
        # (including cancellation - hence uncancellable)
        await aio.uncancellable(session.close())
        raise
    conn = Connection()
    conn._ws = ws
    conn._session = session
    conn._async_group = aio.Group()
    conn._event_cbs = util.CallbackRegistry()
    conn._last_id = 0
    conn._result_futures = {}
    # _on_close runs once the group is cancelled; _receive_loop drives ws
    conn._async_group.spawn(aio.call_on_cancel, conn._on_close)
    conn._async_group.spawn(conn._receive_loop)
    return conn
async def listen(connection_cb: ConnectionCb,
                 addr: Address,
                 *,
                 bind_connections: bool = False,
                 **kwargs) -> 'Server':
    """Create listening server

    If `bind_connections` is ``True``, closing server will close all open
    incoming connections.

    Additional arguments are passed directly to `asyncio.start_server`.

    """
    server = Server()
    server._connection_cb = connection_cb
    server._bind_connections = bind_connections
    server._async_group = aio.Group()
    server._srv = await asyncio.start_server(server._on_connection,
                                             addr.host, addr.port, **kwargs)
    server._async_group.spawn(aio.call_on_cancel, server._on_close)
    # fix: loop variable renamed from `socket`, which shadowed the stdlib
    # `socket` module name
    socknames = (sock.getsockname() for sock in server._srv.sockets)
    server._addresses = [Address(*sockname[:2]) for sockname in socknames]
    return server
async def async_main(syslog_addr: str,
                     syslog_pem: typing.Optional[Path],
                     ui_addr: str,
                     ui_pem: typing.Optional[Path],
                     ui_path: Path,
                     db_path: Path,
                     db_low_size: int,
                     db_high_size: int,
                     db_enable_archive: bool,
                     db_disable_journal: bool):
    """Syslog Server async main

    Creates backend, web server and syslog server (in that order) bound to
    a single group, then waits until the group starts closing.
    """
    async_group = aio.Group()
    # NOTE(review): 0.1 s sleep scheduled on cancellation - presumably a
    # grace delay so resources can settle during shutdown; confirm intent
    async_group.spawn(aio.call_on_cancel, asyncio.sleep, 0.1)
    try:
        mlog.debug("creating backend...")
        backend = await _create_resource(async_group, create_backend,
                                         db_path, db_low_size, db_high_size,
                                         db_enable_archive,
                                         db_disable_journal)
        mlog.debug("creating web server...")
        await _create_resource(async_group, create_web_server,
                               ui_addr, ui_pem, ui_path, backend)
        mlog.debug("creating syslog server...")
        await _create_resource(async_group, create_syslog_server,
                               syslog_addr, syslog_pem, backend)
        mlog.debug("initialization done")
        await async_group.wait_closing()
    finally:
        mlog.debug("closing...")
        # shutdown must complete even if this coroutine is being cancelled
        await aio.uncancellable(async_group.async_close())
async def connect(address: str, *,
                  autoflush_delay: typing.Optional[float] = 0.2,
                  ) -> 'Connection':
    """Connect to remote server

    Address represents remote WebSocket URL formated as
    ``<schema>://<host>:<port>/<path>`` where ``<schema>`` is ``ws`` or
    ``wss``.

    `autoflush_delay` is the maximum delay before `local_data` changes are
    synchronized automatically: ``None`` disables automatic synchronization
    (the user must call :meth:`Connection.flush_local_data`), while ``0``
    synchronizes on every change of `local_data`.

    Args:
        address: remote server address
        autoflush_delay: autoflush delay

    """
    http_session = aiohttp.ClientSession()
    try:
        websocket = await http_session.ws_connect(address, max_msg_size=0)
    except BaseException:
        # release the session even when interrupted by cancellation
        await aio.uncancellable(http_session.close())
        raise
    return _create_connection(aio.Group(), websocket, autoflush_delay,
                              http_session)
async def listen(validate_cb: ValidateCb,
                 connection_cb: ConnectionCb,
                 addr: Address = Address('0.0.0.0', 102)
                 ) -> 'Server':
    """Create COPP listening server

    Args:
        validate_cb: callback function or coroutine called on new
            incomming connection request prior to creating connection object
        connection_cb: new connection callback
        addr: local listening address

    """

    async def on_validate(user_data):
        # decode CP PPDU and answer with an encoded CPA PPDU
        cp_ppdu = _decode('CP-type', user_data)
        cp_params = cp_ppdu['normal-mode-parameters']
        called_psel_data = cp_params.get('called-presentation-selector')
        called_psel = (int.from_bytes(called_psel_data, 'big')
                       if called_psel_data else None)
        # first fully-encoded PDV carries the connect request payload
        cp_pdv_list = cp_params['user-data'][1][0]
        syntax_names = _sytax_names_from_cp_ppdu(cp_ppdu)
        cp_user_data = (
            syntax_names.get_name(
                cp_pdv_list['presentation-context-identifier']),
            cp_pdv_list['presentation-data-values'][1])
        cpa_user_data = await aio.call(validate_cb, syntax_names,
                                       cp_user_data)
        cpa_ppdu = _cpa_ppdu(syntax_names, called_psel, cpa_user_data)
        cpa_ppdu_data = _encode('CPA-PPDU', cpa_ppdu)
        return cpa_ppdu_data

    async def on_connection(cosp_conn):
        try:
            cp_ppdu = _decode('CP-type', cosp_conn.conn_req_user_data)
            cpa_ppdu = _decode('CPA-PPDU', cosp_conn.conn_res_user_data)
            syntax_names = _sytax_names_from_cp_ppdu(cp_ppdu)
            calling_psel, called_psel = _get_psels(cp_ppdu)
            conn = _create_connection(syntax_names, cosp_conn,
                                      cp_ppdu, cpa_ppdu,
                                      called_psel, calling_psel)
            await aio.call(connection_cb, conn)
        except BaseException as e:
            mlog.error("error creating new incomming connection: %s", e,
                       exc_info=e)
            # abort the underlying COSP connection; must finish even if
            # this coroutine is being cancelled
            await aio.uncancellable(_close_connection(cosp_conn,
                                                      _arp_ppdu()))

    async def wait_cosp_server_closed():
        # propagate COSP server shutdown to this server's group
        try:
            await cosp_server.wait_closed()
        finally:
            async_group.close()

    async_group = aio.Group()
    cosp_server = await cosp.listen(on_validate, on_connection, addr)
    # closing the group closes the COSP server, and vice versa
    async_group.spawn(aio.call_on_cancel, cosp_server.async_close)
    async_group.spawn(wait_cosp_server_closed)

    srv = Server()
    srv._async_group = async_group
    srv._cosp_server = cosp_server
    return srv
def __init__(self, register_cb=None, query_cb=None, server_id=1):
    """Initialize mock event backend state.

    Args:
        register_cb: optional callback invoked on register
        query_cb: optional callback invoked on query
        server_id (int): server identifier

    """
    self._register_cb = register_cb
    self._query_cb = query_cb
    self._server_id = server_id
    self._last_instance_id = 0
    self._events_cbs = util.CallbackRegistry()
    self._async_group = aio.Group()
async def _get_msgs(self):
    """Collect a batch of messages from the internal queue.

    Waits for at least one message, then keeps draining the queue until
    either `register_delay` has elapsed since the first message or the
    batch reaches `register_queue_treshold` entries.
    """
    loop = asyncio.get_running_loop()
    msgs = []
    # block until the first message arrives - it starts the batching window
    msg = await self._msg_queue.get()
    msgs.append(msg)
    start = loop.time()
    while True:
        # drain whatever is immediately available
        while not self._msg_queue.empty():
            msgs.append(self._msg_queue.get_nowait())
        timeout = register_delay - (loop.time() - start)
        if timeout <= 0:
            break
        if len(msgs) >= register_queue_treshold:
            break
        # wait for one more message, but only up to the remaining window;
        # the temporary group lets us cancel the pending get() cleanly
        async_group = aio.Group()
        try:
            f = async_group.spawn(self._msg_queue.get)
            # shield so the group (not wait_for) owns f's cancellation
            await aio.wait_for(asyncio.shield(f), timeout)
        except asyncio.TimeoutError:
            break
        finally:
            await aio.uncancellable(async_group.async_close())
            # if get() completed before being cancelled, keep its message
            # (this also covers a get() that raced with the timeout)
            if not f.cancelled():
                msgs.append(f.result())
    # final drain after the window closes
    while not self._msg_queue.empty():
        msgs.append(self._msg_queue.get_nowait())
    return msgs
async def create_syslog_server(addr: str,
                               pem: typing.Optional[Path],
                               backend: hat.syslog.server.backend.Backend
                               ) -> 'SysLogServer':
    """Create syslog server

    Args:
        addr: listening address URL (scheme ``ssl`` enables TLS)
        pem: path to PEM certificate/key file (required for ``ssl`` scheme)
        backend: backend used by the per-client loops

    """
    addr = urllib.parse.urlparse(addr)
    if addr.scheme == 'ssl':
        # NOTE(review): PROTOCOL_SSLv23 is a deprecated alias of
        # PROTOCOL_TLS; consider PROTOCOL_TLS_SERVER - kept as-is here to
        # avoid a behavior change without testing
        ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        ssl_ctx.load_cert_chain(pem)
    else:
        ssl_ctx = None
    async_group = aio.Group()
    # fix: original reused the name `srv` for both the asyncio server and
    # the returned SysLogServer - distinct names avoid the shadowing
    asyncio_srv = await asyncio.start_server(
        functools.partial(async_group.spawn, _client_loop, backend),
        addr.hostname, addr.port, ssl=ssl_ctx)
    # asyncio server is closed once the group is cancelled
    async_group.spawn(aio.call_on_cancel, _asyncio_async_close, asyncio_srv)
    mlog.debug('listening for syslog clients on %s:%s',
               addr.hostname, addr.port)
    srv = SysLogServer()
    srv._async_group = async_group
    return srv
async def async_main(conf: json.Data, ui_path: Path):
    """Async main

    Creates components (and, on Windows, a Win32 job object), the UI
    server, binds them all to one group and waits until it starts closing.
    """
    async_group = aio.Group()
    # NOTE(review): 0.1 s sleep scheduled on cancellation - presumably a
    # grace delay during shutdown; confirm intent
    async_group.spawn(aio.call_on_cancel, asyncio.sleep, 0.1)
    try:
        if sys.platform == 'win32':
            win32_job = hat.orchestrator.process.Win32Job()
            _bind_resource(async_group, win32_job)
        else:
            win32_job = None
        components = []
        for component_conf in conf['components']:
            component = hat.orchestrator.component.Component(
                component_conf, win32_job)
            _bind_resource(async_group, component)
            components.append(component)
        ui = await hat.orchestrator.ui.create(conf['ui'], ui_path,
                                              components)
        _bind_resource(async_group, ui)
        await async_group.wait_closing()
    finally:
        # shutdown must complete even if this coroutine is being cancelled
        await aio.uncancellable(async_group.async_close())
def test_group_close_empty_group():
    """A group with no tasks reports closed immediately after close()."""
    group = aio.Group()
    assert not group.is_closing
    assert not group.is_closed
    group.close()
    assert group.is_closing
    assert group.is_closed
async def test_group_async_close_subgroup_without_tasks():
    """Closing a parent group also closes its empty subgroup."""
    parent = aio.Group()
    child = parent.create_subgroup()
    await parent.async_close()
    assert parent.is_closed
    assert child.is_closed
async def _client_loop(address, subscriptions, async_run_cb):
    """Maintain a client connection, running `async_run_cb` while connected.

    Reconnects after `reconnect_delay` on connection errors; returns the
    result of `async_run_cb` once it finishes (unless cancelled).
    """
    while True:
        async_group = aio.Group()
        try:
            mlog.debug("connecting to server %s", address)
            try:
                client = await connect(address, subscriptions)
            except Exception as e:
                mlog.warning("error connecting to server: %s", e,
                             exc_info=e)
                await asyncio.sleep(reconnect_delay)
                continue
            mlog.debug("connected to server - running async_run_cb")
            # close client when this group is cancelled, and close this
            # group when the client starts closing (either direction)
            async_group.spawn(aio.call_on_cancel, client.async_close)
            async_group.spawn(aio.call_on_done, client.wait_closing(),
                              async_group.close)
            async with async_group.create_subgroup() as subgroup:
                run_future = subgroup.spawn(async_run_cb, client)
                # wait (not await) so cancellation of run_future does not
                # propagate here directly
                await asyncio.wait([run_future])
                with contextlib.suppress(asyncio.CancelledError):
                    return run_future.result()
        finally:
            # teardown must run even while being cancelled
            await aio.uncancellable(async_group.async_close())
            mlog.debug("connection to server closed")
        await asyncio.sleep(reconnect_delay)
def _create_connection(syntax_names, cosp_conn, cp_ppdu, cpa_ppdu,
                       local_psel, remote_psel):
    """Wrap an established COSP connection into a presentation Connection."""

    def connect_user_data(ppdu):
        # first fully-encoded PDV of 'user-data' carries the payload
        pdv = ppdu['normal-mode-parameters']['user-data'][1][0]
        return (syntax_names.get_name(
                    pdv['presentation-context-identifier']),
                pdv['presentation-data-values'][1])

    conn = Connection()
    conn._cosp_conn = cosp_conn
    conn._syntax_names = syntax_names
    conn._conn_req_user_data = connect_user_data(cp_ppdu)
    conn._conn_res_user_data = connect_user_data(cpa_ppdu)
    conn._info = ConnectionInfo(local_psel=local_psel,
                                remote_psel=remote_psel,
                                **cosp_conn.info._asdict())
    conn._close_ppdu = _arp_ppdu()
    conn._read_queue = aio.Queue()
    conn._async_group = aio.Group()
    conn._async_group.spawn(conn._read_loop)
    return conn
def __init__(self, conf: json.Data, logger: common.Logger):
    """Create device wrapper from configuration.

    Wires device log output and data changes into this object's storage,
    and optionally autostarts the device.
    """
    self._logger = logger
    self._async_group = aio.Group()
    # separate subgroups for run and autostart lifecycles
    self._run_subgroup = self.async_group.create_subgroup()
    self._autostart_subgroup = self.async_group.create_subgroup()
    device_logger = common.Logger()
    handler = device_logger.register_log_cb(self._log)
    # unregister the log callback once the group is cancelled
    self._async_group.spawn(aio.call_on_cancel, handler.cancel)
    self._device = devices.create_device(conf, device_logger)
    self._data = common.DataStorage({'type': conf['type'],
                                     'name': conf['name'],
                                     'autostart': conf['autostart'],
                                     'status': _Status.STOPPED.value,
                                     'data': self._device.data.data})
    # mirror device data changes into self._data['data']
    on_change = functools.partial(self._data.set, 'data')
    handler = self._device.data.register_change_cb(on_change)
    self._async_group.spawn(aio.call_on_cancel, handler.cancel)
    self._log('device created')
    self._async_group.spawn(aio.call_on_cancel, self._log,
                            'removing device')
    if self._data.data['autostart']:
        self._run_autostart()
async def create(conf):
    """Create SqliteBackend

    Args:
        conf (json.Data): configuration defined by
            ``hat://event/backends/sqlite.yaml#``

    Returns:
        SqliteBackend

    """
    db_path = Path(conf['db_path'])

    backend = SqliteBackend()
    backend._async_group = aio.Group()
    backend._last_instance_ids = {}
    backend._conn = await _create_connection(db_path)
    await backend._conn.execute_script(_db_structure)
    backend._query_pool = await _create_connection_pool(
        db_path, conf['query_pool_size'])
    backend._event_type_registry = await registry.create_event_type_registry(
        backend)
    # both the query pool and the main connection are released once the
    # backend's group is cancelled
    backend._async_group.spawn(aio.call_on_cancel,
                               backend._query_pool.async_close)
    backend._async_group.spawn(aio.call_on_cancel,
                               backend._conn.async_close)
    return backend
async def create(local_addr: typing.Optional[Address] = None,
                 remote_addr: typing.Optional[Address] = None,
                 queue_size: int = 0,
                 **kwargs) -> 'Endpoint':
    """Create new UDP endpoint

    Args:
        local_addr: local address
        remote_addr: remote address
        queue_size: receive queue max size
        kwargs: additional arguments passed to
            :meth:`asyncio.AbstractEventLoop.create_datagram_endpoint`

    """
    endpoint = Endpoint()
    endpoint._local_addr = local_addr
    endpoint._remote_addr = remote_addr
    endpoint._async_group = aio.Group()
    endpoint._queue = aio.Queue(queue_size)

    class UdpProtocol(asyncio.DatagramProtocol):
        """Forwards datagram events into the endpoint's queue and group."""

        def connection_lost(self, exc):
            endpoint._async_group.close()

        def datagram_received(self, data, addr):
            endpoint._queue.put_nowait((data, Address(addr[0], addr[1])))

    endpoint._transport, endpoint._protocol = \
        await asyncio.get_running_loop().create_datagram_endpoint(
            UdpProtocol, local_addr, remote_addr, **kwargs)
    # transport and queue are both shut down on group cancellation
    endpoint._async_group.spawn(aio.call_on_cancel,
                                endpoint._transport.close)
    endpoint._async_group.spawn(aio.call_on_cancel, endpoint._queue.close)
    return endpoint
async def create_adapter(conf, event_client):
    """Create mock adapter running a single event-loop task."""
    adapter = MockAdapter()
    adapter._event_client = event_client
    adapter._sessions = []
    adapter._group = aio.Group()
    adapter._group.spawn(adapter._event_loop)
    return adapter
async def create_client(address):
    """Create juggler client connected to the UI at `address`."""
    parsed = urllib.parse.urlparse(address)
    ws_address = f'ws://{parsed.hostname}:{parsed.port}/ws'
    client = Client()
    client._conn = await juggler.connect(ws_address)
    client._async_group = aio.Group()
    # connection is closed once the client's group is cancelled
    client._async_group.spawn(aio.call_on_cancel, client._conn.async_close)
    return client
async def test_call_on_done_example():
    """`aio.call_on_done` invokes its callback once the future completes."""
    future = asyncio.Future()
    group = aio.Group()
    group.spawn(aio.call_on_done, future, group.close)
    assert group.is_open
    future.set_result(None)
    await group.wait_closed()
    assert group.is_closed
def __init__(self):
    """Initialize monitor state: queues, component lists and callbacks."""
    self._mid = 0
    self._local_components = []
    self._global_components = []
    self._async_group = aio.Group()
    self._mid_queue = aio.Queue()
    self._global_components_queue = aio.Queue()
    self._local_components_queue = aio.Queue()
    self._change_cbs = util.CallbackRegistry()
async def test_group_spawn_async_close():
    """A task may spawn the group's own async_close without deadlocking."""

    async def self_closing_task():
        group.spawn(group.async_close)
        await asyncio.Future()

    group = aio.Group()
    group.spawn(self_closing_task)
    await group.wait_closed()
async def create_view_manager(conf: json.Data
                              ) -> 'ViewManager':
    """Create view manager"""
    manager = ViewManager()
    manager._executor = aio.create_executor()
    manager._async_group = aio.Group()
    # index view configurations by name for O(1) lookup
    manager._view_confs = {view_conf['name']: view_conf
                           for view_conf in conf['views']}
    return manager
def __init__(self, user, roles):
    """Initialize per-connection state for `user` with given `roles`."""
    self._user = user
    self._roles = roles
    # local/remote juggler data starts unset
    self._local_data = None
    self._remote_data = None
    self._local_data_queue = aio.Queue()
    self._receive_queue = aio.Queue()
    self._change_cbs = util.CallbackRegistry()
    self._async_group = aio.Group()
async def create(conf, engine):
    """Create remote module connected to the address from `conf`."""
    mod = RemoteModule()
    mod._subscription = hat.event.server.common.Subscription(
        conf['subscriptions'])
    mod._async_group = aio.Group()
    mod._conn = await chatter.connect(sbs_repo, conf['address'])
    # _on_close runs once the module's group is cancelled
    mod._async_group.spawn(aio.call_on_cancel, mod._on_close)
    mod._send('ModuleCreate', None)
    return mod
def create_backend(msgid):
    """Create mock backend carrying the given message id."""
    mock = MockBackend()
    mock._msgid = msgid
    mock._first_id = 0
    mock._last_id = 0
    mock._async_group = aio.Group()
    mock._change_cbs = util.CallbackRegistry()
    mock._msg_queue = aio.Queue()
    return mock