async def create(conf, backend_engine):
    """Create module engine.

    Args:
        conf (hat.json.Data): configuration defined by
            ``hat://event/main.yaml#/definitions/module_engine``
        backend_engine (hat.event.backend_engine.BackendEngine):
            backend engine

    Returns:
        ModuleEngine

    """
    engine = ModuleEngine()
    engine._backend = backend_engine
    engine._async_group = aio.Group()
    engine._register_queue = aio.Queue()
    engine._register_cbs = util.CallbackRegistry()

    # seed event id counters from the last event known to the backend
    event_id = await backend_engine.get_last_event_id()
    engine._server_id = event_id.server
    engine._last_instance_id = event_id.instance

    # instantiate configured modules; each module is closed when the
    # engine's group is cancelled
    engine._modules = []
    for module_conf in conf['modules']:
        py_module = importlib.import_module(module_conf['module'])
        module = await py_module.create(module_conf, engine)
        engine._async_group.spawn(aio.call_on_cancel, module.async_close)
        engine._modules.append(module)

    engine._async_group.spawn(engine._register_loop)
    return engine
async def create(conf: json.Data,
                 path: Path,
                 components: typing.List[hat.orchestrator.component.Component]
                 ) -> 'WebServer':
    """Create ui for monitoring and controlling components.

    Args:
        conf: configuration defined by
            ``hat://orchestrator.yaml#/definitions/ui``
        path: web ui directory path
        components: components

    """
    server = WebServer()
    server._components = components
    server._async_group = aio.Group()
    server._change_registry = util.CallbackRegistry()

    # observe every component for state changes
    for component in components:
        server._async_group.spawn(server._component_loop, component)

    url = urllib.parse.urlparse(conf['address'])

    def on_connection(conn):
        return server._async_group.spawn(server._conn_loop, conn)

    juggler_srv = await juggler.listen(url.hostname, url.port, on_connection,
                                       static_dir=path,
                                       autoflush_delay=autoflush_delay)
    server._async_group.spawn(aio.call_on_cancel, juggler_srv.async_close)
    return server
async def connect(conf):
    """Connect to local monitor server.

    Connection is established once chatter communication is established.

    Args:
        conf (hat.json.Data): configuration as defined by
            ``hat://monitor/client.yaml#``

    Returns:
        Client

    """
    cli = Client()
    cli._components = []
    cli._info = None
    cli._ready = None
    cli._name = conf['name']
    cli._group = conf['group']
    cli._address = conf['component_address']
    cli._change_cbs = util.CallbackRegistry()
    cli._async_group = aio.Group()

    cli._conn = await chatter.connect(common.sbs_repo,
                                      conf['monitor_address'])
    cli._async_group.spawn(aio.call_on_cancel, cli._conn.async_close)
    mlog.debug("connected to local monitor server %s",
               conf['monitor_address'])
    cli._async_group.spawn(cli._receive_loop)
    return cli
async def create(conf):
    """Create local monitor server.

    Args:
        conf (hat.json.Data): configuration defined by
            ``hat://monitor/main.yaml#/definitions/server``

    Returns:
        Server

    """
    srv = Server()
    srv._mid = 0
    srv._master = None
    srv._components = []
    srv._local_components = []
    srv._connections = {}
    srv._rank_cache = {}
    srv._default_rank = conf['default_rank']
    srv._master_change_handler = None
    srv._change_cbs = util.CallbackRegistry()
    srv._async_group = aio.Group(srv._on_exception)

    def on_connection(conn):
        return srv._async_group.spawn(srv._connection_loop, conn)

    chatter_server = await chatter.listen(sbs_repo=common.sbs_repo,
                                          address=conf['address'],
                                          on_connection_cb=on_connection)
    srv._async_group.spawn(aio.call_on_cancel, chatter_server.async_close)
    mlog.debug('monitor server listens clients on %s', conf['address'])
    return srv
async def create(conf: json.Data) -> 'Master':
    """Create master.

    Args:
        conf: configuration defined by
            ``hat://monitor/main.yaml#/definitions/master``

    """
    master = Master()
    master._last_mid = 0
    master._components = []
    master._mid_components = {}
    master._change_cbs = util.CallbackRegistry()

    # per-group blessing algorithms with a configured fallback
    master._default_algorithm = blessing.Algorithm[conf['default_algorithm']]
    master._group_algorithms = {
        name: blessing.Algorithm[algorithm_name]
        for name, algorithm_name in conf['group_algorithms'].items()}

    # subgroup is created closed; NOTE(review): presumably reopened
    # once this master becomes active - confirm against the rest of Master
    master._active_subgroup = aio.Group()
    master._active_subgroup.close()

    master._srv = await chatter.listen(sbs_repo=common.sbs_repo,
                                       address=conf['address'],
                                       connection_cb=master._create_slave)
    mlog.debug('master listens slaves on %s', conf['address'])
    return master
async def create_master(asn1_repo, context, remote_host, remote_port=161,
                        version=Version.V2C):
    """Create master.

    For v1 and v2c, context's name is used as community name.

    Args:
        asn1_repo (hat.asn1.Repository): asn1 repository
        context (Context): context
        remote_host (str): remote host name
        remote_port (int): remote udp port
        version (Version): version

    Returns:
        Master

    """
    master = Master()
    master._request_id = 1
    master._version = version
    master._context = context
    master._response_cbs = util.CallbackRegistry()
    master._serializer = Serializer(hat.asn1.ber.BerEncoder(asn1_repo))
    master._async_group = util.AsyncGroup()

    master._udp = await hat.drivers.udp.create(
        local_addr=None,
        remote_addr=(remote_host, remote_port))
    master._async_group.spawn(master._read_loop)
    return master
def __init__(self, register_cb=None, query_cb=None, server_id=1):
    # optional callbacks invoked on register/query; server_id defaults to 1
    self._server_id = server_id
    self._register_cb = register_cb
    self._query_cb = query_cb
    self._last_instance_id = 0
    self._async_group = aio.Group()
    self._events_cbs = util.CallbackRegistry()
def test_callback_registry_with_exception_cb(value_count, cb_count):

    def exception_cb(e):
        assert isinstance(e, Exception)
        raised.append(str(e))

    def cb(value):
        raise Exception(value)

    registry = util.CallbackRegistry(exception_cb)
    handlers = [registry.register(cb) for _ in range(cb_count)]

    # while registered, each notify reports one exception per callback
    raised = []
    expected = []
    for value in range(value_count):
        registry.notify(str(value))
        expected += [str(value)] * cb_count
    assert raised == expected

    for handler in handlers:
        handler.cancel()

    # after cancellation no callbacks run, so nothing is reported
    raised = []
    expected = []
    for value in range(value_count):
        registry.notify(str(value))
    assert raised == expected
async def create_backend(path: Path,
                         low_size: int,
                         high_size: int,
                         enable_archive: bool,
                         disable_journal: bool
                         ) -> 'Backend':
    """Create backend bound to the database at *path*."""
    db = await database.create_database(path, disable_journal)
    try:
        first_id = await db.get_first_id()
        last_id = await db.get_last_id()
    except BaseException:
        # close the database even if this coroutine is being cancelled
        await aio.uncancellable(db.async_close())
        raise

    backend = Backend()
    backend._db = db
    backend._path = path
    backend._first_id = first_id
    backend._last_id = last_id
    backend._low_size = low_size
    backend._high_size = high_size
    backend._enable_archive = enable_archive
    backend._disable_journal = disable_journal
    backend._async_group = aio.Group()
    backend._change_cbs = util.CallbackRegistry()
    backend._msg_queue = aio.Queue(register_queue_size)
    backend._executor = aio.create_executor()

    backend._async_group.spawn(aio.call_on_cancel, db.async_close)
    backend._async_group.spawn(backend._loop)
    mlog.debug('created backend with database %s', path)
    return backend
async def connect(host: str, port: int) -> 'Connection':
    """Open a websocket connection to the debugger endpoint at host:port."""
    session = aiohttp.ClientSession()
    try:
        version_res = await session.get(f'http://{host}:{port}/json/version')
        version_json = await version_res.json()
        ws = await session.ws_connect(version_json['webSocketDebuggerUrl'],
                                      max_msg_size=0)
    except BaseException:
        # close the session even if this coroutine is being cancelled
        await aio.uncancellable(session.close())
        raise

    conn = Connection()
    conn._ws = ws
    conn._session = session
    conn._last_id = 0
    conn._result_futures = {}
    conn._async_group = aio.Group()
    conn._event_cbs = util.CallbackRegistry()

    conn._async_group.spawn(aio.call_on_cancel, conn._on_close)
    conn._async_group.spawn(conn._receive_loop)
    return conn
def create_module_engine(register_cb=lambda _: [], query_cb=lambda _: []):
    # mock factory - default callbacks answer every request with an empty list
    mock = MockModuleEngine()
    mock._last_instance_id = 0
    mock._register_cb = register_cb
    mock._query_cb = query_cb
    mock._async_group = aio.Group()
    mock._register_event_cbs = util.CallbackRegistry()
    return mock
async def create_backend(_):
    # in-memory mock backend; the configuration argument is ignored
    backend = MemoryBackend()
    backend._entries = []
    backend._first_id = None
    backend._last_id = None
    backend._change_cbs = util.CallbackRegistry()
    backend._async_group = aio.Group(lambda e: print(f"memory: error in {e}"))
    return backend
def __init__(self):
    # NOTE(review): queues presumably let tests observe pushed state -
    # confirm against the callers of this mock
    self._mid = 0
    self._local_components = []
    self._global_components = []
    self._async_group = aio.Group()
    self._mid_queue = aio.Queue()
    self._global_components_queue = aio.Queue()
    self._local_components_queue = aio.Queue()
    self._change_cbs = util.CallbackRegistry()
def create_master(mid=0, components=None):
    """Create mock master.

    Args:
        mid: monitor id stored on the mock
        components: initial components list; a fresh empty list is used
            when omitted

    Returns:
        MockMaster

    """
    # A literal `components=[]` default would be evaluated once and shared
    # between calls (mutable default argument pitfall), so mutations made
    # through one mock would leak into later ones; use None as sentinel.
    master = MockMaster()
    master._async_group = aio.Group()
    master._mid = mid
    master._components = components if components is not None else []
    master._components_queue = aio.Queue()
    master._rank_queue = aio.Queue()
    master._change_cbs = util.CallbackRegistry()
    return master
def __init__(self, user, roles):
    # identity of the mocked client
    self._user = user
    self._roles = roles
    # data/state starts empty until set by the test
    self._local_data = None
    self._remote_data = None
    self._local_data_queue = aio.Queue()
    self._receive_queue = aio.Queue()
    self._change_cbs = util.CallbackRegistry()
    self._async_group = aio.Group()
def create_backend(msgid):
    # mock backend factory; ids start at 0
    backend = MockBackend()
    backend._msgid = msgid
    backend._first_id = 0
    backend._last_id = 0
    backend._change_cbs = util.CallbackRegistry()
    backend._msg_queue = aio.Queue()
    backend._async_group = aio.Group()
    return backend
def __init__(self, conf):
    self._conf = conf
    # delayed components wait before first start
    if conf['delay']:
        self._status = Status.DELAYED
    else:
        self._status = Status.STOPPED
    self._revive = conf['revive']

    def on_change_exception(e):
        mlog.warning("change callback exception: %s", e, exc_info=e)

    self._change_cbs = util.CallbackRegistry(exception_cb=on_change_exception)
    self._started_queue = aio.Queue()
    self._async_group = aio.Group()
    self._async_group.spawn(self._run_loop)
async def _create_listener(address):
    listener = _Listener()
    listener._cbs = set()
    listener._connection_cbs = util.CallbackRegistry()

    def on_group_exception(e):
        mlog.error("error in listener: %s", e, exc_info=e)

    listener._async_group = aio.Group(on_group_exception)

    chatter_server = await chatter.listen(common.sbs_repo, address,
                                          listener._on_connection)
    listener._async_group.spawn(aio.call_on_cancel,
                                chatter_server.async_close)
    return listener
def __init__(self, async_group, name, conn, user, roles):
    # NOTE(review): the `async_group` parameter is never stored or used -
    # a fresh aio.Group() is created below; confirm this is intentional
    self._name = name
    self._conn = conn
    self._user = user
    self._roles = roles
    self._remote_data = None
    self._receive_queue = aio.Queue()
    self._change_cbs = util.CallbackRegistry()
    self._async_group = aio.Group()
    self._async_group.spawn(self._client_loop)
def _create_master(modbus_type, async_group, reader, writer):
    # master shares the caller-provided group, reader and writer
    master = Master()
    master._modbus_type = modbus_type
    master._async_group = async_group
    master._reader = reader
    master._writer = writer
    master._send_queue = aio.Queue()
    master._receive_cbs = util.CallbackRegistry()
    for loop in (master._send_loop, master._receive_loop):
        master._async_group.spawn(loop)
    return master
def _create_local_master(conf):
    master = LocalMaster()
    master._mid = 0
    master._connections = {}
    master._global_components = []
    master._change_cbs = util.CallbackRegistry()
    master._async_group = aio.Group(exception_cb=master._on_exception)

    # per-group blessing algorithms with a configured fallback
    master._default_algorithm = blessing.Algorithm[conf['default_algorithm']]
    master._group_algorithms = {
        name: blessing.Algorithm[algorithm]
        for name, algorithm in conf['group_algorithms'].items()}
    return master
def test_callback_registry_example():
    first = []
    second = []
    registry = util.CallbackRegistry()

    registry.register(first.append)
    registry.notify(1)

    # the second callback is active only inside the with block
    with registry.register(second.append):
        registry.notify(2)
    registry.notify(3)

    assert first == [1, 2, 3]
    assert second == [2]
async def _create_remote_master(parent_address):
    master = RemoteMaster()
    master._mid = None
    master._components = []
    master._local_components = []
    master._parent_address = parent_address
    master._change_cbs = util.CallbackRegistry()
    master._async_group = aio.Group(exception_cb=master._on_exception)

    master._conn = await chatter.connect(common.sbs_repo, parent_address)
    # the first message received from the parent initializes master state
    msg_master = await master._conn.receive()
    master._process_msg_master(msg_master)

    master._async_group.spawn(aio.call_on_cancel, master._conn.async_close)
    master._async_group.spawn(master._receive_loop)
    return master
def __init__(self,
             conf: json.Data,
             win32_job: typing.Optional[
                 hat.orchestrator.process.Win32Job] = None):
    self._conf = conf
    self._win32_job = win32_job
    # delayed components wait before first start
    if conf['delay']:
        self._status = Status.DELAYED
    else:
        self._status = Status.STOPPED
    self._revive = conf['revive']

    def on_cb_exception(e):
        mlog.warning("change callback exception: %s", e, exc_info=e)

    self._change_cbs = util.CallbackRegistry(exception_cb=on_cb_exception)
    self._started_queue = aio.Queue()
    self._async_group = aio.Group()
    self._async_group.spawn(self._run_loop)
def test_callback_registry_without_exception_cb(cb_count):
    call_count = 0

    def cb():
        nonlocal call_count
        call_count += 1
        raise Exception()

    registry = util.CallbackRegistry()
    for _ in range(cb_count):
        registry.register(cb)

    # without exception_cb the first raising callback aborts notify,
    # so only one callback runs
    with pytest.raises(Exception):
        registry.notify()
    assert call_count == 1
def __init__(self, conf, logger):
    self._logger = logger
    # infinite generators of string ids: '1', '2', ...
    self._next_data_ids = (str(i) for i in itertools.count(1))
    self._next_command_ids = (str(i) for i in itertools.count(1))
    self._data_change_cbs = util.CallbackRegistry()

    data_items = {next(self._next_data_ids): item
                  for item in conf['data']}
    # commands get an explicit `value` key initialized to None
    command_items = {next(self._next_command_ids): dict(item, value=None)
                     for item in conf['commands']}
    self._data = common.DataStorage({'properties': conf['properties'],
                                     'connection_count': 0,
                                     'data': data_items,
                                     'commands': command_items})
def _create_connection(ws, parent_group=None, session=None):
    conn = Connection()
    conn._ws = ws
    conn._session = session
    conn._local_data = None
    conn._remote_data = None
    conn._local_data_synced = None
    conn._sync_local_future = None
    conn._message_queue = aio.Queue()
    conn._remote_change_cbs = util.CallbackRegistry()

    # start with an already-resolved flush future
    flush_future = asyncio.Future()
    flush_future.set_result(None)
    conn._flush_future = flush_future

    # run inside the parent's group when one is supplied
    if parent_group:
        conn._async_group = parent_group.create_subgroup()
    else:
        conn._async_group = aio.Group(exception_cb=conn._on_exception)

    conn._async_group.spawn(conn._receive_loop)
    return conn
def _create_connection(async_group, ws, autoflush_delay, session=None):
    # connection shares the caller-provided group
    conn = Connection()
    conn._ws = ws
    conn._session = session
    conn._async_group = async_group
    conn._autoflush_delay = autoflush_delay
    conn._remote_data = None
    conn._local_data = None
    conn._remote_change_cbs = util.CallbackRegistry()
    conn._message_queue = aio.Queue()
    conn._flush_queue = aio.Queue()
    conn._local_data_queue = aio.Queue()

    async_group.spawn(aio.call_on_cancel, conn._on_close)
    for loop in (conn._receive_loop, conn._sync_loop):
        async_group.spawn(loop)
    return conn
def test_callback_registry():
    counter = 0

    def on_event():
        nonlocal counter
        counter += 1

    registry = util.CallbackRegistry()
    assert counter == 0

    with registry.register(on_event):
        registry.notify()
    assert counter == 1

    # callback was cancelled when the with block exited
    registry.notify()
    assert counter == 1
async def create(conf: json.Data) -> 'Server':
    """Create local monitor server.

    Args:
        conf: configuration defined by
            ``hat://monitor/main.yaml#/definitions/server``

    """
    srv = Server()
    srv._mid = 0
    srv._last_cid = 0
    srv._rank_cache = {}
    srv._local_components = []
    srv._global_components = []
    srv._default_rank = conf['default_rank']
    srv._change_cbs = util.CallbackRegistry()

    srv._srv = await chatter.listen(sbs_repo=common.sbs_repo,
                                    address=conf['address'],
                                    connection_cb=srv._create_client)
    mlog.debug('monitor server listens clients on %s', conf['address'])
    return srv