def _loop(event_loop_policy, caplog):
    basic_config(
        log_format="plain",
        stream=caplog.handler.stream,
    )
    try:
        asyncio.set_event_loop_policy(event_loop_policy)
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            yield loop
        finally:
            basic_config(
                log_format="plain",
                stream=sys.stderr,
            )
            if loop.is_closed():
                return
            with suppress(Exception):
                loop.run_until_complete(loop.shutdown_asyncgens())
            with suppress(Exception):
                loop.close()
    finally:
        asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())
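
# A usage sketch: exposing the generator above as a pytest fixture and
# consuming it from a test. The fixture names here ("loop",
# "event_loop_policy") are illustrative assumptions, not part of the
# code above.
import asyncio

import pytest


@pytest.fixture
def event_loop_policy():
    return asyncio.DefaultEventLoopPolicy()


@pytest.fixture(name="loop")
def loop_fixture(event_loop_policy, caplog):
    yield from _loop(event_loop_policy, caplog)


def test_basic(loop):
    loop.run_until_complete(asyncio.sleep(0))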
def basic_config(
    level: Union[int, str] = LogLevel.default(),
    log_format: Union[str, LogFormat] = LogFormat.default(),
    buffered: bool = True,
    buffer_size: int = 1024,
    flush_interval: Union[int, float] = 0.2,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    **kwargs: Any,
) -> None:
    loop = loop or asyncio.get_event_loop()
    unhandled_hook = UnhandledLoopHook()

    def wrap_handler(handler: logging.Handler) -> logging.Handler:
        nonlocal buffer_size, buffered, loop, unhandled_hook

        unhandled_hook.set_handler(handler)

        if buffered:
            return wrap_logging_handler(
                handler=handler,
                buffer_size=buffer_size,
                flush_interval=flush_interval,
                loop=loop,
            )
        return handler

    aiomisc_log.basic_config(
        level=level,
        log_format=log_format,
        handler_wrapper=wrap_handler,
        **kwargs,
    )
    loop.set_exception_handler(unhandled_hook)
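
# A minimal usage sketch for the wrapper above: buffered logging bound to
# an explicit event loop. Records are flushed from that loop every
# flush_interval seconds, so the loop must eventually run for flushes to
# happen. The values below are illustrative.
import asyncio
import logging

loop = asyncio.new_event_loop()
basic_config(
    level=logging.DEBUG,
    log_format="plain",
    buffered=True,
    buffer_size=2048,
    flush_interval=0.5,
    loop=loop,
)
logging.getLogger("app").debug("buffered record")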
def main() -> int:
    proto = FileIOProtocol(sys.stdin.buffer)

    log_level, log_format = proto.receive()
    basic_config(level=log_level, log_format=log_format)

    address: AddressType = proto.receive()
    cookie: bytes = proto.receive()
    worker_ids: Tuple[bytes, ...] = proto.receive()
    initializer, initializer_args, initializer_kwargs = proto.receive()
    sys.stdin.close()
    del proto

    env = dict(os.environ)
    env["AIOMISC_NO_PLUGINS"] = ""

    def create_worker() -> Worker:
        nonlocal env
        # worker_id is looked up from the enclosing scope at call time
        return Worker(
            log_level, log_format, address, cookie, worker_id, env,
            initializer, initializer_args, initializer_kwargs,
        )

    log.debug("Starting %d processes", len(worker_ids))
    for worker_id in worker_ids:
        worker = create_worker()
        PROCESSES[worker] = worker_id

    log.info("Waiting for workers")
    atexit.register(at_exit)

    try:
        while True:
            for worker in tuple(PROCESSES.keys()):
                if worker.is_running:
                    continue

                worker.close()
                log.debug(
                    "Worker PID: %d exited with status %d",
                    worker.process.pid,
                    worker.process.returncode,
                )

                # Respawn a replacement under the same worker_id
                worker_id = PROCESSES.pop(worker)
                worker = create_worker()
                PROCESSES[worker] = worker_id
            sleep(0.01)
    except KeyboardInterrupt:
        pass
    return 0
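
# Parent-side sketch of the handshake main() expects over the child's
# stdin. It assumes FileIOProtocol exposes a send() counterpart to
# receive() — an assumption about this internal API — and that
# child_stdin is the child's binary stdin pipe. The send order mirrors
# the receive order above exactly.
proto = FileIOProtocol(child_stdin)
proto.send(("INFO", "plain"))                         # log_level, log_format
proto.send(("127.0.0.1", 15000))                      # address
proto.send(os.urandom(32))                            # cookie
proto.send(tuple(os.urandom(16) for _ in range(4)))   # worker_ids
proto.send((None, (), {}))                            # initializer triple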
def _process_inner(
    function: Callable[..., Any],
    log_level: str,
    log_format: str,
    start_event: synchronize.Event,
    stop_event: synchronize.Event,
    **kwargs: Any,
) -> None:
    basic_config(level=log_level, log_format=log_format)
    start_event.set()
    try:
        function(**kwargs)
    finally:
        stop_event.set()
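
# A sketch of how a parent might drive _process_inner with the
# multiprocessing primitives it expects: the child sets start_event once
# logging is configured, runs the target, and sets stop_event on exit.
# The job function and argument values are illustrative; both job and
# _process_inner must be importable at module top level for this to
# survive pickling under the spawn start method.
import multiprocessing


def job(value: int) -> None:
    print("working on", value)


if __name__ == "__main__":
    start_event = multiprocessing.Event()
    stop_event = multiprocessing.Event()
    proc = multiprocessing.Process(
        target=_process_inner,
        args=(job, "info", "plain", start_event, stop_event),
        kwargs={"value": 42},
    )
    proc.start()
    start_event.wait()          # logging is configured in the child
    proc.join()
    assert stop_event.is_set()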
def __init__(
    self, *services: Service,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    pool_size: Optional[int] = None,
    log_level: Union[int, str] = DEFAULT_LOG_LEVEL,
    log_format: Union[str, LogFormat] = DEFAULT_LOG_FORMAT,
    log_buffering: bool = DEFAULT_AIOMISC_BUFFERING,
    log_buffer_size: int = DEFAULT_AIOMISC_BUFFER_SIZE,
    log_flush_interval: float = DEFAULT_AIOMISC_LOG_FLUSH,
    log_config: bool = DEFAULT_AIOMISC_LOG_CONFIG,
    policy: asyncio.AbstractEventLoopPolicy = event_loop_policy,
    debug: bool = DEFAULT_AIOMISC_DEBUG,
):
    """
    :param debug: set debug mode on the event loop
    :param loop: event loop to run the entrypoint in
    :param services: Service instances which will be started.
    :param pool_size: thread pool size
    :param log_level: logging level which will be configured
    :param log_format: logging format which will be configured
    :param log_buffering: if False, disable log buffering
    :param log_buffer_size: buffer size for logging
    :param log_flush_interval: interval in seconds for flushing logs
    :param log_config: if False do not configure logging
    :param policy: event loop policy to install
    """
    self._debug = debug
    self._loop = loop
    self._loop_owner = False
    self._tasks: MutableSet[asyncio.Task] = WeakSet()
    self._thread_pool: Optional[ExecutorType] = None
    self._closing: Optional[asyncio.Event] = None

    self.ctx: Optional[Context] = None
    self.log_buffer_size = log_buffer_size
    self.log_buffering = log_buffering
    self.log_config = log_config
    self.log_flush_interval = log_flush_interval
    self.log_format = log_format
    self.log_level = log_level
    self.policy = policy
    self.pool_size = pool_size
    self.services = services
    self.shutting_down = False

    self.pre_start = self.PRE_START.copy()
    self.post_start = self.POST_START.copy()
    self.pre_stop = self.PRE_STOP.copy()
    self.post_stop = self.POST_STOP.copy()

    if self.log_config:
        aiomisc_log.basic_config(
            level=self.log_level,
            log_format=self.log_format,
        )

    if self._loop is not None:
        set_current_loop(self._loop)
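
# A minimal usage sketch for this constructor; the Dummy service is
# illustrative.
import logging

import aiomisc


class Dummy(aiomisc.Service):
    # start() must complete for the entrypoint to consider the
    # service started
    async def start(self) -> None:
        logging.info("dummy service started")


with aiomisc.entrypoint(
    Dummy(), log_level="info", log_format="plain",
) as loop:
    loop.run_forever()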
def main() -> int:
    global STOPPING

    proto_stdin = FileIOProtocol(sys.stdin.buffer)

    log_level, log_format = proto_stdin.receive()
    basic_config(level=log_level, log_format=log_format)

    address: AddressType = proto_stdin.receive()
    cookie: bytes = proto_stdin.receive()
    worker_ids: Tuple[bytes, ...] = proto_stdin.receive()
    initializer, initializer_args, initializer_kwargs = proto_stdin.receive()
    worker_id: Optional[bytes]

    def run_initializer() -> None:
        # Run the user-provided initializer once before forking workers;
        # on failure, report it to the master and exit.
        if not initializer:
            return

        # noinspection PyBroadException
        try:
            initializer(*initializer_args, **initializer_kwargs)
        except BaseException as e:
            log.exception(
                "WorkerPool initializer %r has failed",
                initializer,
            )
            bad_initializer(address, cookie, worker_ids[0], e)
            raise SystemExit(0)

    sys.stdin.close()
    del proto_stdin

    run_initializer()

    signal.signal(INT_SIGNAL, handle_interrupt)
    atexit.register(at_exit)

    log.debug("Forking %d processes", len(worker_ids))
    for worker_id in worker_ids:
        fork(worker_id, cookie, address)

    log.debug("Waiting for workers")
    while not STOPPING.is_set():
        pid, status = os.wait()
        log_func = log.debug if status == 0 else log.warning
        log_func("Worker PID: %d exited with status %d", pid, status)

        if status == 0:
            continue

        # Respawn a replacement for a worker that died abnormally
        worker_id = PROCESSES.pop(pid, None)
        if worker_id is None:
            continue
        fork(worker_id, cookie, address)
    return 0
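
# Note: os.wait() above returns the raw 16-bit wait status, not an exit
# code; comparing it to 0 works only because a zero status means a clean
# exit. A sketch of decoding it properly (Python 3.9+ also offers
# os.waitstatus_to_exitcode):
import os


def decode_wait_status(status: int) -> int:
    if os.WIFEXITED(status):
        return os.WEXITSTATUS(status)   # normal termination: exit code
    if os.WIFSIGNALED(status):
        return -os.WTERMSIG(status)     # killed by signal N: returns -N
    return status                       # stopped/continued: raw status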
def main():
    arguments = Parser(
        auto_env_var_prefix="LXD_EXPORTER_",
        config_files=[
            "~/.config/lxd-exporter.ini",
            os.getenv("LXD_EXPORTER_CONFIG", "/etc/lxd-exporter.ini"),
        ],
    )
    arguments.parse_args()
    arguments.sanitize_env()

    basic_config(
        log_format=arguments.log.format,
        level=arguments.log.level,
    )

    if arguments.collector.skip_interface:
        logging.info(
            "Network interfaces starting with %r will be skipped",
            list(arguments.collector.skip_interface),
        )
        StateCollector.SKIP_INTERFACES = arguments.collector.skip_interface

    services = [
        MetricsAPI(
            address=arguments.http.address,
            port=arguments.http.port,
        ),
        CollectorService(
            interval=arguments.collector.interval,
            delay=arguments.collector.delay,
            lxd_url=arguments.lxd.url,
            lxd_cert=arguments.lxd.server_cert,
            client_cert=arguments.lxd.client_cert,
            client_key=arguments.lxd.client_key,
        ),
        SDWatchdogService(),
    ]

    with entrypoint(
        *services,
        log_format=arguments.log.format,
        log_level=arguments.log.level,
        pool_size=arguments.pool_size,
    ) as loop:
        loop.run_forever()
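
# A hypothetical ~/.config/lxd-exporter.ini for the Parser above. The
# section and option names are inferred from the attribute access in
# main() (arguments.http.address, arguments.log.level, ...) and are
# assumptions, not documented defaults:
#
#   [http]
#   address = 0.0.0.0
#   port = 8080
#
#   [log]
#   level = info
#   format = color
#
#   [collector]
#   interval = 30
#   delay = 5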
def worker_inner() -> None:
    proto = FileIOProtocol(sys.stdin.buffer)

    log_level, log_format = proto.receive()
    basic_config(level=log_level, log_format=log_format)

    address: AddressType = proto.receive()
    cookie: bytes = proto.receive()
    worker_id: bytes = proto.receive()
    initializer, initializer_args, initializer_kwargs = proto.receive()
    sys.stdin.close()
    del proto

    if initializer is not None:
        try:
            initializer(*initializer_args, **initializer_kwargs)
        except BaseException as e:
            bad_initializer(address, cookie, worker_id, e)
            raise SystemExit(0)

    del initializer
    del initializer_args
    del initializer_kwargs

    return worker(address, cookie, worker_id)
def setup_plugins() -> Mapping[str, Callable]:
    plugins = {}
    for entry_point in pkg_resources.iter_entry_points("aiomisc.plugins"):
        plugins[entry_point.name] = entry_point.load()

    logger = logging.getLogger(__name__)

    for name, plugin in plugins.items():
        try:
            logger.debug("Trying to load %r %r", name, plugin)
            plugin.setup()
        except:  # noqa
            logger.exception("Error on %s aiomisc plugin setup", name)
            raise

    return MappingProxyType(plugins)


plugins: Mapping[str, Callable] = setup_plugins()

__all__ = ("plugins",)


if __name__ == "__main__":
    from aiomisc_log import LogFormat, basic_config

    basic_config(log_format=LogFormat.plain)
    logging.info("Available %s plugins.", len(plugins))

    for name in plugins:
        print(name)
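
# A sketch of how a third-party package could register itself in the
# "aiomisc.plugins" entry-point group consumed above; the package and
# module names are hypothetical. The loaded module must expose a setup()
# callable, since the loader calls plugin.setup().
#
# setup.py of the hypothetical plugin package:
from setuptools import setup

setup(
    name="my-aiomisc-plugin",
    py_modules=["my_plugin"],
    entry_points={
        "aiomisc.plugins": ["my_plugin = my_plugin"],
    },
)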