Example #1
def run_until_complete(coro: Awaitable, loop: AbstractEventLoop = None):
    """Run a task through to completion.

    The ``.run_until_complete()`` method on asyncio event loop objects does not
    finish tasks when it receives a SIGINT/CTRL-C.  The method simply raises a
    ``KeyboardInterrupt`` exception and this usually results in warnings about
    unfinished tasks plus some "event loop closed" ``RuntimeError`` exceptions
    in pending tasks.

    This is a really annoying default behavior and this function aims at
    replacing that behavior with something that ensures the task actually runs
    through to completion.  When the ``KeyboardInterrupt`` exception is caught,
    the task is canceled and resumed to give it a chance to clean up properly.

    .. versionadded:: 0.4
    .. versionchanged:: 0.5 Can now be called with an ``asyncio.Task`` argument.

    """

    loop = loop or asyncio.get_event_loop()
    if isinstance(coro, asyncio.Task):
        task = coro
    else:
        task = loop.create_task(coro)
    try:
        loop.run_until_complete(task)
    except KeyboardInterrupt:
        task.cancel()
        try:
            loop.run_until_complete(task)
        except asyncio.CancelledError:
            return None
    return task.result()
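A minimal usage sketch for the helper above, assuming the same asyncio import as the example; the worker coroutine and its cleanup message are invented for illustration.

async def worker():
    try:
        await asyncio.sleep(3600)
    except asyncio.CancelledError:
        print("cleaning up before exit")  # runs when Ctrl-C cancels the task
        raise


if __name__ == "__main__":
    # Ctrl-C cancels the task and runs it once more so the cleanup executes.
    run_until_complete(worker())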
Example #2
    def init_requirements(self, loop: asyncio.AbstractEventLoop):
        super().init_requirements(loop)
        loop.run_until_complete(
            self.connect_to_stream()
        )

        # Save to app object to be accessible in request handlers
        self._server_instance.app.stream = self._stream
Example #3
def run(*tasks: Awaitable, loop: asyncio.AbstractEventLoop = None):
    """Helper to run tasks in the event loop.

    :param tasks: Tasks to run in the event loop.
    :param loop: The event loop. Defaults to the current event loop.
    """
    # Resolve the loop at call time; a get_event_loop() call in the signature
    # would be evaluated once at import time and pin a single loop.
    loop = loop or asyncio.get_event_loop()
    futures = [asyncio.ensure_future(task, loop=loop) for task in tasks]
    return loop.run_until_complete(asyncio.gather(*futures))
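A short illustrative call of the helper above; the fetch coroutine and the expected results are assumptions made for this sketch.

async def fetch(n: int) -> int:
    await asyncio.sleep(0.1)
    return n * 2

# gather() preserves argument order, so results line up with the inputs.
results = run(fetch(1), fetch(2), fetch(3))
assert results == [2, 4, 6]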
Example #4
def _cancel_all_tasks(loop: asyncio.AbstractEventLoop) -> None:
    to_cancel = all_tasks(loop)
    if not to_cancel:
        return

    for task in to_cancel:
        task.cancel()

    loop.run_until_complete(
        asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))

    for task in to_cancel:
        if task.cancelled():
            continue
        if task.exception() is not None:
            loop.call_exception_handler({
                'message': 'unhandled exception during asyncio.run() shutdown',
                'exception': task.exception(),
                'task': task,
            })
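For context, a hedged sketch of the shutdown sequence a helper like _cancel_all_tasks usually sits in (mirroring asyncio.run); the run_and_shutdown wrapper and main_coro parameter are placeholders, not part of the original code.

def run_and_shutdown(main_coro):
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        return loop.run_until_complete(main_coro)
    finally:
        try:
            # Let pending tasks observe their cancellation before closing.
            _cancel_all_tasks(loop)
            loop.run_until_complete(loop.shutdown_asyncgens())
        finally:
            asyncio.set_event_loop(None)
            loop.close()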
Example #5
def test_throttle_runs_coroutine(redis, event_loop: asyncio.AbstractEventLoop):
    throttle = AsyncThrottle('test', 1, redis)
    done = False

    async def do_something(a, b):
        nonlocal done
        await asyncio.sleep(0.1)
        done = True
        return a * b

    coroutine = do_something(3, 7)
    assert not done
    assert event_loop.run_until_complete(throttle.run(coroutine)) == 21
    assert done
    assert not redis.exists('redis_gt:test')
Example #6
 def test_get_invalid_sig(
     self,
     caplog: pytest.LogCaptureFixture,
     temp_event_loop: asyncio.AbstractEventLoop,
 ):
     caplog.set_level(logging.DEBUG)
     mock_reader = self.MockAsyncReader(
         b"PROXI TCP4 1.2.3.4 5.6.7.8 9 10\r\n")
     reslt = temp_event_loop.run_until_complete(get_proxy(mock_reader))
     assert isinstance(reslt, ProxyData)
     assert not reslt.valid
     expect_msg = "PROXY unrecognized signature"
     assert reslt.error == expect_msg
     expect = ("mail.debug", 30, "PROXY error: " + expect_msg)
     assert expect in caplog.record_tuples
Example #7
def create_tasks(
    loop: asyncio.AbstractEventLoop,
    *arg: Any,
    **kwargs: Any,
) -> Tasks:
    """
    .. deprecated:: 1.0
        This is a synchronous interface to `spawn_tasks`.
        It is only kept for backward compatibility, as it was exposed
        via the public interface of the framework.
    """
    warnings.warn(
        "kopf.create_tasks() is deprecated: "
        "use kopf.spawn_tasks() or kopf.operator().", DeprecationWarning)
    return loop.run_until_complete(spawn_tasks(*arg, **kwargs))
Example #8
def save_download(args: Namespace, form: FormsiteForm,
                  loop: asyncio.AbstractEventLoop):
    """Download all files uploaded to the form using the File Upload control"""

    if not args.download:
        path = Path(f"./download_{form.form_id}_{TIMESTAMP}").resolve()
    else:
        path = Path(args.download).resolve()
    # ----
    os.makedirs(path.parent.as_posix(), exist_ok=True)
    str_path = path.as_posix()
    download = form.async_downloader(
        str_path,
        max_concurrent=args.concurrent_downloads,
        timeout=args.timeout,
        max_attempts=args.retries,
        url_filter_re=args.extract_regex,
        filename_substitution_re_pat=args.download_regex,
        overwrite_existing=args.dont_overwrite_downloads,
        strip_prefix=args.strip_prefix,
        callback=download_pbar_callback,
    )
    # ----
    loop.run_until_complete(download.run())
Example #9
def api_server_fixture(loop: asyncio.AbstractEventLoop, aiohttp_client):
    sp_mock = SPMQTTMock()
    influx_mock = InfluxMQTTMock()
    mqtt_influx_mock = MQTTMockClient(InfluxMQTTMock())
    mqtt_sp_mock = MQTTMockClient(SPMQTTMock())
    api_server = APIServer()

    client = loop.run_until_complete(aiohttp_client(api_server.app))
    influx_mock_task = loop.create_task(
        mqtt_influx_mock.run_loop(config.MQTT_BROKER_IP))
    sp_mock_task = loop.create_task(
        mqtt_sp_mock.run_loop(config.MQTT_BROKER_IP))
    loop.run_until_complete(api_server.start())

    yield client, influx_mock, sp_mock

    mqtt_influx_mock.stop_loop()
    mqtt_sp_mock.stop_loop()
    loop.run_until_complete(influx_mock_task)
    loop.run_until_complete(sp_mock_task)
    loop.run_until_complete(api_server.stop())
Example #10
def _serve(loop: asyncio.AbstractEventLoop, shutdown_signal: asyncio.Event, host: str, port: int) -> None:
    app = web.Application()
    app.add_routes(routes)
    runner = web.AppRunner(app)
    asyncio.set_event_loop(loop)
    loop.run_until_complete(runner.setup())
    site = web.TCPSite(runner, host, port)
    loop.run_until_complete(site.start())
    loop.run_until_complete(shutdown_signal.wait())
Example #11
def stop_schedule(addr: Tuple[str, int],
                  name: str = '',
                  list_schedules: bool = False,
                  loop: asyncio.AbstractEventLoop = None):
    """Send a command to the server to stop running a schedule.

    Args:
        addr (tuple): Server IP address
        name (str)['']: Name of the schedule.
        list_schedules (bool)[False]: If True request and print the schedules that the server is running.
        loop (asyncio.AbstractEventLoop)[None]: Event loop to run the async command with.
    """
    if loop is None:
        loop = get_loop()
    return loop.run_until_complete(
        stop_schedule_async(addr, name, list_schedules=list_schedules))
Example #12
def update_server(addr: Tuple[str, int],
                  module_name: str = '',
                  list_schedules: bool = False,
                  loop: asyncio.AbstractEventLoop = None):
    """Send a command to the server to Update Commands.

    Args:
        addr (tuple): Server IP address
        module_name (str)['']: Module name to import/reload. If blank import/reload all modules.
        list_schedules (bool)[False]: If True request and print the schedules that the server is running.
        loop (asyncio.AbstractEventLoop)[None]: Event loop to run the async command with.
    """
    if loop is None:
        loop = get_loop()
    return loop.run_until_complete(
        update_server_async(addr, module_name, list_schedules))
Example #13
def run(loop: asyncio.AbstractEventLoop, arguments: dict):
    try:
        loop.run_until_complete(main(arguments))
        loop.run_until_complete(asyncio.sleep(0.250))
        loop.run_until_complete(loop.shutdown_asyncgens())
    
    except Exception:
        log.exception("there has been an unrecoverable error")

    finally:
        loop.close()
Example #14
def client(
    loop: asyncio.AbstractEventLoop,
    aiohttp_unused_port: Callable[[], int],
    aiohttp_client: Callable[..., Awaitable[TestClient]],
    valid_config: Dict[str, Any],
    monkeypatch,
) -> Iterator[TestClient]:
    # increase the speed to fail
    monkeypatch.setattr(auto_deploy_task, "RETRY_COUNT", 2)
    monkeypatch.setattr(auto_deploy_task, "RETRY_WAIT_SECS", 1)

    app = create(valid_config)
    server_kwargs = {"port": aiohttp_unused_port(), "host": "localhost"}

    client = loop.run_until_complete(
        aiohttp_client(app, server_kwargs=server_kwargs))
    yield client
Example #15
def block(coroutine: Coroutine, loop: asyncio.AbstractEventLoop, *, timeout):
    if loop.is_running():
        coroutine.close()
        raise CannotBlockHere(
            "It appears you have tried to use a blocking API method "
            "from within an event loop. Unfortunately this is unsupported. "
            "Instead, use the async version of the method. This commonly "
            "occurs when calling bus methods from within a bus event listener. "
            "In this case the only option is to define your listeners as async."
        )
    try:
        val = loop.run_until_complete(
            asyncio.wait_for(coroutine, timeout=timeout, loop=loop))
    except Exception as e:
        # The intention here is to get sensible stack traces from exceptions within blocking calls
        raise e
    return val
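An illustrative call of block(); the add coroutine and the timeout value are made up, and this assumes a Python version where asyncio.wait_for still accepts a loop= argument (it was removed in 3.10).

async def add(a: int, b: int) -> int:
    await asyncio.sleep(0.1)
    return a + b

loop = asyncio.new_event_loop()
print(block(add(1, 2), loop, timeout=5))  # prints 3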
Example #16
def send(data: Any, config_name: str, *, loop: AbstractEventLoop):
    async def _send():
        try:
            async with factory.get_sender(config_name) as queue:
                await queue.send(data=data)

        except (NotFound, CannotImport) as ex:
            print_err(str(ex))
            return -1

        except QueueNotFound:
            print_err("Queue not found.")
            return -2

        return 0

    return loop.run_until_complete(_send())
Example #17
def test_user_create_by_superuser(client: TestClient, event_loop: EventLoop):
    email = random_email()
    password = random_lower_string()
    is_superuser = True
    is_active = False
    user_in = UserCreateBySuperuser(
        email=email,
        password=password,
        is_superuser=is_superuser,
        is_active=is_active,
    )
    user = event_loop.run_until_complete(
        crud.user.create_by_superuser(user_in))
    assert user.email == email
    assert hasattr(user, "password_hash")
    assert user.is_superuser == is_superuser
    assert user.is_active == is_active
Example #18
def client(
    event_loop: asyncio.AbstractEventLoop,
    postgres_session: sa.orm.session.Session,
    rabbit_service: RabbitSettings,
    redis_settings: RedisSettings,
    simcore_services_ready: None,
    aiohttp_client: Callable,
    app_config: Dict[str, Any],  ## waits until swarm with *_services are up
    mocker: MockerFixture,
    monkeypatch_setenv_from_app_config: Callable,
) -> TestClient:

    cfg = deepcopy(app_config)

    assert cfg["rest"]["version"] == API_VTAG

    cfg["storage"]["enabled"] = False
    cfg["main"]["testing"] = True

    # fake config
    monkeypatch_setenv_from_app_config(cfg)
    app = create_safe_application(app_config)

    assert setup_settings(app)
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_diagnostics(app)
    setup_login(app)
    setup_users(app)
    setup_socketio(app)
    setup_projects(app)
    setup_computation(app)
    setup_director_v2(app)
    setup_resource_manager(app)
    # no garbage collector

    return event_loop.run_until_complete(
        aiohttp_client(
            app,
            server_kwargs={
                "port": app_config["main"]["port"],
                "host": app_config["main"]["host"],
            },
        ))
Example #19
def run_sendria_servers(loop: asyncio.AbstractEventLoop) -> NoReturn:
    # initialize db
    loop.run_until_complete(db.setup(config.CONFIG.db))

    # initialize and start webhooks
    callbacks_enabled = callback.setup(
        debug_mode=config.CONFIG.debug,
        callback_webhook_url=config.CONFIG.callback_webhook_url,
        callback_webhook_method=config.CONFIG.callback_webhook_method,
        callback_webhook_auth=config.CONFIG.callback_webhook_auth,
    )
    if callbacks_enabled:
        loop.create_task(callback.send_messages())

    # initialize and start message saver
    loop.create_task(db.message_saver())

    # start smtp server
    smtp.run(config.CONFIG.smtp_ip, config.CONFIG.smtp_port, config.CONFIG.smtp_auth, config.CONFIG.smtp_ident, config.CONFIG.debug)
    logger.info('smtp server started', host=config.CONFIG.smtp_ip, port=config.CONFIG.smtp_port,
        auth='enabled' if config.CONFIG.smtp_auth else 'disabled',
        password_file=str(config.CONFIG.smtp_auth.path) if config.CONFIG.smtp_auth else None,
        url=f'smtp://{config.CONFIG.smtp_ip}:{config.CONFIG.smtp_port}',
    )

    # initialize and start web server
    app = http.setup()

    runner = aiohttp.web.AppRunner(app)
    loop.run_until_complete(runner.setup())

    site = aiohttp.web.TCPSite(runner, host=config.CONFIG.http_ip, port=config.CONFIG.http_port)
    server = site.start()
    loop.run_until_complete(server)

    logger.info('http server started',
        host=config.CONFIG.http_ip, port=config.CONFIG.http_port,
        url=f'http://{config.CONFIG.http_ip}:{config.CONFIG.http_port}',
        auth='enabled' if config.CONFIG.http_auth else 'disabled',
        password_file=str(config.CONFIG.http_auth.path) if config.CONFIG.http_auth else None,
    )

    # prepare for clean terminate
    async def _initialize_aiohttp_services__stop() -> NoReturn:
        for ws in set(app['websockets']):
            await ws.close(code=aiohttp.WSCloseCode.GOING_AWAY, message='Server shutdown')
        await app.shutdown()

    SHUTDOWN.append(_initialize_aiohttp_services__stop())

    signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT)
    for s in signals:
        loop.add_signal_handler(s, lambda s=s: asyncio.create_task(terminate_server(s, loop)))
Example #20
    def run(self, event_loop: AbstractEventLoop = None):
        # Configure the logging system
        if isinstance(self.logging_config, dict):
            logging.config.dictConfig(self.logging_config)
        elif self.logging_config:
            logging.basicConfig(level=logging.INFO)

        # Assign a new default executor with the given max worker thread limit
        event_loop = event_loop or asyncio.get_event_loop()
        event_loop.set_default_executor(ThreadPoolExecutor(self.max_threads))

        # Create the application context
        context = self.create_context()

        try:
            # Start all the components and run the loop until they've finished
            self.logger.info("Starting components")
            coroutines = (component.start(context) for component in self.components)
            coroutines = [coro for coro in coroutines if coro is not None]
            event_loop.run_until_complete(asyncio.gather(*coroutines))
            self.logger.info("All components started")

            # Run the application's custom startup code
            coro = self.start(context)
            if coro is not None:
                event_loop.run_until_complete(coro)

            # Run all the application context's start callbacks
            event_loop.run_until_complete(context.run_callbacks(ContextEventType.started))
            self.logger.info("Application started")
        except Exception as exc:
            self.logger.exception("Error during application startup")
            context.exception = exc
        else:
            # Finally, run the event loop until the process is terminated or Ctrl+C is pressed
            try:
                event_loop.run_forever()
            except (KeyboardInterrupt, SystemExit):
                pass

        event_loop.run_until_complete(context.run_callbacks(ContextEventType.finished))
        event_loop.close()
        self.logger.info("Application stopped")
Example #21
def client(
    mock_garbage_collector_task,
    loop: asyncio.AbstractEventLoop,
    aiohttp_client: Callable,
    app_cfg: Dict[str, Any],
    postgres_db: sa.engine.Engine,
    mock_orphaned_services,
    redis_client: Redis,
    monkeypatch_setenv_from_app_config: Callable,
) -> TestClient:
    cfg = deepcopy(app_cfg)

    assert cfg["rest"]["version"] == API_VERSION
    assert cfg["rest"]["enabled"]
    cfg["projects"]["enabled"] = True
    cfg["director"]["enabled"] = True
    cfg[config.CONFIG_SECTION_NAME][
        "garbage_collection_interval_seconds"] = GARBAGE_COLLECTOR_INTERVAL  # increase speed of garbage collection

    # fake config
    monkeypatch_setenv_from_app_config(cfg)

    app = create_safe_application(cfg)

    # activates only security+restAPI sub-modules
    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_login(app)
    setup_users(app)
    setup_socketio(app)
    setup_projects(app)
    setup_director(app)
    setup_director_v2(app)
    assert setup_resource_manager(app)

    return loop.run_until_complete(
        aiohttp_client(
            app,
            server_kwargs={
                "port": cfg["main"]["port"],
                "host": cfg["main"]["host"]
            },
        ))
Example #22
def client(
    loop: asyncio.AbstractEventLoop,
    aiohttp_client: Callable,
    app_config: Dict[str, Any],  ## waits until swarm with *_services are up
    rabbit_service:
    RabbitConfig,  ## waits until rabbit is responsive and set env vars
    postgres_db: sa.engine.Engine,
    mocker: MockerFixture,
    monkeypatch_setenv_from_app_config: Callable,
):
    app_config["storage"]["enabled"] = False

    monkeypatch_setenv_from_app_config(app_config)
    app = create_safe_application(app_config)

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_diagnostics(app)
    setup_login(app)
    setup_projects(app)
    setup_computation(app)
    setup_director_v2(app)
    setup_socketio(app)

    # GC not relevant for these test-suite,
    mocker.patch(
        "simcore_service_webserver.resource_manager.module_setup.setup_garbage_collector",
        side_effect=lambda app: print(
            f"PATCH @{__name__}: "
            "Garbage collector disabled. "
            "Mock bypasses setup_garbage_collector to skip initializing the GC"
        ),
    )
    setup_resource_manager(app)

    return loop.run_until_complete(
        aiohttp_client(
            app,
            server_kwargs={
                "port": app_config["main"]["port"],
                "host": app_config["main"]["host"],
            },
        ))
Example #23
def receiver(config_name: str, *, loop: AbstractEventLoop):
    async def _receiver():
        try:
            async with factory.get_receiver(config_name) as queue:
                queue.new_message.bind(on_new_message)
                await queue.listen()

        except (NotFound, CannotImport) as ex:
            print_err(str(ex))
            return -1

        except QueueNotFound:
            print_err("Queue not found.")
            return -2

        return 0

    return loop.run_until_complete(_receiver())
Example #24
def run_command(addr: Tuple[str, int],
                callback_name: str = '',
                *args,
                loop: asyncio.AbstractEventLoop = None,
                **kwargs):
    """Send a command to the server to run a registered callback function with the given arguments.

    Args:
        addr (tuple): Server IP address
        callback_name (str)['']: Name of the registered callback function.
        *args: Positional arguments for the callback function.
        **kwargs: Keyword Arguments for the callback function.
        loop (asyncio.AbstractEventLoop)[None]: Event loop to run the async command with.
    """
    if loop is None:
        loop = get_loop()
    return loop.run_until_complete(
        run_command_async(addr, callback_name, *args, **kwargs))
Example #25
def hyper_threading_guard(loop: asyncio.AbstractEventLoop, ht_flag):
    async def _gather_online_core(online_file: Path, core_id: int):
        if online_file.is_file():
            async with aiofiles.open(online_file) as afp:
                line = await afp.readline()
                if line.strip() == '1':
                    return core_id

    jobs: List[Coroutine] = []

    cur_core_id = 1
    while True:
        path = Path(f'/sys/devices/system/cpu/cpu{cur_core_id}')

        if not path.is_dir():
            break

        else:
            jobs.append(_gather_online_core(path / 'online', cur_core_id))

        cur_core_id += 1

    online_core: Set[int] = set(filter(None.__ne__, loop.run_until_complete(asyncio.gather(*jobs))))

    if not ht_flag:
        print('disabling Hyper-Threading...')

        logical_cores: Set[int] = set()

        for cur_core_id in online_core | {0}:
            with open(f'/sys/devices/system/cpu/cpu{cur_core_id}/topology/thread_siblings_list') as fp:
                for core in fp.readline().strip().split(',')[1:]:
                    logical_cores.add(int(core))

        files_to_write = ('/sys/devices/system/cpu/cpu{}/online'.format(core_id) for core_id in logical_cores)
        subprocess.run(('sudo', 'tee', *files_to_write), input="0", encoding='UTF-8', stdout=subprocess.DEVNULL)

        print('Hyper-Threading is disabled.')

    yield

    files_to_write = ('/sys/devices/system/cpu/cpu{}/online'.format(core_id) for core_id in online_core)

    subprocess.run(('sudo', 'tee', *files_to_write), input="1", encoding='UTF-8', stdout=subprocess.DEVNULL)
Example #26
 def __init__(self, host: str, port: int, database: str, user: str,
              password: str, p_loop: AbstractEventLoop) -> None:
     self._host = host
     self._port = port
     self._database = database
     self._user = user
     self._password = password
     self._counter = 0
     self._step = 50000
     self._step_counter = 0
     self._pool = p_loop.run_until_complete(
         asyncpg.create_pool(user=user,
                             password=password,
                             database=database,
                             host=host,
                             port=port,
                             loop=p_loop,
                             min_size=15,
                             max_size=15))
Example #27
def test_create_server_saved_to_db(
        client: TestClient, event_loop: asyncio.AbstractEventLoop):  # nosec
    response = client.post("/api/rabbit/server",
                           json={
                               "host": "test.com",
                               "name": "server_name",
                               "password": "******"
                           })
    assert response.status_code == 200
    data = response.json()
    assert "id" in data
    server_id = data["id"]

    async def get_server_by_db():
        server = await RabbitServer.get(id=server_id)
        return server

    server_obj = event_loop.run_until_complete(get_server_by_db())
    assert server_obj.id == server_id
Example #28
def mock_test_client(
        loop: AbstractEventLoop,
        sanic_test_app: Sanic) -> Generator[None, None, AbstractEventLoop]:
    """Fixture for starting server in the event loop.

    Args:
      loop: Fixture of mocked ``AbstractEventLoop`` object.
      sanic_test_app: Fixture of mocked ``Sanic`` object.

    Returns:
      An event loop with a running server.

    """
    return loop.run_until_complete(
        sanic_test_app.create_server(
            host=get_local_ip_address(),
            port=consts.TEST_SERVER_PORT,
            return_asyncio_server=True,
        ))
Example #29
 def __init__(
         self, client: httpx.AsyncClient = None,
         proxy: Optional[Union[str, Dict[str, str]]] = None,
         ua: Optional[str] = None,
         cookies: Optional[Dict[str, str]] = None,
         local_client: httpx.AsyncClient = local_client,
         loop: asyncio.AbstractEventLoop = worker_loop,
 ):
     self.client = client or loop.run_until_complete(
         build_client_with_clearance(ua, cookies, proxies=proxy, test=False))
     if client:
         self.proxy = next(iter(client._mounts.values()))._pool._proxy_url
     elif type(proxy) is str:
         self.proxy = proxy
     elif type(proxy) is dict:
         self.proxy = next(iter(proxy.values()))
     self.local_client = local_client
     self.loop = loop
     self.fingerprint = md5(('TiLakPanColNoRhChNeIt' + self.proxy).encode()).hexdigest()
Example #30
    def __init__(
        self, loop: asyncio.AbstractEventLoop, pickle_file_path: str, **kwargs
    ):
        self._pickle_file_path = pickle_file_path
        logging.info("Creating a connection pool for DB")
        self.db_pool = loop.run_until_complete(asyncpg.create_pool(**kwargs))
        logging.info("Pooled")

        # Boolean indicating if a launch notification has been sent for the current
        # schedule.
        self._launch_embed_for_current_schedule_sent: bool = False
        # A dict of the most previously sent schedule embed (for comparison).
        self._previous_schedule_embed_dict: Dict = {}

        try:
            with open(self._pickle_file_path, "rb") as f_in:
                tmp = pickle.load(f_in)
            self.__dict__.update(tmp)
            logging.info(f"Updated self.__dict__ from {self._pickle_file_path}")
        except FileNotFoundError:
            logging.info(f"Could not find file at location: {self._pickle_file_path}")
Example #31
def client(
    event_loop: asyncio.AbstractEventLoop,
    aiohttp_client: Callable,
    app_config: Dict[str, Any],  ## waits until swarm with *_services are up
    rabbit_service: RabbitSettings,  ## waits until rabbit is responsive and set env vars
    postgres_db: sa.engine.Engine,
    mocker: MockerFixture,
    monkeypatch_setenv_from_app_config: Callable,
):
    app_config["storage"]["enabled"] = False

    monkeypatch_setenv_from_app_config(app_config)
    app = create_safe_application(app_config)

    assert setup_settings(app)
    assert app[APP_SETTINGS_KEY].WEBSERVER_COMPUTATION

    setup_db(app)
    setup_session(app)
    setup_security(app)
    setup_rest(app)
    setup_diagnostics(app)
    setup_login(app)
    setup_projects(app)
    setup_computation(app)
    setup_director_v2(app)
    setup_socketio(app)
    setup_resource_manager(app)
    # GC not relevant for these test-suite,

    return event_loop.run_until_complete(
        aiohttp_client(
            app,
            server_kwargs={
                "port": app_config["main"]["port"],
                "host": app_config["main"]["host"],
            },
        )
    )
Example #32
    def scan_for_device(self, loop: asyncio.AbstractEventLoop) -> bool:
        """
         Scan for device and set BLE address

        :param asyncio.AbstractEventLoop loop: Main Asyncio loop to use
        :return: True, if device found. False, else.
        :rtype: bool
        """
        # Warn, if address is already present
        if self.ble_address:
            self.logger.warning("Scanning for device, but address already present: %s", self.ble_address)

        # Start scanning process
        self.logger.info("Scanning for BLE Device by Name: %s", self.name)

        # Attempt to find device
        self.loop_status = LoopStatus.ACTIVE
        escape_counter: int = 0  # Give up after 5 tries
        result: bool = True
        while not self.ble_address and self.loop_status == LoopStatus.ACTIVE:
            # Use Asyncio loop to find bike
            ble_address = loop.run_until_complete(self.scan())

            if ble_address:
                self.ble_address = ble_address
                self.logger.debug("Setting device (%s) to BLE address (%s)", self.name, self.ble_address)
                self.loop_status = LoopStatus.INACTIVE
            else:
                if escape_counter < 4:
                    escape_counter += 1
                    self.logger.warning("Unable to find device (%s). Retrying...", self.name)
                else:
                    self.logger.warning("Giving up: unable to find device (%s).", self.name)
                    result = False
                    break

        self.loop_status = LoopStatus.INACTIVE
        return result
Example #33
File: bot.py Project: Reliku/hikari
    def _destroy_loop(loop: asyncio.AbstractEventLoop) -> None:
        async def murder(future: asyncio.Future[typing.Any]) -> None:
            # These include _GatheringFuture which must be awaited if the children
            # throw an asyncio.CancelledError, otherwise it will spam logs with warnings
            # about exceptions not being retrieved before GC.
            try:
                _LOGGER.log(ux.TRACE, "killing %s", future)
                future.cancel()
                await future
            except asyncio.CancelledError:
                pass
            except Exception as ex:
                loop.call_exception_handler({
                    "message":
                    "Future raised unexpected exception after requesting cancellation",
                    "exception": ex,
                    "future": future,
                })

        remaining_tasks = [
            t for t in asyncio.all_tasks(loop)
            if not t.cancelled() and not t.done()
        ]

        if remaining_tasks:
            _LOGGER.debug("terminating %s remaining tasks forcefully",
                          len(remaining_tasks))
            loop.run_until_complete(
                asyncio.gather(*(murder(task) for task in remaining_tasks)))
        else:
            _LOGGER.debug("No remaining tasks exist, good job!")

        if sys.version_info >= (3, 9):
            _LOGGER.debug("shutting down default executor")
            with contextlib.suppress(NotImplementedError):
                # This seems to raise a NotImplementedError when running with uvloop.
                loop.run_until_complete(loop.shutdown_default_executor())

        _LOGGER.debug("shutting down asyncgens")
        loop.run_until_complete(loop.shutdown_asyncgens())

        _LOGGER.debug("closing event loop")
        loop.close()
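A hedged sketch of where a helper like _destroy_loop typically runs: after the main entry point has finished and just before the process exits. The main() coroutine is a placeholder, and the helper is assumed to be reachable as a plain function here.

loop = asyncio.new_event_loop()
try:
    loop.run_until_complete(main())
finally:
    # Cancel leftovers, drain asyncgens and the executor, then close the loop.
    _destroy_loop(loop)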
Example #34
 def bruh(self, encoder, loop: asyncio.AbstractEventLoop):
     asyncio.set_event_loop(loop)
     loop.run_until_complete(encoder.encode())
     loop.close()
Example #35
def model_server(loop: asyncio.AbstractEventLoop, sanic_client: Callable,
                 trained_moodbot_path: Text) -> TestClient:
    app = model_server_app(trained_moodbot_path, model_hash="somehash")
    return loop.run_until_complete(sanic_client(app))
Example #36
def redis_pool(event_loop: asyncio.AbstractEventLoop, redis_port: int):
    redis_pool = event_loop.run_until_complete(
        aioredis.create_pool(('localhost', redis_port)))
    yield redis_pool
    redis_pool.close()
    event_loop.run_until_complete(redis_pool.wait_closed())
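A minimal sketch of a test that could consume the fixture above; the test body and key names are assumptions, and the "with await pool" connection-checkout idiom is the aioredis 1.x API matching the create_pool call in the fixture.

def test_set_and_get(redis_pool, event_loop: asyncio.AbstractEventLoop):
    async def roundtrip():
        with await redis_pool as conn:
            await conn.execute('set', 'key', 'value')
            return await conn.execute('get', 'key')

    assert event_loop.run_until_complete(roundtrip()) == b'value'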