Example #1
    def run_server(self, loop: asyncio.BaseEventLoop):
        asyncio.set_event_loop(loop)
        self.loop = loop

        handler = self.app.make_handler()
        server = loop.create_server(handler, host=self.local, port=self.port)

        loop.run_until_complete(server)
        loop.run_forever()
Example #2
    def start(self, loop: asyncio.BaseEventLoop):
        self.ws = websockets.serve(lambda ws, p: self.handler(ws, p),
                                   self.host,
                                   self.port,
                                   loop=loop)

        loop.run_until_complete(self.ws)
        log.debug("Started websocket server.")
        log.info(f"Started websocket bridge on {self.host}:{self.port}")
Example #3
def asyncio_shutdown(loop: asyncio.BaseEventLoop = asyncio.get_event_loop()) -> None:
    try:
        asyncio.runners._cancel_all_tasks(loop)
        loop.run_until_complete(loop.shutdown_asyncgens())

        if sys.version_info.minor > 8:
            loop.run_until_complete(loop.shutdown_default_executor())
    finally:
        asyncio.set_event_loop(None)
        loop.close()
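
A minimal usage sketch for the helper above, assuming asyncio_shutdown is in scope; demo_coro is a hypothetical coroutine used only to exercise the loop:

import asyncio

async def demo_coro() -> str:
    # hypothetical coroutine; does no real work
    await asyncio.sleep(0)
    return "done"

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
    print(loop.run_until_complete(demo_coro()))
finally:
    asyncio_shutdown(loop)  # helper from the example above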
Example #4
File: http.py Project: denosawr/pymine
    def start(self, loop: asyncio.BaseEventLoop):
        app = web.Application()
        app.add_routes([web.get("/{command}", lambda r: self.handler(r))])

        runner = web.AppRunner(app)
        loop.run_until_complete(runner.setup())
        self.site = web.TCPSite(runner, self.host, self.port)

        loop.run_until_complete(self.site.start())

        log.info(f"Serving HTTP bridge on port {self.port}")
Example #5
def start_background_lock_extender(lock_manager: Aioredlock, lock: Lock,
                                   loop: asyncio.BaseEventLoop) -> None:
    """Will periodically extend the duration of the lock"""
    async def extender_worker(lock_manager: Aioredlock):
        sleep_interval = 0.9 * config.REDLOCK_REFRESH_INTERVAL_SECONDS
        while True:
            await lock_manager.extend(lock,
                                      config.REDLOCK_REFRESH_INTERVAL_SECONDS)

            await asyncio.sleep(sleep_interval)

    loop.run_until_complete(extender_worker(lock_manager))
Example #6
def distribute_global_resources(
        loop: asyncio.BaseEventLoop,
        blob_client: azure.storage.blob.BlockBlobService,
        queue_client: azure.storage.queue.QueueService,
        table_client: azure.storage.table.TableService,
        ipaddress: str) -> None:
    """Distribute global services/resources
    :param asyncio.BaseEventLoop loop: event loop
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param azure.storage.queue.QueueService queue_client: queue client
    :param azure.storage.table.TableService table_client: table client
    :param str ipaddress: ip address
    """
    # set torrent session port listen
    if _ENABLE_P2P:
        global _TORRENT_SESSION
        # create torrent session
        logger.info('creating torrent session on {}:{}'.format(
            ipaddress, _DEFAULT_PORT_BEGIN))
        _TORRENT_SESSION = libtorrent.session()
        _TORRENT_SESSION.listen_on(_DEFAULT_PORT_BEGIN, _DEFAULT_PORT_END)
        _TORRENT_SESSION.stop_lsd()
        _TORRENT_SESSION.stop_upnp()
        _TORRENT_SESSION.stop_natpmp()
        # bootstrap dht nodes
        bootstrap_dht_nodes(loop, table_client, ipaddress, 0)
        _TORRENT_SESSION.start_dht()
    # get globalresources from table
    try:
        entities = table_client.query_entities(
            _STORAGE_CONTAINERS['table_globalresources'],
            filter='PartitionKey eq \'{}\''.format(_PARTITION_KEY))
    except azure.common.AzureMissingResourceHttpError:
        entities = None
    nentities = 0
    # check torrent info table for resource
    if entities is not None:
        for ent in entities:
            nentities += 1
            if _ENABLE_P2P:
                _check_resource_has_torrent(blob_client, table_client,
                                            ent['Resource'])
            else:
                with _DIRECTDL_LOCK:
                    _DIRECTDL.append(ent['Resource'])
    if nentities == 0:
        logger.info('no global resources specified')
        return
    # run async func in loop
    loop.run_until_complete(
        download_monitor_async(loop, blob_client, queue_client, table_client,
                               ipaddress, nentities))
Example #7
    def start(self, loop: asyncio.BaseEventLoop):
        ws_future = websockets.serve(
            lambda ws, p: self.handler(ws, p),
            self.host,
            self.port,
            subprotocols=["com.microsoft.minecraft.wsencrypt"],
            ping_interval=None,
            loop=loop,
        )
        self.ws = loop.run_until_complete(ws_future)
        loop.run_until_complete(
            self.set_uninitiated_handler_status(ready=False))

        log.info(f"Started Minecraft connector on {self.host}:{self.port}")
Example #8
def start_child_thread_loop(
        loop: asyncio.BaseEventLoop, break_sign: BreakSign,
        main_fetch_callback: Callable[[Union[Future, SyncFuture]], None],
        child_fetch_callback: Callable[[Union[Future, SyncFuture]], None],
        request_package_list: RequestPackageList, headers: Union[dict, None],
        total_timeout: int, concurrency_number: int) -> None:
    downloader_logger.debug('Async downloader start.')

    asyncio.set_event_loop(loop)
    main_fetch_task = loop.create_task(
        main_fetch(request_package_list, break_sign, child_fetch_callback,
                   headers, total_timeout, concurrency_number))
    main_fetch_task.add_done_callback(main_fetch_callback)
    loop.run_until_complete(main_fetch_task)
    downloader_logger.debug('Async download thread over.')
Example #9
    def feeds_check(*feed_names,
                    data_dir: str = None,
                    loop: asyncio.BaseEventLoop = None):
        """Check feeds for name validity.

        :raises: ValueError if any feed name is invalid
        """
        loop = loop or asyncio.get_event_loop()

        # remove local feeds
        distinct_feeds = list(
            filter(
                lambda f: not FeedManager.feeds_exist(
                    f, data_dir=data_dir, loop=loop), feed_names))

        futures = [
            JSONFeedMetadata.url_exists(feed, loop=loop)
            for feed in distinct_feeds
        ]
        tasks = asyncio.gather(*futures)

        results = loop.run_until_complete(tasks)
        invalid = [
            feed for valid, feed in zip(results, distinct_feeds) if not valid
        ]

        if any(invalid):
            raise ValueError(f"Invalid feeds found: {invalid}")
Example #10
def aiohttp_raw_server(event_loop: asyncio.BaseEventLoop, unused_tcp_port):
    from aiohttp.test_utils import RawTestServer

    servers = []

    async def go(handler):
        server = RawTestServer(handler, port=unused_tcp_port)
        await server.start_server()
        servers.append(server)
        return server

    yield go

    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    event_loop.run_until_complete(finalize())
Example #11
def get_ip(service_list: Iterable[Service] = None,
           loop: asyncio.BaseEventLoop = None) -> str:
    if service_list is None:
        service_list = services()

    if loop is None:
        loop = asyncio.get_event_loop()

    ip_task = _get_ip(service_list)
    ip = loop.run_until_complete(ip_task)
    return ip
Example #12
    def monitor(self, loop: asyncio.BaseEventLoop):
        """Monitor for new messages published to the subscribed Redis channel.

        When a message is received, it will be published to all
        available websockets associated with a specific user ID.

        """
        # `listen()` blocks forever so we monitor for messages
        # in a separate event loop in a new thread running as a daemon.
        asyncio.set_event_loop(loop)
        return loop.run_until_complete(self._handle_new_messages())
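
A self-contained sketch of the pattern described in the comment above, running a dedicated event loop in a daemon thread; _handle_new_messages below is a hypothetical stand-in for the blocking Redis listener:

import asyncio
import threading

async def _handle_new_messages() -> None:
    # hypothetical stand-in for the blocking listen() coroutine
    while True:
        await asyncio.sleep(1)

def monitor(loop: asyncio.AbstractEventLoop) -> None:
    # bind the loop to this thread, then block on the listener
    asyncio.set_event_loop(loop)
    loop.run_until_complete(_handle_new_messages())

monitor_loop = asyncio.new_event_loop()
threading.Thread(target=monitor, args=(monitor_loop,), daemon=True).start()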
Example #13
def distribute_global_resources(loop: asyncio.BaseEventLoop,
                                blob_client: azureblob.BlockBlobService,
                                table_client: azuretable.TableService) -> None:
    """Distribute global services/resources
    :param asyncio.BaseEventLoop loop: event loop
    :param azureblob.BlockBlobService blob_client: blob client
    :param azuretable.TableService table_client: table client
    """
    # remove node from the image table because cascade relies on it to know
    # when its work is done
    _unmerge_resources(table_client)
    # get globalresources from table
    try:
        entities = table_client.query_entities(
            _STORAGE_CONTAINERS['table_globalresources'],
            filter='PartitionKey eq \'{}\''.format(_PARTITION_KEY))
    except azure.common.AzureMissingResourceHttpError:
        entities = []
    nentities = 0
    for ent in entities:
        resource = ent['Resource']
        grtype, image = get_container_image_name_from_resource(resource)
        if grtype == _CONTAINER_MODE.name.lower():
            nentities += 1
            _DIRECTDL_QUEUE.put(resource)
            key_fingerprint = ent.get('KeyFingerprint', None)
            if key_fingerprint is not None:
                _DIRECTDL_KEY_FINGERPRINT_DICT[image] = key_fingerprint
        else:
            logger.info('skipping resource {}:'.format(resource) +
                        'not matching container mode "{}"'.format(
                            _CONTAINER_MODE.name.lower()))
    if nentities == 0:
        logger.info('no global resources specified')
        return
    logger.info('{} global resources matching container mode "{}"'.format(
        nentities, _CONTAINER_MODE.name.lower()))
    # run async func in loop
    loop.run_until_complete(
        download_monitor_async(loop, blob_client, table_client, nentities))
Example #14
def check_proxy_async(proxies: list, loop: asyncio.BaseEventLoop) -> list:
    """异步检查代理是否可用
       `proxies`: 代理列表
       `loop`: 事件循环
    """

    tasks = []
    url = "http://httpbin.org/ip"
    for proxy in proxies:
        task = asyncio.ensure_future(
            send_request(url=url, method="get", proxy=proxy))
        tasks.append(task)
    result = loop.run_until_complete(asyncio.gather(*tasks))
    check_result = [
        True if item["origin"] != "false" else False for item in result
    ]
    return check_result
Example #15
def fake_aioredis(request: FixtureRequest, loop: asyncio.BaseEventLoop):
    r: aioredis.Redis = loop.run_until_complete(
        aioredis.create_redis('redis://localhost:6379', db=2,
                              encoding='utf-8'))
    loop.run_until_complete(r.flushdb())
    redis_objects = filter(
        lambda i: i != db.RedisObject and issubclass(i, db.RedisObject),
        filter(lambda i: type(i) == type, map(lambda i: getattr(db, i),
                                              dir(db))))
    for redis_object in redis_objects:
        redis_object.register_db(r)
    yield r
    loop.run_until_complete(r.flushdb())
    r._pool_or_conn.close()
    loop.run_until_complete(r._pool_or_conn.wait_closed())
Example #16
def client(loop: asyncio.BaseEventLoop, test_server, test_client,
           credentials_checker, token_provider):
    app = aiohttp.web.Application(loop=loop)
    db = None  # TODO
    app_server = Server(app, credentials_checker, token_provider, db, loop=loop)
    loop.run_until_complete(app_server.start())
    server = loop.run_until_complete(test_server(app))
    client = loop.run_until_complete(test_client(server))

    yield client

    loop.run_until_complete(app_server.stop())
Example #17
def start_async_request(urls: list,
                        loop: asyncio.BaseEventLoop,
                        method: Optional[str] = "get",
                        text_type: Optional[str] = "json") -> list:
    """调用异步请求同时发送多条请求并返回请求的响应
       `urls`: url列表
       `loop`: 事件循环
       `method`: http请求的方法,默认为get,支持post
       `text_type`: 返回的响应的格式,默认的为json,支持text,binary
    """

    tasks = []
    for url in urls:
        task = asyncio.ensure_future(
            send_request(url, method, text_type=text_type))
        tasks.append(task)
    result = loop.run_until_complete(asyncio.gather(*tasks))
    return result
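
A minimal, self-contained sketch of the same fan-out pattern; fetch_one below is a hypothetical placeholder for the project's send_request coroutine:

import asyncio
from typing import List

async def fetch_one(url: str) -> str:
    # hypothetical placeholder for an HTTP request coroutine
    await asyncio.sleep(0)
    return f"response for {url}"

async def _gather_all(urls: List[str]) -> list:
    # schedule one task per URL and collect the results in order
    tasks = [asyncio.ensure_future(fetch_one(u)) for u in urls]
    return await asyncio.gather(*tasks)

def fetch_all(urls: List[str], loop: asyncio.AbstractEventLoop) -> list:
    # drive the whole fan-out to completion on the supplied loop
    return loop.run_until_complete(_gather_all(urls))

loop = asyncio.new_event_loop()
print(fetch_all(["http://example.com/a", "http://example.com/b"], loop))
loop.close()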
Example #18
    def feeds_exist(*feed_names,
                    data_dir: str = None,
                    loop: asyncio.BaseEventLoop = None) -> bool:
        """Check feeds whether exist locally.

        :raises: ValueError if feed does not exist.
        """
        loop = loop or asyncio.get_event_loop()

        futures = [
            JSONFeedMetadata.metadata_exist(feed, data_dir=data_dir, loop=loop)
            for feed in feed_names
        ]
        tasks = asyncio.gather(*futures)

        results = loop.run_until_complete(tasks)
        for valid, feed in zip(results, feed_names):
            if not valid:
                return False

        return True
Example #19
    def start_manager(self, loop: asyncio.BaseEventLoop):
        loop.run_until_complete(self.run_manager())
Example #20
    def _after_stop(self, loop: BaseEventLoop):
        for (hook_name, hook_handler) in self._hooks:
            if hook_name == HookTypes.after_server_stop:
                loop.run_until_complete(hook_handler(context))
Example #21
    def _thread_run(_thread_loop: asyncio.BaseEventLoop):
        # Running on thread that will host proc_loop

        async def __run():
            # Running within proc_loop
            # asyncio.get_event_loop().set_debug(True)

            try:
                # print("Server is launching", cmd, *args, flush=True)
                proc = await asyncio.create_subprocess_exec(
                    cmd, *args,
                    stdin=asyncio.subprocess.PIPE,
                    stdout=asyncio.subprocess.PIPE,
                    stderr=asyncio.subprocess.PIPE)

                async def __process_output(_out: asyncio.StreamReader, _output_callback: Callable):
                    # Runs within proc_loop
                    try:
                        while True:
                            buf = b''
                            line = None
                            while line is None:
                                try:
                                    # Handle an incomplete line output such as when
                                    # a command prompt leaves the input cursor at the end.
                                    c = await asyncio.wait_for(_out.read(1), 0.1)
                                except asyncio.futures.TimeoutError:
                                    if buf:
                                        line = buf
                                # except Exception as ex:
                                #     print("Exception", type(ex), ex, file=sys.stderr, flush=True)
                                #     pass
                                else:
                                    buf += c
                                    if c == b'\n':
                                        line = buf

                                    # Handle EOF
                                    elif c == b'':
                                        line = buf
                                        if line:
                                            # First send whatever line we have left
                                            part = partial(_output_callback, line)
                                            asyncio.run_coroutine_threadsafe(output_callback_queue.put(part),
                                                                             parent_loop)
                                        # Then send a marker saying we're done
                                        part = partial(_output_callback, None)
                                        asyncio.run_coroutine_threadsafe(output_callback_queue.put(part), parent_loop)
                                        return

                            if line:
                                part = partial(_output_callback, line)
                                asyncio.run_coroutine_threadsafe(output_callback_queue.put(part), parent_loop)
                            else:
                                break
                    except Exception as ex:
                        print("Error in __process_output:", ex.__class__.__name__, ex, file=sys.stderr, flush=True)
                        traceback.print_tb(sys.exc_info()[2])

                async def __receive_input(_input_provider: AsyncIterator[str]):
                    # Runs in parent_loop
                    # asyncio.get_event_loop().set_debug(True)
                    async for __line in _input_provider:
                        proc.stdin.write(f"{__line}\n".encode())

                    proc.stdin.write_eof()
                    # input_done_evt.set()

                tasks = []
                if provide_stdin:
                    asyncio.run_coroutine_threadsafe(__receive_input(provide_stdin), parent_loop)
                    # parent_loop_tasks.add(parent_loop.create_task(input_done_evt.wait()))
                if handle_stdout:
                    tasks.append(_thread_loop.create_task(__process_output(proc.stdout, handle_stdout)))
                if handle_stderr:
                    tasks.append(_thread_loop.create_task(__process_output(proc.stderr, handle_stderr)))

                # print("GATHERING...", flush=True)
                await asyncio.gather(*tasks)
                # print(f"GATHERED {pprint.pformat(tasks)}", flush=True)

            except Exception as ex:
                print(ex, file=sys.stderr, flush=True)
                traceback.print_tb(sys.exc_info()[2])

        asyncio.set_event_loop(_thread_loop)
        _thread_loop.run_until_complete(__run())
        parent_loop.call_soon_threadsafe(thread_done_evt.set)
        # parent_loop.call_soon_threadsafe(input_done_evt.set)
        print("Thread-proc run closed.", flush=True)
Example #22
def client(application: web.Application, loop: BaseEventLoop,
           aiohttp_client: Any, mocker: MockerFixture) -> TestClient:
    mocker.patch("pathlib.Path.iterdir", return_value=[])
    return loop.run_until_complete(aiohttp_client(application))
Example #23
def loop_in_thread(event_loop: asyncio.BaseEventLoop, function: Callable):
    asyncio.set_event_loop(event_loop)
    event_loop.run_until_complete(function())
Example #24
def qaueue_client(loop: asyncio.BaseEventLoop, aiohttp_client,
                  qaueue_app: web.Application):
    yield loop.run_until_complete(aiohttp_client(qaueue_app))
Example #25
def aloop(loop: asyncio.BaseEventLoop, f, a):
    asyncio.set_event_loop(loop)
    loop.run_until_complete(f(a))
Example #26
    def __call__(self, loop: asyncio.BaseEventLoop = None) -> Any:
        if loop is None:
            loop = asyncio.get_event_loop()

        return loop.run_until_complete(self.wait())
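
A self-contained sketch of the same synchronous-call bridge; the Waiter class below is a hypothetical stand-in for the class this method belongs to:

import asyncio
from typing import Any, Optional

class Waiter:
    # hypothetical stand-in for the enclosing class in the example above
    async def wait(self) -> Any:
        await asyncio.sleep(0)
        return "finished"

    def __call__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> Any:
        # run the async wait() to completion from synchronous code;
        # a fresh loop is created here instead of relying on get_event_loop()
        if loop is None:
            loop = asyncio.new_event_loop()
        return loop.run_until_complete(self.wait())

print(Waiter()())  # prints "finished"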