import itertools
import logging
import asyncio
import contextvars
from functools import partial, reduce
import copy

from ..core import State, Condition, Transition, EventData, listify
from ..core import Event, MachineError, Machine
from .nesting import HierarchicalMachine, NestedState, NestedEvent, NestedTransition, _resolve_order

_LOGGER = logging.getLogger(__name__)
_LOGGER.addHandler(logging.NullHandler())

is_subtask = contextvars.ContextVar('is_subtask', default=False)


class AsyncState(State):
    """A persistent representation of a state managed by a ``Machine``. Callback execution is done asynchronously.

    Attributes:
        name (str): State name which is also assigned to the model(s).
        on_enter (list): Callbacks awaited when a state is entered.
        on_exit (list): Callbacks awaited when a state is exited.
        ignore_invalid_triggers (bool): Indicates if unhandled/invalid triggers
            should raise an exception.
    """

    async def enter(self, event_data):
        """ Triggered when a state is entered. """
        _LOGGER.debug("%sEntering state %s. Processing callbacks...", event_data.machine.name, self.name)

import contextvars
import copy
import json
import logging
import multiprocessing
import pathlib
import re
from typing import Any, Dict, List, Set, Tuple, Union
import uuid

import mtgjson4
from mtgjson4.provider import gatherer, scryfall, tcgplayer
from mtgjson4.util import is_number

LOGGER = logging.getLogger(__name__)
SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION")


def build_output_file(
    sf_cards: List[Dict[str, Any]], set_code: str, skip_tcgplayer: bool
) -> Dict[str, Any]:
    """
    Compile the entire XYZ.json file and pass it off to be written out
    :param skip_tcgplayer: Skip building TCGPlayer stuff
    :param sf_cards: Scryfall cards
    :param set_code: Set code
    :return: Completed JSON file
    """
    output_file: Dict[str, Any] = {}

    # Get the set config from Scryfall
    set_config = scryfall.download(scryfall.SCRYFALL_API_SETS + set_code)

import configparser
import contextvars
import json
import logging
import time
from typing import Any, Dict, List, Optional

import requests
import requests_cache

import mtgjson4
from mtgjson4 import util

LOGGER = logging.getLogger(__name__)
SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION_STOCKS")
SESSION_TOKEN: contextvars.ContextVar = contextvars.ContextVar("SESSION_STOCKS")
STOCKS_DATA: contextvars.ContextVar = contextvars.ContextVar("STOCKS_DATA")

MTG_STOCKS_API_URL: str = "https://api.mtgstocks.com/api/v1/mtgjson?token={}"
MTG_STOCKS_REFERRAL_URL: str = "https://www.mtgstocks.com/prints/{}"


def __get_session() -> requests.Session:
    """
    Get or create a requests session for MTGStocks.
    :return Session data
    """
    if mtgjson4.USE_CACHE.get():
        requests_cache.install_cache(
            "stocks_cache",

import json
import contextvars

result = contextvars.ContextVar("result")


class Loader(object):
    def __init__(self, test_case):
        self.test_case = test_case

    def __enter__(self):
        with open("tests/results", "r") as results_file:
            self.results = json.load(results_file)
        result.set(self.results[self.test_case]
                   if self.test_case in self.results else None)

    def __exit__(self, exc_type, exc_val, exc_tb):
        with open("tests/results", "w") as results_file:
            # Use .get() so a brand-new test case does not raise KeyError here.
            if (self.test_case not in self.results and result.get() is not None) \
                    or result.get() != self.results.get(self.test_case):
                self.results[self.test_case] = result.get()
            results_file.write(
                json.dumps(self.results, indent=4, sort_keys=True))
        if exc_val:
            raise exc_val


def load_result(f):
    def decorated(*args, **kwargs):
        test_case = args[0].id()
        with Loader(test_case):

    def __init__(self, redis, name, timeout=None, sleep=0.1, blocking=True,
                 blocking_timeout=None, thread_local=True):
        """
        Create a new Lock instance named ``name`` using the Redis client
        supplied by ``redis``.

        ``timeout`` indicates a maximum life for the lock. By default, it will
        remain locked until release() is called. ``timeout`` can be specified
        as a float or integer, both representing the number of seconds to wait.

        ``sleep`` indicates the amount of time to sleep per loop iteration
        when the lock is in blocking mode and another client is currently
        holding the lock.

        ``blocking`` indicates whether calling ``acquire`` should block until
        the lock has been acquired or to fail immediately, causing ``acquire``
        to return False and the lock not being acquired. Defaults to True.
        Note this value can be overridden by passing a ``blocking`` argument
        to ``acquire``.

        ``blocking_timeout`` indicates the maximum amount of time in seconds
        to spend trying to acquire the lock. A value of ``None`` indicates
        continue trying forever. ``blocking_timeout`` can be specified as a
        float or integer, both representing the number of seconds to wait.

        ``thread_local`` indicates whether the lock token is placed in
        thread-local storage. By default, the token is placed in thread local
        storage so that a thread only sees its token, not a token set by
        another thread. Consider the following timeline:

            time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds.
                     thread-1 sets the token to "abc"
            time: 1, thread-2 blocks trying to acquire `my-lock` using the
                     Lock instance.
            time: 5, thread-1 has not yet completed. redis expires the lock
                     key.
            time: 5, thread-2 acquired `my-lock` now that it's available.
                     thread-2 sets the token to "xyz"
            time: 6, thread-1 finishes its work and calls release(). if the
                     token is *not* stored in thread local storage, then
                     thread-1 would see the token value as "xyz" and would be
                     able to successfully release thread-2's lock.

        In some use cases it's necessary to disable thread local storage. For
        example, if you have code where one thread acquires a lock and passes
        that lock instance to a worker thread to release later. If thread
        local storage isn't disabled in this case, the worker thread won't see
        the token set by the thread that acquired the lock. Our assumption
        is that these cases aren't common and as such default to using
        thread local storage.
        """
        self.redis = redis
        self.name = name
        self.timeout = timeout
        self.sleep = sleep
        self.blocking = blocking
        self.blocking_timeout = blocking_timeout
        self.thread_local = bool(thread_local)
        self.local = contextvars.ContextVar(
            'token', default=None) if self.thread_local else dummy()
        if self.timeout and self.sleep > self.timeout:
            raise LockError("'sleep' must be less than 'timeout'")

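# Hedged, minimal sketch of the token-storage concern described in the
# docstring above. The names and the dict-based "server" below are
# illustrative assumptions, not the library's API: release only succeeds
# when the token remembered in the current context still owns the key.
import contextvars

_owned_token = contextvars.ContextVar("owned_token", default=None)
_server = {}  # stands in for the Redis key space


def acquire_sketch(name, token):
    _server[name] = token    # the lock key now holds this owner's token
    _owned_token.set(token)  # remember our own token context-locally


def release_sketch(name):
    # Compare-before-delete: never remove a lock another context re-acquired.
    token = _owned_token.get()
    if token is not None and _server.get(name) == token:
        del _server[name]
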
import sys
import re
import contextvars
from itertools import chain
from collections import defaultdict

import numba

from .base import lib, ffi, _gb_from_name, _check
from . import types

current_accum = contextvars.ContextVar('current_accum')
current_binop = contextvars.ContextVar('current_binop')


class BinaryOp:
    _auto_binaryops = defaultdict(dict)

    __slots__ = ('name', 'binaryop', 'token')

    def __init__(self, op, typ, binaryop, udt=None, boolean=False):
        if udt is not None:
            o = ffi.new('GrB_BinaryOp*')
            udt = udt.gb_type
            lib.GrB_BinaryOp_new(
                o,
                ffi.cast('GxB_binary_function', binaryop.address),
                lib.GrB_BOOL if boolean else udt,
                udt,
                udt)
            self.binaryop = o[0]
        else:
            self.binaryop = binaryop
        self.name = '_'.join((op, typ))

            )
        else:
            for index, file in enumerate(files):
                multipart.append(
                    {
                        'name': f'file{index}',
                        'value': file.fp,
                        'filename': file.filename,
                        'content_type': 'application/octet-stream',
                    }
                )

    return ExecuteWebhookParameters(payload=payload, multipart=multipart, files=files)


async_context = contextvars.ContextVar('async_webhook_context', default=AsyncWebhookAdapter())


class PartialWebhookChannel(Hashable):
    """Represents a partial channel for webhooks.

    These are typically given for channel follower webhooks.

    .. versionadded:: 2.0

    Attributes
    -----------
    id: :class:`int`
        The partial channel's ID.
    name: :class:`str`
        The partial channel's name.

import contextvars
import inspect
import json

from django.contrib.auth import get_user_model
from django.db.models import ForeignKey, ManyToManyField, OneToOneField
from django.http import HttpRequest
from rest_framework.request import Request

from fullctl.django.inet.util import get_client_ip
from fullctl.django.models import AuditLog, Organization

User = get_user_model()

CTX_VARS = {
    "user": contextvars.ContextVar("auditlog_user"),
    "org": contextvars.ContextVar("auditlog_org"),
    "key": contextvars.ContextVar("auditlog_key"),
    "info": contextvars.ContextVar("auditlog_info"),
    "data": contextvars.ContextVar("auditlog_data"),
    "ip_address": contextvars.ContextVar("auditlog_ip_address"),
}

SENSITIVE_KEYS = [
    "password",
    "key",
    "secret",
    "token",
]

UNWANTED_KEYS = [

    def set(self, key: t.Hashable, value: t.Any) -> None:
        try:
            var = self._store[key]
        except KeyError:
            var = self._store[key] = _contextvars.ContextVar("picobox")
        var.set(value)

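    # Hedged sketch of the matching lookup for the pattern above (an assumed
    # shape, not the library's verbatim code): a missing key and an unset
    # ContextVar both surface as KeyError so callers see one failure mode.
    def get(self, key: t.Hashable) -> t.Any:
        try:
            return self._store[key].get()
        except LookupError:
            raise KeyError(key)
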
import configparser
import contextvars
import json
import logging
from typing import Any, Dict, List, Optional, Set, Tuple

import requests
import requests.adapters
import requests_cache

import mtgjson4
from mtgjson4 import util

LOGGER = logging.getLogger(__name__)

SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION_SCRYFALL")

SCRYFALL_API_SETS: str = "https://api.scryfall.com/sets/"
SCRYFALL_API_CARD: str = "https://api.scryfall.com/cards/"
SCRYFALL_VARIATIONS: str = "https://api.scryfall.com/cards/search?q=is%3Avariation%20set%3A{0}&unique=prints"
SCRYFALL_SET_SIZE: str = "https://api.scryfall.com/cards/search?order=set&q=set:{0}%20is:booster%20unique:prints"

BASE_SET_FILE_CACHE: contextvars.ContextVar = contextvars.ContextVar(
    "BASE_SET_FILE_CACHE")


def __get_session() -> requests.Session:
    """Get or create a requests session for scryfall."""
    if mtgjson4.USE_CACHE.get():
        requests_cache.install_cache(
            str(mtgjson4.PROJECT_CACHE_PATH.joinpath("scryfall_cache")),

import asyncio
import contextvars
import unittest

from test import support

support.requires_working_socket(module=True)


class MyException(Exception):
    pass


def tearDownModule():
    asyncio.set_event_loop_policy(None)


VAR = contextvars.ContextVar('VAR', default=())


class TestAsyncCase(unittest.TestCase):
    maxDiff = None

    def tearDown(self):
        # Ensure that IsolatedAsyncioTestCase instances are destroyed before
        # starting a new event loop
        support.gc_collect()

    def test_full_cycle(self):
        class Test(unittest.IsolatedAsyncioTestCase):
            def setUp(self):
                self.assertEqual(events, [])
                events.append('setUp')

        if field.type.to_short:
            val = field.type.to_short(val)
        shorts[key] = str(val)
    return shorts


@contextlib.contextmanager
def ctx(cvar, val):
    token = cvar.set(val)
    try:
        yield
    finally:
        cvar.reset(token)


amp = contextvars.ContextVar("amp")
freq = contextvars.ContextVar("freq")


@dataclass
class FullWave(Parameterized):
    freq: Parameter(
        name="frequency",
        key="f",
        default=2,
        adjacent_step=1,
        random=lambda rnd: rnd.randrange(*freq.get((1, 6, 1))),
    )
    amp: Parameter(
        name="amplitude",
        key="a",

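# Hedged usage sketch for the ctx() helper above (values are illustrative):
#
#     with ctx(freq, (2, 10, 2)):
#         freq.get((1, 6, 1))   # -> (2, 10, 2) while the block is active
#     freq.get((1, 6, 1))       # -> (1, 6, 1) again; reset(token) restored it
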
import contextvars

# Ick, global state
botinstance = contextvars.ContextVar('bot')

class SyncToAsync:
    """
    Utility class which turns a synchronous callable into an awaitable that
    runs in a threadpool. It also sets a threadlocal inside the thread so
    calls to AsyncToSync can escape it.

    If thread_sensitive is passed, the code will run in the same thread as
    any outer code. This is needed for underlying Python code that is not
    threadsafe (for example, code which handles SQLite database connections).

    If the outermost program is async (i.e. SyncToAsync is outermost), then
    this will be a dedicated single sub-thread that all sync code runs in,
    one after the other. If the outermost program is sync (i.e. AsyncToSync
    is outermost), this will just be the main thread. This is achieved by
    idling with a CurrentThreadExecutor while AsyncToSync is blocking its
    sync parent, rather than just blocking.

    If executor is passed in, that will be used instead of the loop's default
    executor. In order to pass in an executor, thread_sensitive must be set
    to False, otherwise a TypeError will be raised.
    """

    # If they've set ASGI_THREADS, update the default asyncio executor for now
    if "ASGI_THREADS" in os.environ:
        # We use get_event_loop here - not get_running_loop - as this will
        # be run at import time, and we want to update the main thread's loop.
        loop = asyncio.get_event_loop()
        loop.set_default_executor(
            ThreadPoolExecutor(max_workers=int(os.environ["ASGI_THREADS"])))

    # Maps launched threads to the coroutines that spawned them
    launch_map: "Dict[threading.Thread, asyncio.Task[object]]" = {}

    # Storage for main event loop references
    threadlocal = threading.local()

    # Single-thread executor for thread-sensitive code
    single_thread_executor = ThreadPoolExecutor(max_workers=1)

    # Maintain a contextvar for the current execution context. Optionally used
    # for thread sensitive mode.
    thread_sensitive_context: "contextvars.ContextVar[str]" = contextvars.ContextVar(
        "thread_sensitive_context")

    # Contextvar that is used to detect if the single thread executor
    # would be awaited on while already being used in the same context
    deadlock_context: "contextvars.ContextVar[bool]" = contextvars.ContextVar(
        "deadlock_context")

    # Maintaining a weak reference to the context ensures that thread pools are
    # erased once the context goes out of scope. This terminates the thread pool.
    context_to_thread_executor: "weakref.WeakKeyDictionary[object, ThreadPoolExecutor]" = (
        weakref.WeakKeyDictionary())

    def __init__(
        self,
        func: Callable[..., Any],
        thread_sensitive: bool = True,
        executor: Optional["ThreadPoolExecutor"] = None,
    ) -> None:
        if not callable(func) or _iscoroutinefunction_or_partial(func):
            raise TypeError(
                "sync_to_async can only be applied to sync functions.")
        self.func = func
        functools.update_wrapper(self, func)
        self._thread_sensitive = thread_sensitive
        self._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore
        if thread_sensitive and executor is not None:
            raise TypeError(
                "executor must not be set when thread_sensitive is True")
        self._executor = executor
        try:
            self.__self__ = func.__self__  # type: ignore
        except AttributeError:
            pass

    async def __call__(self, *args, **kwargs):
        loop = asyncio.get_running_loop()

        # Work out what thread to run the code in
        if self._thread_sensitive:
            if hasattr(AsyncToSync.executors, "current"):
                # If we have a parent sync thread above somewhere, use that
                executor = AsyncToSync.executors.current
            elif self.thread_sensitive_context and self.thread_sensitive_context.get(None):
                # If we have a way of retrieving the current context, attempt
                # to use a per-context thread pool executor
                thread_sensitive_context = self.thread_sensitive_context.get()
                if thread_sensitive_context in self.context_to_thread_executor:
                    # Re-use thread executor in current context
                    executor = self.context_to_thread_executor[thread_sensitive_context]
                else:
                    # Create new thread executor in current context
                    executor = ThreadPoolExecutor(max_workers=1)
                    self.context_to_thread_executor[thread_sensitive_context] = executor
            elif self.deadlock_context and self.deadlock_context.get(False):
                raise RuntimeError(
                    "Single thread executor already being used, would deadlock")
            else:
                # Otherwise, we run it in a fixed single thread
                executor = self.single_thread_executor
                if self.deadlock_context:
                    self.deadlock_context.set(True)
        else:
            # Use the passed in executor, or the loop's default if it is None
            executor = self._executor

        context = contextvars.copy_context()
        child = functools.partial(self.func, *args, **kwargs)
        func = context.run
        args = (child,)
        kwargs = {}

        try:
            # Run the code in the right thread
            future = loop.run_in_executor(
                executor,
                functools.partial(
                    self.thread_handler,
                    loop,
                    self.get_current_task(),
                    sys.exc_info(),
                    func,
                    *args,
                    **kwargs,
                ),
            )
            ret = await asyncio.wait_for(future, timeout=None)
        finally:
            _restore_context(context)
            if self.deadlock_context:
                self.deadlock_context.set(False)

        return ret

    def __get__(self, parent, objtype):
        """
        Include self for methods
        """
        return functools.partial(self.__call__, parent)

    def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs):
        """
        Wraps the sync application with exception handling.
        """
        # Set the threadlocal for AsyncToSync
        self.threadlocal.main_event_loop = loop
        self.threadlocal.main_event_loop_pid = os.getpid()

        # Set the task mapping (used for the locals module)
        current_thread = threading.current_thread()
        if AsyncToSync.launch_map.get(source_task) == current_thread:
            # Our parent task was launched from this same thread, so don't make
            # a launch map entry - let it shortcut over us! (and stop infinite loops)
            parent_set = False
        else:
            self.launch_map[current_thread] = source_task
            parent_set = True

        # Run the function
        try:
            # If we have an exception, run the function inside the except block
            # after raising it so exc_info is correctly populated.
            if exc_info[1]:
                try:
                    raise exc_info[1]
                except BaseException:
                    return func(*args, **kwargs)
            else:
                return func(*args, **kwargs)
        finally:
            # Only delete the launch_map parent if we set it, otherwise it is
            # from someone else.
            if parent_set:
                del self.launch_map[current_thread]

    @staticmethod
    def get_current_task():
        """
        Implementation of asyncio.current_task()
        that returns None if there is no task.
        """
        try:
            return asyncio.current_task()
        except RuntimeError:
            return None

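# Hedged usage sketch for the class above, assuming asgiref's public entry
# point ``sync_to_async`` (a thin wrapper around SyncToAsync); the file path
# is purely illustrative. thread_sensitive=True routes every wrapped call
# through one shared worker thread, and the caller's contextvars.Context is
# copied into that thread and restored afterwards.
import asyncio
from asgiref.sync import sync_to_async


def read_file(path):
    with open(path) as handle:
        return handle.read()


async def demo():
    text = await sync_to_async(read_file, thread_sensitive=True)("README.md")
    print(len(text))


asyncio.run(demo())
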
import contextvars

user_id = contextvars.ContextVar("user_id")


def f1(user, operation):
    user_id.set(user.id)
    f2()


def f2():
    f3()


def f3():
    # ContextVar.get() takes its default positionally, not as a keyword.
    print(user_id.get(None))


# Nothing has been set in this context yet, so the default (None) is printed.
f3()

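# Hedged, stdlib-only illustration of the propagation shown above: a value
# set inside a copied Context is invisible to the caller's own context.
import contextvars

request_id = contextvars.ContextVar("request_id", default="-")


def handle():
    request_id.set("req-42")
    return request_id.get()


snapshot = contextvars.copy_context()
print(snapshot.run(handle))  # "req-42" - set inside the copied context
print(request_id.get())      # "-"      - the outer context is untouched
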
    Raises
    ------
    Will raise an exception on any CUDA error.

    Returns
    -------
    (free, total) : (int, int)
    """
    free = ctypes.c_uint64()
    total = ctypes.c_uint64()
    dev_id = ctypes.c_int(device_id)
    check_call(_LIB.MXGetGPUMemoryInformation64(dev_id, ctypes.byref(free), ctypes.byref(total)))
    return (free.value, total.value)


_current = contextvars.ContextVar('namemanager', default=Context('cpu', 0))


def current_context():
    """Returns the current context.

    By default, `mx.cpu()` is used for all the computations and it can be
    overridden by using `with mx.Context(x)` statement where x can be
    cpu(device_id) or gpu(device_id).

    Examples
    --------
    >>> mx.current_context()
    cpu(0)
    >>> with mx.Context('gpu', 1):  # Context changed in `with` block.
    ...     mx.current_context()  # Computation done here will be on gpu(1).

import textwrap
import contextvars
from pprint import isrecursive
from itertools import count
from collections import abc

_recursive_ids = contextvars.ContextVar('recursive')


class TreePrinter:
    """Default printer for printree. Uses unicode characters."""
    ROOT = '┐'
    EDGE = '│  '
    BRANCH_NEXT = '├── '
    BRANCH_LAST = '└── '
    ARROW = '→'

    def __init__(self, depth: int = None, annotated: bool = False):
        """
        :param depth: If the data structure being printed is too deep, the
            next contained level is replaced by [...]. By default, there is
            no constraint on the depth of the objects being formatted.
        :param annotated: Whether or not to include annotations for branches,
            like the object type and amount of children.
        """
        self.level = 0
        self.depth = depth
        self.annotated = bool(annotated)

    @property
    def depth(self) -> int:

import os
import re
from typing import Any, Dict, List, Optional, Tuple, Union
import contextvars

from logzero import logger
import yaml

from chaoslib.types import Settings

__all__ = ["get_loaded_settings", "load_settings", "save_settings",
           "locate_settings_entry"]

CHAOSTOOLKIT_CONFIG_PATH = os.path.abspath(
    os.path.expanduser("~/.chaostoolkit/settings.yaml"))

loaded_settings = contextvars.ContextVar('loaded_settings', default={})


def load_settings(settings_path: str = CHAOSTOOLKIT_CONFIG_PATH) -> Settings:
    """
    Load chaostoolkit settings as a mapping of key/values or return `None`
    when the file could not be found.
    """
    if not os.path.exists(settings_path):
        logger.debug("The Chaos Toolkit settings file could not be found at "
                     "'{c}'.".format(c=settings_path))
        return

    with open(settings_path) as f:
        try:
            settings = yaml.safe_load(f.read())

    def get_optional_decision_event(self, event_id) -> HistoryEvent:
        index = event_id - self.next_decision_event_id
        if index < 0 or index >= len(self.decision_events):
            return None
        else:
            return self.decision_events[index]


class Status(Enum):
    CREATED = 1
    RUNNING = 2
    DONE = 3


current_task: contextvars.ContextVar = contextvars.ContextVar("current_task")


@dataclass
class ITask:
    decider: ReplayDecider = None
    task: Task = None
    status: Status = Status.CREATED
    awaited: Future = None

    def is_done(self):
        return self.status == Status.DONE

    def destroy(self):
        if self.status == Status.RUNNING:
            self.status = Status.DONE

import random
import contextvars

import trio

request_info = contextvars.ContextVar("request_info")


# Example logging function that tags each line with the request identifier.
def log(msg):
    # Read from task-local storage:
    request_tag = request_info.get()
    print("request {}: {}".format(request_tag, msg))


# An example "request handler" that does some work itself and also
# spawns some helper tasks to do some concurrent work.
async def handle_request(tag):
    # Write to task-local storage:
    request_info.set(tag)

    log("Request handler started")
    await trio.sleep(random.random())
    async with trio.open_nursery() as nursery:
        nursery.start_soon(concurrent_helper, "a")
        nursery.start_soon(concurrent_helper, "b")
    await trio.sleep(random.random())
    log("Request handler finished")


async def concurrent_helper(job):
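    # Hedged completion sketch: this body and the runner below follow the
    # pattern of the handler above; everything from this comment onward is an
    # assumption, not the original example's verbatim text.
    log("Helper task {} started".format(job))
    await trio.sleep(random.random())
    log("Helper task {} finished".format(job))


# Each helper logs under the tag of the request that spawned it, because
# nursery.start_soon() runs the child task in a copy of the parent's context.
async def main():
    async with trio.open_nursery() as nursery:
        for tag in ("x", "y"):
            nursery.start_soon(handle_request, tag)


trio.run(main)
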
import contextvars
from typing import Any, List, Optional

import __main__

__version__ = "0.1.0"

__all__ = [
    "main",
    "cmd",
    "arg",
    "env",
    "run",
    "proc",
    "to_string",
    "copy",
    "python",
    "once",
    "is_up_to_date",
]

_env = contextvars.ContextVar("makeutils_env")


def main(args: Optional[List[Any]] = None, *, module=None) -> Any:
    """Run the commandline interface

    Parameters:

    * `args`: if given a list of arguments to pass to the commandline parser
    * `module`: the module object that contains the commands. If not given,
      it defaults to the currently executing script (`__main__`)

    This function returns the return value of the command function.
    """
    if module is None:
        module = __main__

import contextvars
from typing import Callable, Optional

from ..model import (Model, build_named_node_tree, FreeRV, ObservedRV,
                     MultiObservedRV, ContextMeta)
from ..vartypes import string_types, theano_constant
from .shape_utils import (
    to_tuple,
    get_broadcastable_dist_samples,
    broadcast_dist_samples_shape,
)

__all__ = ['DensityDist', 'Distribution', 'Continuous', 'Discrete',
           'NoDistribution', 'TensorType', 'draw_values', 'generate_samples']

vectorized_ppc = contextvars.ContextVar(
    'vectorized_ppc', default=None
)  # type: contextvars.ContextVar[Optional[Callable]]


class _Unpickling:
    pass


class Distribution:
    """Statistical distribution"""

    def __new__(cls, name, *args, **kwargs):
        if name is _Unpickling:
            return object.__new__(cls)  # for pickle
        try:
            model = Model.get_context()
        except TypeError:

    def __init_subclass__(cls, **kwargs):
        cls.__context_instance = contextvars.ContextVar(
            f"instance_{cls.__name__}")
        return cls

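# Hedged sketch of how a per-subclass ContextVar like the one above can be
# used; the mixin and method names below are assumptions for illustration,
# not the original class's API.
import contextvars


class ContextInstanceMixin:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        cls._context_instance = contextvars.ContextVar(f"instance_{cls.__name__}")

    @classmethod
    def set_current(cls, instance):
        return cls._context_instance.set(instance)

    @classmethod
    def get_current(cls):
        return cls._context_instance.get(None)


class Session(ContextInstanceMixin):
    pass


s = Session()
Session.set_current(s)
assert Session.get_current() is s  # each subclass gets its own context slot
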
import contextvars
import importlib
import typing

T = typing.TypeVar('T')

MAX_WS_MESSAGE_SIZE = 2**24

devtools = None
version = None


def import_devtools(ver):
    global devtools
    global version
    version = ver
    devtools = importlib.import_module(
        "selenium.webdriver.common.devtools.v{}".format(version))


_connection_context: contextvars.ContextVar = contextvars.ContextVar(
    'connection_context')
_session_context: contextvars.ContextVar = contextvars.ContextVar(
    'session_context')


def get_connection_context(fn_name):
    '''
    Look up the current connection. If there is no current connection, raise
    a ``RuntimeError`` with a helpful message.
    '''
    try:
        return _connection_context.get()
    except LookupError:
        raise RuntimeError(
            f'{fn_name}() must be called in a connection context.')

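# Hedged sketch of the other half of this pattern (an assumption about usage,
# not the library's actual API): whoever opens a connection installs it in the
# context so nested calls can find it via get_connection_context().
import contextlib


@contextlib.contextmanager
def connection_context(connection):
    token = _connection_context.set(connection)
    try:
        yield connection
    finally:
        _connection_context.reset(token)
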
    def __init__(self):
        self._toplevel_context = contextvars.ContextVar(
            "_toplevel_context", default=None
        )
        self._context = contextvars.ContextVar("_context", default=None)

are represented by objects in this module. For example
`pygraphblas.semiring.PLUS_TIMES_INT64`.

"""

import sys
import re
import contextvars
from itertools import chain
from collections import defaultdict

from .base import lib, ffi, _check
from .monoid import Monoid
from . import types

current_semiring = contextvars.ContextVar("current_semiring")

__all__ = ["Semiring", "current_semiring"]


class Semiring:

    __slots__ = ("name", "semiring", "token", "pls", "mul", "type")

    def __init__(self, pls, mul, typ, semiring, udt=None):
        self.pls = pls
        self.mul = mul
        self.type = typ
        self.name = "_".join((pls, mul, typ))
        self.semiring = semiring
        self.token = None

"""Scryfall retrieval and processing.""" import configparser import contextvars import logging from typing import Any, Dict, List, Optional, Set, Tuple import mtgjson4 from mtgjson4 import util import requests import requests.adapters LOGGER = logging.getLogger(__name__) SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION_SCRYFALL") SCRYFALL_API_SETS: str = "https://api.scryfall.com/sets/" SCRYFALL_API_CARD: str = "https://api.scryfall.com/cards/" PROVIDER_ID = "sf" def __get_session() -> requests.Session: """Get or create a requests session for scryfall.""" session: Optional[requests.Session] = SESSION.get(None) if session is None: session = requests.Session() if mtgjson4.CONFIG_PATH.is_file(): # Open and read MTGJSON secret properties config = configparser.RawConfigParser() config.read(mtgjson4.CONFIG_PATH) header_auth = {
import contextvars
from typing import TYPE_CHECKING, Any, Type

if TYPE_CHECKING:
    from _garnet.events.handler import EventHandler

HandlerCtx: "contextvars.ContextVar[Type[EventHandler[Any]]]" = (
    contextvars.ContextVar("handler"))

"""CardHoarder retrieval and processing.""" import configparser import contextvars import datetime import logging from typing import Any, Dict, List, Optional import requests import dateutil.relativedelta import mtgjson4 from mtgjson4 import util LOGGER = logging.getLogger(__name__) SESSION: contextvars.ContextVar = contextvars.ContextVar("SESSION_CARDHOARDER") SESSION_TOKEN: contextvars.ContextVar = contextvars.ContextVar("CH_TOKEN") CH_API_URL: str = "https://www.cardhoarder.com/affiliates/pricefile/{}" CH_PRICE_DATA: Dict[str, Dict[str, str]] = {} GH_API_USER = "" GH_API_KEY = "" GH_DB_KEY = "" GH_DB_URL = "" GH_DB_FILE = "" TODAY_DATE = datetime.datetime.today().strftime("%Y-%m-%d") def __get_session() -> requests.Session:
import asyncio
import contextvars

# declare context var
request_id = contextvars.ContextVar('Id of request.')


async def some_inner_coroutine():
    # get value
    print('Processed inner coroutine of request: {}'.format(request_id.get()))


async def some_outer_coroutine(req_id):
    # set value
    request_id.set(req_id)

    await some_inner_coroutine()

    # get value
    print('Processed outer coroutine of request: {}'.format(request_id.get()))


async def main():
    tasks = []
    for req_id in range(1, 5):
        tasks.append(asyncio.create_task(some_outer_coroutine(req_id)))
    await asyncio.gather(*tasks)


if __name__ == '__main__':
    asyncio.run(main())