Example 1
    def __init__(self, robots):
        self.q = asyncio.PriorityQueue()
        self.ridealong = {}
        self.awaiting_work = 0
        self.maxhostqps = None
        self.delta_t = None
        self.next_fetch = cachetools.ttl.TTLCache(
            10000, 10)  # 10 seconds good enough for QPS=0.1 and up
        self.frozen_until = cachetools.ttl.TTLCache(
            10000, 10)  # 10 seconds is longer than our typical delay
        self.maxhostqps = float(config.read('Crawl', 'MaxHostQPS'))
        self.delta_t = 1. / self.maxhostqps
        self.initialize_budgets()
        self.robots = robots

        memory.register_debug(self.memory)
Example 2
    def __init__(self, max_repeat=3, sleep_time=0, max_deep=0, save_pipe=sys.stdout, url_filter=None, loop=None):
        """
        constructor
        """
        BasePool.__init__(self, url_filter=url_filter)

        self.max_repeat = max_repeat        # default: 3, maximum repeat fetching time for a url
        self.sleep_time = sleep_time        # default: 0, sleeping time after a fetching for a url
        self.max_deep = max_deep            # default: 0, if -1, spider will not stop until all urls are fetched
        self.save_pip = save_pipe           # default: sys.stdout, also can be a file handler

        self.loop = loop or asyncio.get_event_loop()            # event_loop from parameter or call get_event_loop()
        self.queue = asyncio.PriorityQueue(loop=self.loop)      # (priority, url, keys, deep, repeat)

        self.start_time = time.time()       # start time of this pool
        return
Example 3
    def __init__(self, name):
        """BaseTask for the all TaskManager Task classes.

        Args:
            name (str): Name of the Task.
        """

        self.name = name
        self._task = None
        self._tm = TaskManager()
        self._loop = self._tm.get_event_loop()
        self._queue = asyncio.PriorityQueue()
        self._context = {
            'tq': self._queue,
            'eliot_task': current_action().serialize_task_id()
        }
        self.logger = logging.getLogger('aiotaskmgr.TaskManager.Task')
Example 4
    def __init__(self, *args, loop=None, **kwargs):
        kwargs.pop('shard_id', None)
        self.shard_ids = kwargs.pop('shard_ids', None)
        super().__init__(*args, loop=loop, **kwargs)

        if self.shard_ids is not None:
            if self.shard_count is None:
                raise ClientException('When passing manual shard_ids, you must provide a shard_count.')
            elif not isinstance(self.shard_ids, (list, tuple)):
                raise ClientException('shard_ids parameter must be a list or a tuple.')

        # instead of a single websocket, we have multiple
        # the key is the shard_id
        self.__shards = {}
        self._connection._get_websocket = self._get_websocket
        self._connection._get_client = lambda: self
        self.__queue = asyncio.PriorityQueue()
Example 5
    def __init__(self,
                 users=None,
                 *,
                 loop=None,
                 block_size=DEFAULT_BLOCK_SIZE,
                 socket_timeout=None,
                 idle_timeout=None,
                 wait_future_timeout=1,
                 path_timeout=None,
                 path_io_factory=pathio.PathIO,
                 maximum_connections=None,
                 read_speed_limit=None,
                 write_speed_limit=None,
                 read_speed_limit_per_connection=None,
                 write_speed_limit_per_connection=None,
                 data_ports=None,
                 encoding="utf-8"):
        self.loop = loop or asyncio.get_event_loop()
        self.block_size = block_size
        self.socket_timeout = socket_timeout
        self.idle_timeout = idle_timeout
        self.wait_future_timeout = wait_future_timeout
        self.path_io_factory = path_io_factory
        self.path_timeout = path_timeout
        if data_ports is not None:
            self.available_data_ports = asyncio.PriorityQueue(loop=self.loop)
            for data_port in data_ports:
                self.available_data_ports.put_nowait((0, data_port))
        else:
            self.available_data_ports = None

        if isinstance(users, AbstractUserManager):
            self.user_manager = users
        else:
            self.user_manager = MemoryUserManager(users, loop=self.loop)

        self.available_connections = AvailableConnections(maximum_connections)
        self.throttle = StreamThrottle.from_limits(read_speed_limit,
                                                   write_speed_limit,
                                                   loop=self.loop)
        self.throttle_per_connection = StreamThrottle.from_limits(
            read_speed_limit_per_connection,
            write_speed_limit_per_connection,
            loop=self.loop)
        self.throttle_per_user = {}
        self.encoding = encoding
Example 6
async def scheduler(*,
                    queue_size: int = 10,
                    loop: asyncio.AbstractEventLoop = None,
                    app=None):
    """Launch the task manager."""
    if loop is None:
        loop = asyncio.get_event_loop()

    try:
        tasks_queue: asyncio.PriorityQueue = asyncio.PriorityQueue(
            maxsize=queue_size)
        worker_task = asyncio.ensure_future(worker(tasks_queue, app=app))

        async def on_message(msg):
            """Add tasks to the queue."""
            queue = msg.proto.subject
            log.info(f"Received a new event: {queue} - {msg.seq}")

            body = msgpack.unpackb(msg.data, encoding="utf-8")

            # Add tasks to the queue
            for func in _subscribers.get(queue, []):
                priority = body.get("priority", 1)
                entry = (priority, func, queue, body)
                log.info(
                    f"Create a new task: `{func.__name__}` - priority {priority}"
                )
                await tasks_queue.put(entry)

        log.info("Loading subscribers....")
        for queue_name, funcs in _subscribers.items():
            # add a callback when you receive a message
            await streaming.subscribe(queue_name, on_message)
            for func in funcs:
                log.info(
                    f"Function `{func.__name__}` subscribed on `{queue_name}`")

        # really, really ugly
        while True:
            await asyncio.sleep(1)

    except CancelledError:
        log.warning("Closing tasks....")
        while not tasks_queue.empty():
            await asyncio.sleep(0)
        worker_task.cancel()
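The worker coroutine that the scheduler hands the queue to is not part of this snippet. A minimal sketch of one possible implementation, assuming the (priority, func, queue, body) entry layout built above and the worker(tasks_queue, app=app) call signature; everything beyond those names is illustrative:

async def worker(tasks_queue: asyncio.PriorityQueue, *, app=None):
    # Drain entries in priority order and invoke the subscriber coroutine.
    while True:
        priority, func, queue, body = await tasks_queue.get()
        try:
            await func(queue, body, app)
        except Exception:
            log.exception(f"Task `{func.__name__}` failed (priority {priority})")
        finally:
            tasks_queue.task_done()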
Example 7
    def __init__(self, fetcher, parser, saver, url_filter=None, loop=None):
        """
        constructor
        """
        BasePool.__init__(self, url_filter=url_filter)

        self._loop = loop or asyncio.get_event_loop(
        )  # event_loop from parameter or call asyncio.get_event_loop()
        self._queue = asyncio.PriorityQueue(
            loop=self._loop)  # (priority, url, keys, deep, repeat)

        self._fetcher = fetcher  # fetcher instance
        self._parser = parser  # parser instance
        self._saver = saver  # saver instance

        self._start_time = None  # start time of this pool
        return
Example 8
 def __init__(self, bot):
     self.current = None
     self.voice = None
     self.bot = bot
     self.play_next_song = asyncio.Event()
     self.skip_votes = set()  # a set of user_ids that voted
     self.audio_player = self.bot.loop.create_task(self.audio_player_task())
     self.playerheat = {
     }  # keep track of how often each user requests. -------------
     self.queue = []  # easily track the songs without messing with threads
     if self.bot.music_priorityqueue:
         self.songs = asyncio.PriorityQueue(
         )  # gotta keep priority -----------------
     else:
         self.songs = asyncio.Queue()
     main_loop = asyncio.get_event_loop()
     main_loop.create_task(self.loop_cooldown())
Example 9
 def __init__(self, client_id, client_secret, storage_path):
     self.client_id = client_id
     self.client_secret = client_secret
     self.storage_path = storage_path
     self.access_token = None
     self.refresh_token = None
     self.device_id = ""
     self.playlist = set()
     self.current_track = None
     self.queued_tracks = set()
     self.refresh_task = None
     self.play_next_task = None
     self.loop = asyncio.new_event_loop()
     self.queue = asyncio.PriorityQueue(loop=self.loop)
     self.thread = threading.Thread(target=self.entry_point)
     self.thread.start()
     asyncio.run_coroutine_threadsafe(self.start_async(),
                                      self.loop).result()
Example 10
    async def test_queue(self):
        dev = QueueMixin()
        dev.queue = asyncio.PriorityQueue()
        tasks = []
        worker = asyncio.create_task(dev.queue_worker(dev.queue))
        await asyncio.sleep(0.001)
        tasks.append(dev.execute_in_queue(dev.queue, self.waiting(2), 10))
        tasks.append(dev.execute_in_queue(dev.queue, self.waiting(1), 5))
        tasks.append(dev.execute_in_queue(dev.queue, self.waiting(6), 1))

        res = await asyncio.gather(*tasks, return_exceptions=True)

        worker.cancel()
        try:
            await worker
        except asyncio.CancelledError:
            pass
        pass
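QueueMixin itself is not included in the test above. A hypothetical sketch of what execute_in_queue and queue_worker could look like, inferred only from the calls in the test; the future-based hand-off and the entry layout are assumptions, not the project's actual implementation:

class QueueMixin:
    async def execute_in_queue(self, queue, coro, priority):
        # Wrap the coroutine in a future, enqueue it, and wait for the worker
        # to run it.  Priorities in the test are all distinct, so no extra
        # tie-breaker is needed to keep queue entries comparable.
        fut = asyncio.get_running_loop().create_future()
        await queue.put((priority, fut, coro))
        return await fut

    async def queue_worker(self, queue):
        # Run queued coroutines in priority order, reporting the result or
        # exception through the paired future.
        while True:
            _priority, fut, coro = await queue.get()
            try:
                fut.set_result(await coro)
            except Exception as exc:
                fut.set_exception(exc)
            finally:
                queue.task_done()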
Example 11
    async def reload_crawls(self):
        self.__waiting_crawls = asyncio.PriorityQueue(
            maxsize=MAX_WAITING_CRAWLS_QUEUE_SIZE)
        while self.__crawls_not_in_queue_num > 0:
            await self.__waiting_crawls.get()

        links = get_all_links()
        for link in links:
            for script in link.scripts:
                await self.add_crawl(
                    CrawlData(
                        name=script.script_name,
                        url=link.url,
                        period=script.period,
                        email=script.notifications[0].address,
                        crawl_id=link.link_id,
                        xpath=script.instructions,
                        element_value=script.element_value,
                    ))
Example 12
 def __init__(
     self,
     hass: HomeAssistant,
     client: AzureEventHubClient,
     entities_filter: vol.Schema,
     send_interval: int,
     max_delay: int | None = None,
 ) -> None:
     """Initialize the listener."""
     self.hass = hass
     self.queue: asyncio.PriorityQueue[  # pylint: disable=unsubscriptable-object
         tuple[int, tuple[float, Event | None]]] = asyncio.PriorityQueue()
     self._client = client
     self._entities_filter = entities_filter
     self._send_interval = send_interval
     self._max_delay = max_delay if max_delay else DEFAULT_MAX_DELAY
     self._listener_remover: Callable[[], None] | None = None
     self._next_send_remover: Callable[[], None] | None = None
     self.shutdown = False
Example 13
async def main():
    queue = asyncio.PriorityQueue()

    # fire up both the producers and consumers
    producers = [asyncio.create_task(producer(queue)) for _ in range(5)]
    consumers = [asyncio.create_task(consumer(queue)) for _ in range(1)]

    # with both producers and consumers running, wait for
    # the producers to finish

    await asyncio.gather(*producers)
    print('---- done producing')

    # wait for the remaining tasks to be processed
    await queue.join()

    # cancel the consumers, which are now idle
    for c in consumers:
        c.cancel()
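producer() and consumer() are not shown with this main(); the consumer has to call task_done() or the queue.join() above never returns. A minimal sketch of the pair under those assumptions (item layout, item counts, and sleep times are illustrative):

import itertools
import random

counter = itertools.count()  # tie-breaker so equal priorities never compare payloads

async def producer(queue: asyncio.PriorityQueue):
    for _ in range(5):
        priority = random.randint(1, 10)
        await queue.put((priority, next(counter), f"job with priority {priority}"))
        await asyncio.sleep(random.random())

async def consumer(queue: asyncio.PriorityQueue):
    while True:
        priority, _, payload = await queue.get()
        print(f"consumed {payload!r}")
        queue.task_done()  # lets queue.join() in main() return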
Example 14
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        # Read env vars
        self.bot_stuff_channel = int(os.getenv("EPIC_RPG_BOT_CHANNEL"))
        self.work_command = os.getenv("WORK_COMMAND")
        self.hp_threshold = int(os.getenv("HP_THRESHOLD"))

        # Set up logging
        self.logger = logging.getLogger("discord")
        self.logger.setLevel(logging.DEBUG)
        self.handler = logging.FileHandler(filename="log/erpgbot.log",
                                           encoding="utf-8",
                                           mode="w")
        self.handler.setFormatter(
            logging.Formatter(
                "%(asctime)s:%(levelname)s:%(name)s: %(message)s"))
        self.logger.addHandler(self.handler)

        # Set up our queue
        self.msg_queue = asyncio.PriorityQueue()

        # Initialize an empty variable to hold the channel ID.  We set this at 0
        # to start so that we can wait for the channels to be populated.  Normally
        # these would get populated after waiting for ready, but this doesn't
        # work when bot=False is set in the client at runtime
        self.channel = 0

        # Set up our handler classes (if they're classes)
        self.training_handler = HandleTraining()

        # Keep a list of various actions to perform with our queue handler
        self.ready_actions_todo = []
        self.inventory_actions_todo = []

        # Keep an up-to-date dictionary of our player's inventory
        self.player_inventory = {}

        # create our background tasks
        self.loop.create_task(self.perform_action_handler())
        self.loop.create_task(self.perform_ready_check_handler())
        self.loop.create_task(self.perform_inventory_handler())
Example 15
async def test_worker():
    queue = asyncio.PriorityQueue(maxsize=10)
    check = None

    async def add_2_numbers(topic, data, app):
        nonlocal check
        check = "TaskDone"
        await asyncio.sleep(2)
        return check

    worker_task = asyncio.ensure_future(worker(queue))

    entry = (1, add_2_numbers, 'my.event', {})
    await queue.put(entry)
    while not queue.empty():
        await asyncio.sleep(0)

    worker_task.cancel()

    assert check == "TaskDone"
Example 16
 def __init__(
     self,
     hass: HomeAssistant,
     client_args: Dict[str, Any],
     conn_str_client: bool,
     entities_filter: vol.Schema,
     send_interval: int,
     max_delay: int,
 ):
     """Initialize the listener."""
     self.hass = hass
     self.queue = asyncio.PriorityQueue()
     self._client_args = client_args
     self._conn_str_client = conn_str_client
     self._entities_filter = entities_filter
     self._send_interval = send_interval
     self._max_delay = max_delay + send_interval
     self._listener_remover = None
     self._next_send_remover = None
     self.shutdown = False
Example 17
 def __init__(
     self,
     opp: OpenPeerPower,
     client_args: dict[str, Any],
     conn_str_client: bool,
     entities_filter: vol.Schema,
     send_interval: int,
     max_delay: int,
 ) -> None:
     """Initialize the listener."""
     self.opp = opp
     self.queue = asyncio.PriorityQueue()
     self._client_args = client_args
     self._conn_str_client = conn_str_client
     self._entities_filter = entities_filter
     self._send_interval = send_interval
     self._max_delay = max_delay + send_interval
     self._listener_remover = None
     self._next_send_remover = None
     self.shutdown = False
Example 18
    def __init__(self, *args, loop=None, **kwargs):
        kwargs.pop('shard_id', None)
        self.shard_ids = kwargs.pop('shard_ids', None)
        super().__init__(*args, loop=loop, **kwargs)

        if self.shard_ids is not None:
            if self.shard_count is None:
                raise ClientException('When passing manual shard_ids, you must provide a shard_count.')
            elif not isinstance(self.shard_ids, (list, tuple)):
                raise ClientException('shard_ids parameter must be a list or a tuple.')

        self._connection = AutoShardedConnectionState(dispatch=self.dispatch,
                                                      handlers=self._handlers, syncer=self._syncer,
                                                      hooks=self._hooks, http=self.http, loop=self.loop, **kwargs)

        # instead of a single websocket, we have multiple
        # the key is the shard_id
        self.__shards = {}
        self._connection._get_websocket = self._get_websocket
        self.__queue = asyncio.PriorityQueue()
Example 19
 def __init__(self,
              clients: Optional[Iterable[Client]] = None,
              all_off_on_close: Optional[bool] = False):
     self.clients = set()
     if clients is not None:
         for client in clients:
             self.clients.add(client)
     self.all_off_on_close = all_off_on_close
     self.screens = {}
     self.tallies = {}
     self.running = False
     self.loop = asyncio.get_event_loop()
     screen = self.broadcast_screen = Screen.broadcast()
     assert screen.is_broadcast
     self.screens[screen.index] = screen
     self._bind_screen(screen)
     self.update_queue = asyncio.PriorityQueue()
     self.update_task = None
     self.tx_task = None
     self.connected_evt = asyncio.Event()
     self._tx_lock = asyncio.Lock()
Example 20
 def __init__(
     self,
     hass: HomeAssistant,
     client_args: dict[str, Any],
     conn_str_client: bool,
     entities_filter: vol.Schema,
     send_interval: int,
     max_delay: int,
 ) -> None:
     """Initialize the listener."""
     self.hass = hass
     self.queue: asyncio.PriorityQueue[  # pylint: disable=unsubscriptable-object
         tuple[int, tuple[float, Event | None]]] = asyncio.PriorityQueue()
     self._client_args = client_args
     self._conn_str_client = conn_str_client
     self._entities_filter = entities_filter
     self._send_interval = send_interval
     self._max_delay = max_delay + send_interval
     self._listener_remover: Callable[[], None] | None = None
     self._next_send_remover: Callable[[], None] | None = None
     self.shutdown = False
Example 21
    def __init__(
        self,
        hass: HomeAssistant,
        entry: ConfigEntry,
        entities_filter: vol.Schema,
    ) -> None:
        """Initialize the listener."""
        self.hass = hass
        self._entry = entry
        self._entities_filter = entities_filter

        self._client = AzureEventHubClient.from_input(**self._entry.data)
        self._send_interval = self._entry.options[CONF_SEND_INTERVAL]
        self._max_delay = self._entry.options.get(CONF_MAX_DELAY,
                                                  DEFAULT_MAX_DELAY)

        self._shutdown = False
        self._queue: asyncio.PriorityQueue[tuple[int, tuple[
            datetime, State | None]]] = asyncio.PriorityQueue()
        self._listener_remover: Callable[[], None] | None = None
        self._next_send_remover: Callable[[], None] | None = None
Example 22
 def __init__(self, number, port, pin, socket, BAUDRATE = 115200):
     Thread.__init__(self)
     self.number = number
     self.port = port
     self.pin = pin
     self.socket = socket
     self.status = None
     self.BAUDRATE = BAUDRATE
     self.queue = asyncio.PriorityQueue()
     logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG)
     self.modem = Modem(self.port, self.BAUDRATE, smsReceivedCallbackFunc=None)
     try:
         self.modem.connect(pin=pin, waitingForModemToStartInSeconds=2) if self.pin else self.modem.connect(waitingForModemToStartInSeconds=2)
     except TimeoutException as e:
         self.status = 'Timeout Exception: Unable to connect to modem. Check that it is powered on and connected.'
     except Exception as e:
         logging.error('at %s', 'SerialListener.__init__', exc_info=e)
         self.status = repr(e)
     else:
         asyncio.create_task(self.pause_queue_worker())
         asyncio.create_task(self.queue_worker(self.queue))
Example 23
async def worker_entrypoint(loop):
    global work_queue, cruncher_ids
    print("initializing work queue.")
    
    work_queue = asyncio.PriorityQueue(loop=loop)
    cruncher_ids = asyncio.Queue(loop=loop)

    with open("availability-zones.json", "r") as f:
        for id, job in enumerate(json.load(f)):
            await work_queue.put((0, id, job))
    
    print("\tLoaded %d jobs." % (work_queue.qsize(),))

    # spawn off a bunch of workers pulling from the work queue...
    print("\tfetch_workers")
    for x in range(0, config["fetch_workers"]):
        print("\tstarted a worker... %d" % x)
        asyncio.ensure_future(Worker(x, work_queue, loop).run())

    for x in range(0, config["crunch_workers"]):
        await cruncher_ids.put(x)
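The Worker class driven by worker_entrypoint() is not part of the snippet. A hypothetical sketch, assuming it simply drains the (priority, id, job) tuples queued above; the real per-job fetch logic is project-specific and omitted:

class Worker:
    def __init__(self, worker_id, queue, loop):
        self.worker_id = worker_id
        self.queue = queue
        self.loop = loop

    async def run(self):
        # Pull jobs until the shared queue is drained.
        while not self.queue.empty():
            priority, job_id, job = await self.queue.get()
            print("worker %d took job %d (priority %d)" % (self.worker_id, job_id, priority))
            # ... fetch/process `job` here ...
            self.queue.task_done()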
Example 24
async def main():
    task_queue = asyncio.PriorityQueue(20)

    total_sleep_time = 0
    #    for _ in range(20):
    #        sleep_for = random.uniform(1, 3)
    #        total_sleep_time += sleep_for
    #        task_queue.put_nowait((1, sleep_for))

    tasks = []
    for i in range(3):
        worker_name = "Worker-%i" % i
        task = asyncio.create_task(worker(worker_name, task_queue))
        tasks.append(task)

    for _ in range(20):
        sleep_for = random.uniform(2, 5)
        total_sleep_time += sleep_for
        task_queue.put_nowait((1, sleep_for))
        await asyncio.sleep(1)

    # Wait until the queue is fully processed
    started_at = time.monotonic()
    await task_queue.join()
    total_slept_for = time.monotonic() - started_at

    # Cancel our worker tasks.
    for task in tasks:
        task.cancel()
    # Wait until all worker tasks are cancelled.
    await asyncio.gather(*tasks, return_exceptions=True)

    print('====')
    print(f'3 workers slept in parallel for {total_slept_for:.2f} seconds')
    print(f'total expected sleep time: {total_sleep_time:.2f} seconds')
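The worker() coroutine started above is not included in the snippet. A minimal sketch matching the (1, sleep_for) entries main() enqueues; the task_done() call is what lets task_queue.join() return:

async def worker(name: str, queue: asyncio.PriorityQueue):
    while True:
        priority, sleep_for = await queue.get()
        await asyncio.sleep(sleep_for)
        print(f"{name} slept for {sleep_for:.2f} seconds (priority {priority})")
        queue.task_done()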
Example 25
 def __init__(self, bot):
     self.bot = bot
     self.db = self.bot.database.db.gw2
     with open("cogs/guildwars2/gamedata.json", encoding="utf-8",
               mode="r") as f:
         self.gamedata = json.load(f)
     with open("cogs/guildwars2/instabilities.json",
               encoding="utf-8",
               mode="r") as f:
         self.instabilities = json.load(f)
     self.session = bot.session
     self.boss_schedule = self.generate_schedule()
     self.embed_color = 0xc12d2b
     self.log = logging.getLogger(__name__)
     self.tasks = []
     self.waiting_for = []
     self.emojis = {}
     self.chatcode_preview_opted_out_guilds = set()
     try:
         self.font = ImageFont.truetype("GWTwoFont1p1.ttf", size=30)
     except IOError:
         self.font = ImageFont.load_default()
     setup_tasks = [
         self.prepare_emojis, self.prepare_linkpreview_guild_cache
     ]
     self.guildsync_entry_number = 0
     self.guildsync_queue = asyncio.PriorityQueue()
     for task in setup_tasks:
         bot.loop.create_task(task())
     self.tasks = [
         self.game_update_checker, self.daily_checker, self.news_checker,
         self.gem_tracker, self.world_population_checker,
         self.guild_synchronizer, self.boss_notifier,
         self.forced_account_names, self.event_reminder_task,
         self.worldsync_task, self.guildsync_consumer
     ]
     for task in self.tasks:
         task.start()
Example 26
    def __init__(self,
                 connections,
                 dead_timeout=60,
                 timeout_cutoff=5,
                 selector_class=RoundRobinSelector,
                 randomize_hosts=True,
                 *,
                 loop,
                 **kwargs):
        self._dead_timeout = dead_timeout
        self.timeout_cutoff = timeout_cutoff
        self.connection_opts = connections
        self.connections = [c for (c, _) in connections]
        self.orig_connections = tuple(self.connections)
        self.dead = asyncio.PriorityQueue(len(self.connections), loop=loop)
        self.dead_count = collections.Counter()

        self.loop = loop

        if randomize_hosts:
            random.shuffle(self.connections)

        self.selector = selector_class(dict(connections))
Example 27
    def __init__(self, start: StrOrUrl, target: StrOrUrl, *, concurrent: int = 25, keywords: List[str] = None) -> None:
        name = type(self).__name__
        logger.debug(f'Initializing {name}')
        logger.debug(f'{name} will start at "{start}" and will stop at "{target}"')
        logger.debug(f'{name} will use {concurrent} concurrent tasks')

        if keywords is not None:
            to_print = '\n'.join([f'\t{num}) {kw}' for num, kw in enumerate(keywords)])
            logger.debug(f'{name} prioritizing links by keywords:\n{to_print}')
            self._keywords = keywords
        else:
            self._keywords = []

        self._start = URL(start)
        self._target = URL(target)
        self._queue = asyncio.PriorityQueue()
        self._graph = defaultdict(set)
        self._session = aiohttp.ClientSession()
        self._semaphore = asyncio.Semaphore(concurrent)
        self._concurrent = concurrent
        self._tasks = []
        self._target_found = asyncio.Event()
        logger.debug(f'{type(self).__name__} initialization complete')
Example 28
    def __init__(
        self,
        id: str,
        url: str,
        request_qty: int,
        request_duration: float,
        *args,
        max_batch_size: Optional[int] = None,
        timeout: float = 60.0,
        preproc: Callable = anull,
        postproc: Callable = anull,
        logger: logging.Logger = None,
        **kwargs,
    ):
        """Initialize."""
        self.id = id
        self.worker: Optional[Task] = None
        self.request_queue = asyncio.PriorityQueue()
        self.counter = itertools.count()
        self.url = url
        self.request_qty = request_qty
        self.request_duration = datetime.timedelta(seconds=request_duration)
        self.timeout = timeout
        self.max_batch_size = max_batch_size
        self.preproc = preproc
        self.postproc = postproc
        self.use_cache = settings.use_cache
        if logger is None:
            logger = logging.getLogger(__name__)
        self.logger = logger

        # locking cache needs to be here so each KP instance has its own cache.
        # https://stackoverflow.com/a/14946506
        if self.use_cache:
            self.query = async_locking_cache(self._query)
        else:
            self.query = self._query
Example 29
 async def _main(self):
     self.logger.info('AioSpider started!!!!!Use memory queue..')
     self.queue = asyncio.PriorityQueue(maxsize=1000000)
     self.client = aiohttp.ClientSession()
     await self.before_start()
     for _signal in (SIGINT, SIGTERM):
         self.loop.add_signal_handler(
             # bind the current signal value; a bare lambda would late-bind
             # `_signal` and both handlers would end up stopping with SIGTERM
             _signal,
             lambda sig=_signal: asyncio.create_task(self._stop(sig)))
     # await self._load_task()
     if self.queue.empty():
         async for task in self.start():
             await self._add_task(task)
     if self.cookies:
         workers = [
             asyncio.create_task(self._workflow(self.sem, cookie))
             for cookie in self.cookies
         ]
     else:
         workers = [
             asyncio.create_task(self._workflow(self.sem))
             for _ in range(self.concurrency)
         ]
     await self.queue.join()
     await self._stop(SIGTERM)
Example 30
 def __init__(self, *args, **kwargs):
     self._inner = asyncio.PriorityQueue(*args, **kwargs)