Example 1
    def __init__(self, room_address, db_name='./MusicBotDB.json'):
        """Set up the bot's queues, database, middleware, and signal handling.

        Args:
            room_address: address of the chat room to connect to.
            db_name: path of the TinyDB JSON database file.
        """
        self.loop = asyncio.get_event_loop()
        # asyncio.JoinableQueue was merged into asyncio.Queue in 3.4.4 and
        # removed in later Pythons; fall back so this keeps working.
        queue_cls = getattr(asyncio, 'JoinableQueue', asyncio.Queue)
        self.action_queue = queue_cls()
        self.packet_queue = queue_cls()
        self.ws_queue = queue_cls()

        self.action_task = None
        self.recv_task = None
        self.running_actions = []

        self.room_address = room_address
        self.ws = None  # websocket connection, opened later
        self.db = TinyDB(db_name)
        self.mid = 0  # message-id counter, reset below

        self.next_ping_time = int(time())
        self.last_latency = None
        self.last_ping_log = int(time())

        self.reset_mid()
        ### middleware

        self.middleware = {}
        self.queues = {}
        self.packet_queues = []

        self.add_middleware(LoggerMiddleware())
        self.log_queue = self.get_input_queue(LoggerMiddleware.TAG)

        # NOTE(review): sigint_handler() is *called* here — presumably it is a
        # factory returning the actual handler callable; confirm.
        signal.signal(signal.SIGINT, self.sigint_handler())
        self.closing = False
Example 2
    def start(self):
        """Wire up the pipeline stages and schedule them on the event loop.

        Builds one queue per stage boundary, instantiates each process
        class, pumps an iterable input into the first queue when the input
        is not already a queue, and schedules everything as tasks.
        """
        logger.info("Starting pipeline")
        requester = self.requester or Requester(
            error_contents=self.error_contents,
            not_found_contents=self.not_found_contents)

        # asyncio.JoinableQueue was removed from modern Pythons
        # (asyncio.Queue gained join()/task_done()); fall back accordingly.
        queue_cls = getattr(asyncio, 'JoinableQueue', asyncio.Queue)

        futures, processes = [], []
        if isinstance(self.input, asyncio.Queue):
            input_q = self.input
        else:
            input_q = queue_cls(maxsize=10)

        # One queue per stage boundary: process i reads queue i and writes
        # queue i + 1.
        process_queues = [input_q] + [
            queue_cls(5) for _ in range(len(self.processes))
        ]

        logger.info("Created {0} queues".format(len(process_queues)))

        for idx, process_cls in enumerate(self.processes):
            process = process_cls(process_queues[idx], process_queues[idx + 1],
                                  requester, self)

            if isinstance(process, (Scraper, Pipeline)) and self.prepend_host:
                process.set_host(self.prepend_host)

            logger.info("Starting process {0}".format(process))
            processes.append(process)
            futures.append(process.start())

        if not isinstance(self.input, asyncio.Queue):
            logger.info("Using iterable input queue")

            @asyncio.coroutine
            def _input_func():
                # Pump the iterable into the first queue, then signal end of
                # input with the QueueDone sentinel.
                for item in self.input:
                    yield from input_q.put(item)
                logger.info("Input queue drained")

                yield from input_q.put(QueueDone)

            futures.append(_input_func())

        @asyncio.coroutine
        def status_task():
            # Periodically print per-process progress and any non-zero error
            # counts; runs until the event loop is stopped.
            while True:
                yield from asyncio.sleep(1)
                for p in processes:
                    print("{0:<20s}: {1:>6d}: {2}".format(
                        p.__class__.__name__, p.processed,
                        {k: v
                         for k, v in p.errors.items() if v != 0}))
                print(" ")

        # asyncio.async() is a SyntaxError on Python 3.7+ ('async' became a
        # keyword); ensure_future() is the long-standing replacement.
        if "display" in self.plugins:
            asyncio.ensure_future(status_task())

        for f in futures:
            asyncio.ensure_future(f)
Example 3
    def test_task_done(self):
        """Two consumers drain 100 items; join() must wait for all of them."""
        q = asyncio.JoinableQueue(loop=self.loop)
        for value in range(100):
            q.put_nowait(value)

        total = 0

        # Consumers pull items and acknowledge each with task_done();
        # join() below returns only once every item is acknowledged.
        active = True

        @asyncio.coroutine
        def consumer():
            nonlocal total

            while active:
                got = yield from q.get()
                total += got
                q.task_done()

        @asyncio.coroutine
        def runner():
            for _ in range(2):
                asyncio.Task(consumer(), loop=self.loop)

            yield from q.join()

        self.loop.run_until_complete(runner())
        self.assertEqual(sum(range(100)), total)

        # Wake the consumers so their generators can exit.
        active = False
        for _ in range(2):
            q.put_nowait(0)
Example 4
    def __init__(self, loop):
        """Create the joinable queue, preferring the legacy class name.

        asyncio.JoinableQueue was merged into asyncio.Queue (3.4.4) and
        later removed; the loop= argument itself was removed in 3.10, so
        that case needs its own fallback.
        """
        try:
            self.queue = asyncio.JoinableQueue(loop=loop)
        except AttributeError:
            try:
                self.queue = asyncio.Queue(loop=loop)
            except TypeError:
                # Python 3.10+: Queue() no longer accepts loop=.
                self.queue = asyncio.Queue()

        self.closed = False
Example 5
    def __init__(self, conn_factory, logger, staging_tbl, proxy=None):
        """Initialize the scraper: job queue, DB staging queue, and worker.

        Args:
            conn_factory: callable producing database connections.
            logger: logger used for progress/diagnostic messages.
            staging_tbl: staging table that records are written into.
            proxy: optional proxy to route requests through.
        """
        logger.info('Initializing AsyncScraper')
        self._conn_factory = conn_factory
        self._logger = logger
        self._staging_tbl = staging_tbl
        self._proxy = proxy

        # Old Pythons only had join() on asyncio.JoinableQueue; newer ones
        # merged it into asyncio.Queue.
        if hasattr(asyncio.Queue, 'join'):
            jobsq = asyncio.Queue()
        else:
            jobsq = asyncio.JoinableQueue()
        self._jobsq = jobsq

        # Set up the queue and the listener to stage the records
        logger.debug('Initializing Queue')
        self._db_queue = queue.Queue()
        worker = RecordWorker(self._db_queue, conn_factory,
                              models.staging_table, models.log_table, True)
        work_thread = Thread(target=worker.record_inserter)
        self._dbworker = worker

        # NOTE(review): work_thread is neither daemonized nor joined and its
        # handle is dropped; confirm shutdown is handled elsewhere.
        work_thread.start()
        logger.debug('Queue initialized')
Example 6
 def __init__(self):
     """Initialize middleware plumbing: input queue, routes, and state."""
     super(BotMiddleware, self).__init__()
     # asyncio.JoinableQueue is gone in modern Pythons; asyncio.Queue has
     # had join()/task_done() since 3.4.4, so fall back to it.
     self.input = getattr(asyncio, 'JoinableQueue', asyncio.Queue)()
     self._output = {}
     self.task = None
     self.closing = False
     self._recv_functions = {}
     self._exception_handler = None
     self._add_routes()
Example 7
 def __init__(self):
     """Set up song-queue state and register the command event handler."""
     super(PlayQueuedSongsMiddleware, self).__init__()
     # JoinableQueue fallback: the class was removed from asyncio and its
     # join()/task_done() live on asyncio.Queue now.
     self.message_queue = getattr(asyncio, 'JoinableQueue', asyncio.Queue)()
     self.song_queue = []
     self.current_song = None
     self.play_callback = None
     # queued a song, waiting to see if it turns up
     self.expecting_song = False
     self.in_backlog = False
     UsesCommands.set_handler(self, self.handle_event)
Example 8
    def test_join_empty_queue(self):
        """join() on a never-used queue must complete immediately."""
        q = asyncio.JoinableQueue(loop=self.loop)

        # Joining twice guards against a regression where the first join()
        # leaves the queue in a state that would block the second.

        @asyncio.coroutine
        def join_twice():
            yield from q.join()
            yield from q.join()

        self.loop.run_until_complete(join_twice())
Example 9
    def __init__(self, seed,
                 loop=None,
                 concurrency=10):
        """Create the crawler, open the CSV report, and enqueue the seed.

        Args:
            seed: starting URL for the crawl.
            loop: event loop to use (defaults to the current one).
            concurrency: number of concurrent worker tasks.
        """
        self.seed = seed
        self.loop = loop or asyncio.get_event_loop()
        self.concurrency = concurrency
        # JoinableQueue was removed from asyncio; Queue has join() now.
        self.q = getattr(asyncio, 'JoinableQueue', asyncio.Queue)()
        self.visited = set()
        self.done = set()

        # newline='' is required by the csv module when handing it a file,
        # otherwise extra blank rows appear on Windows.
        self.fp = open("produtos.csv", "w", newline="")
        self.csv = csv.DictWriter(self.fp,
                                  fieldnames=Page._fields)
        self.csv.writeheader()

        self.add_link(seed)
Example 10
 def __init__(
         self,
         roots,
         exclude=None,
         strict=True,  # What to crawl.
         max_redirect=10,
         max_tries=4,  # Per-url limits.
         max_tasks=10,
         *,
         loop=None):
     """Initialize crawler state and derive the set of root domains.

     Args:
         roots: iterable of root URLs to start crawling from.
         exclude: pattern of URLs to skip, or None.
         strict: if True, only crawl hosts exactly matching a root host.
         max_redirect: redirects to follow per URL.
         max_tries: fetch attempts per URL.
         max_tasks: number of concurrent worker tasks.
         loop: event loop (keyword-only; defaults to the current one).
     """
     self.loop = loop or asyncio.get_event_loop()
     self.roots = roots
     self.exclude = exclude
     self.strict = strict
     self.max_redirect = max_redirect
     self.max_tries = max_tries
     self.max_tasks = max_tasks
     # JoinableQueue was removed from asyncio; Queue now has join().
     # NOTE(review): the loop= argument itself was removed in 3.10.
     self.q = getattr(asyncio, 'JoinableQueue', asyncio.Queue)(loop=self.loop)
     self.urls = set()
     self.done = []
     self.connector = aiohttp.TCPConnector(loop=self.loop)
     self.root_domains = set()
     for root in roots:
         parts = urllib.parse.urlparse(root)
         # .hostname replaces the deprecated (and since removed)
         # urllib.parse.splitport(); it strips the port and lowercases.
         host = parts.hostname
         if not host:
             continue
         if re.match(r'\A[\d\.]*\Z', host):
             # Bare IP address: record it verbatim.
             self.root_domains.add(host)
         else:
             host = host.lower()
             if self.strict:
                 self.root_domains.add(host)
             else:
                 self.root_domains.add(lenient_host(host))
     for root in roots:
         self.add_url(root)
     self.t0 = time.time()
     self.t1 = None
Example 11
    def test_format(self):
        """_format() reports maxsize, plus tasks once any are unfinished."""
        q = asyncio.JoinableQueue(loop=self.loop)
        self.assertEqual('maxsize=0', q._format())

        # Pretend two items were taken but not yet acknowledged.
        q._unfinished_tasks = 2
        self.assertEqual('maxsize=0 tasks=2', q._format())
Example 12
 def test_task_done_underflow(self):
     """task_done() without a matching get() must raise ValueError."""
     joinable = asyncio.JoinableQueue(loop=self.loop)
     with self.assertRaises(ValueError):
         joinable.task_done()
Example 13
 def __init__(self, loop):
     """Remember the loop and build a joinable queue bound to it.

     Prefers the legacy asyncio.JoinableQueue class and falls back to
     asyncio.Queue on Pythons where the two were merged.
     """
     self._loop = loop
     queue_cls = getattr(asyncio, 'JoinableQueue', asyncio.Queue)
     self.queue = queue_cls(loop=loop)
Example 14
 def __init__(self, pipelines):
     """Keep the pipeline list and create the shared work queue."""
     self._queue = asyncio.JoinableQueue()
     self._pipelines = pipelines
Example 15
def JoinableQueue(loop):
    """Create a joinable queue, working across asyncio's API changes.

    Prefers the legacy asyncio.JoinableQueue class; on Pythons where it
    was merged into asyncio.Queue, passes loop= while that parameter is
    still accepted and drops it on 3.10+, where it was removed.

    Args:
        loop: event loop the queue should be bound to (ignored on 3.10+).

    Returns:
        An asyncio queue exposing join()/task_done().
    """
    try:
        return asyncio.JoinableQueue(loop=loop)
    except AttributeError:
        pass
    try:
        return asyncio.Queue(loop=loop)
    except TypeError:
        # Python 3.10+ removed the loop argument entirely.
        return asyncio.Queue()