Example #1
import asyncio


async def producer(queue: AsyncQueue):
    print("Production start!")
    for i in range(50, 55):
        await asyncio.sleep(1)
        print(f"Produce message {i}")
        queue.add_message(i)
    # A trailing None tells the consumer that production is finished
    queue.add_message(None)
    print("Production complete!")
Example #2
File: Log.py  Project: Lashchyk/CVSAnalY
    def _read_from_repository(self, new_line_cb, user_data):
        queue = AsyncQueue()
        logreader_thread = threading.Thread(target=self._logreader,
                                            args=(self.repo, queue))
        logreader_thread.setDaemon(True)
        logreader_thread.start()

        # Use the queue with mutexes while the
        # thread is alive
        while logreader_thread.isAlive():
            try:
                line = queue.get(1)
            except TimeOut:
                continue
            new_line_cb(line, user_data)

        # No threads now, we don't need locks
        while not queue.empty_unlocked():
            line = queue.get_unlocked()
            new_line_cb(line, user_data)
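Examples #2 through #6 rely on a thread-safe AsyncQueue whose get(timeout) blocks and raises TimeOut when nothing arrives in time, plus empty_unlocked()/get_unlocked() for draining the queue once the worker thread has exited. The sketch below shows one way such a class could be built on the standard library's queue.Queue; the method names mirror the examples, but the put() name and the implementation are assumptions, not the CVSAnalY code:

import queue


class TimeOut(Exception):
    """Raised when get() gives up waiting."""


class AsyncQueue:
    # Illustration only: a thin wrapper over queue.Queue exposing the
    # interface these examples rely on.
    def __init__(self, maxsize=0):
        self._queue = queue.Queue(maxsize)

    def put(self, item):
        # Blocks when maxsize items are already pending (backpressure)
        self._queue.put(item)

    def get(self, timeout=None):
        try:
            return self._queue.get(timeout=timeout)
        except queue.Empty:
            raise TimeOut()

    def empty_unlocked(self):
        # Only safe once no other thread is touching the queue
        return len(self._queue.queue) == 0

    def get_unlocked(self):
        return self._queue.queue.popleft()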
Example #4
    def end(self):
        # The log is now in the temp table
        # Retrieve the data now and pass it to
        # the real content handler

        self.templog.flush()
        printdbg("DBProxyContentHandler: parsing finished, creating thread")

        self.db_handler.begin()
        self.db_handler.repository(self.repo_uri)

        queue = AsyncQueue(50)
        reader_thread = threading.Thread(target=self.__reader,
                                         args=(self.templog, queue))
        reader_thread.setDaemon(True)
        reader_thread.start()

        # Use the queue with mutexes while the
        # thread is alive
        while reader_thread.isAlive():
            try:
                item = queue.get(1)
            except TimeOut:
                continue
            printdbg("DBProxyContentHandler: commit: %s", (item.revision, ))
            self.db_handler.commit(item)
            del item

        # No threads now, we don't need locks
        printdbg(
            "DBProxyContentHandler: thread __reader is finished, continue without locks"
        )
        while not queue.empty_unlocked():
            item = queue.get_unlocked()
            self.db_handler.commit(item)
            del item

        self.db_handler.end()
        self.templog.clear()
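Examples #2 and #4 share the same shape: poll the queue with a one-second timeout while the worker thread is alive, then drain the leftovers without locking once it has exited. Put together with the sketch class above, the pattern can be exercised end to end; this is a self-contained demo under the same assumptions, not project code:

import threading
import time


def worker(out_queue):
    # Stand-in for _logreader/__reader: produce a few items, then exit
    for i in range(5):
        time.sleep(0.2)
        out_queue.put(f"item {i}")


def main():
    q = AsyncQueue(50)              # the sketch class shown after Example #2
    t = threading.Thread(target=worker, args=(q,))
    t.daemon = True
    t.start()

    # Poll with a timeout while the worker is alive...
    while t.is_alive():
        try:
            item = q.get(1)
        except TimeOut:
            continue
        print("got", item)

    # ...then drain whatever is left, no locks needed
    while not q.empty_unlocked():
        print("got", q.get_unlocked())


if __name__ == "__main__":
    main()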
Example #6
    def __init__(self, db):
        self.db = db

        self._need_clear = False

        try:
            self.__create_table()
        except TableAlreadyExists:
            # FIXME: we can use this to recover from a crash
            self._need_clear = True
            self.__drop_table()
            self.__create_table()
        
        self.queue = AsyncQueue(50)
        self.writer_thread = threading.Thread(target=self.__writer,
                                               args=(self.queue,))
        self.writer_thread.setDaemon(True)
        self.writer_thread.start()
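The __writer target in Example #6 is not shown on this page. A hypothetical sketch of a writer loop that would fit this setup, again assuming the get()/TimeOut interface above and a None sentinel to request shutdown; write_item is a placeholder for the actual database insert:

def writer_loop(queue, write_item):
    # Hypothetical sketch of a writer-thread body: keep pulling items and
    # hand each one to write_item() until a None sentinel arrives.
    while True:
        try:
            item = queue.get(1)
        except TimeOut:
            continue
        if item is None:
            break
        write_item(item)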
Example #7
async def Start():
    queue = AsyncQueue()
    # Run the consumer and the producer concurrently on the same queue
    await asyncio.gather(
        consumer(queue),
        producer(queue)
    )
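Example #7 only builds the coroutine; something still has to schedule it. Together with the stand-in consume() sketched after Example #1, the same structure can be run like this (a sketch on asyncio.Queue, not part of the original examples):

import asyncio


async def produce(queue: asyncio.Queue) -> None:
    # Stand-in mirroring Example #1: emit a few messages, then the None sentinel
    for i in range(50, 55):
        await asyncio.sleep(1)
        print(f"Produce message {i}")
        await queue.put(i)
    await queue.put(None)
    print("Production complete!")


async def start() -> None:
    queue = asyncio.Queue()
    # Same shape as Example #7: run both halves concurrently on one queue
    await asyncio.gather(consume(queue), produce(queue))


if __name__ == "__main__":
    asyncio.run(start())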