Example #1
import asyncio
import time

import aiomisc


async def test_simple(loop, timer):
    # loop and timer are pytest fixtures; four blocking one-second sleeps
    # run concurrently in the thread pool, so timer(1) expects the whole
    # gather to finish in about one second.
    sleep = aiomisc.threaded(time.sleep)

    with timer(1):
        await asyncio.gather(
            sleep(1),
            sleep(1),
            sleep(1),
            sleep(1),
        )
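
The tests above depend on pytest fixtures (loop, timer), so here is a minimal standalone sketch of the same idea, assuming only that aiomisc is installed: aiomisc.threaded wraps a blocking callable so that calling it returns an awaitable executed in the thread pool, which is why several one-second sleeps can be gathered in about one second.

import asyncio
import time

import aiomisc


@aiomisc.threaded
def blocking_sleep(seconds):
    # Runs in a worker thread, so the event loop is never blocked.
    time.sleep(seconds)
    return seconds


async def main():
    started = time.monotonic()
    # Two one-second blocking calls run in parallel threads,
    # so the total wall time stays close to one second.
    await asyncio.gather(blocking_sleep(1), blocking_sleep(1))
    print("elapsed:", time.monotonic() - started)


with aiomisc.entrypoint() as loop:
    loop.run_until_complete(main())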
Example #2
import asyncio
import time

import aiomisc
from async_timeout import timeout  # assumed source of `timeout`


async def test_cancel(executor, loop, timer):
    # executor, loop and timer are pytest fixtures.
    sleep = aiomisc.threaded(time.sleep)

    async with timeout(2):
        with timer(1, dispersion=2):
            tasks = [loop.create_task(sleep(1)) for _ in range(1000)]

            await asyncio.sleep(1)

            # Cancel the wrapping tasks; the worker threads are drained
            # below when the executor shuts down.
            for task in tasks:
                task.cancel()

    executor.shutdown(wait=True)
Example #3
import asyncio
import time

import aiomisc


async def test_threaded(executor: aiomisc.ThreadPoolExecutor, timer):
    assert executor

    # Five blocking one-second sleeps run concurrently in the thread pool,
    # so the whole gather finishes in roughly one second.
    sleep = aiomisc.threaded(time.sleep)

    with timer(1):
        await asyncio.gather(
            sleep(1),
            sleep(1),
            sleep(1),
            sleep(1),
            sleep(1),
        )
Example #4
import logging
import os
from threading import RLock

import msgpack
from aiomisc import threaded


log = logging.getLogger(__name__)


class Storage:
    # Rotate (truncate) the data file once it grows past 1 MiB.
    ROTATE_SIZE = 2**20

    # Pack data with msgpack and write it to fp, optionally seeking to
    # the given offset and truncating the file first.
    def _write_to(self, fp, data, seek=None, truncate=False):
        if seek is not None:
            fp.seek(seek)

        if truncate:
            fp.truncate()

        return fp.write(self.packer.pack(data))

    # Yield (object, end_offset) pairs unpacked from fp; on a failed unpack
    # (e.g. msgpack.OutOfData at end of stream) rewind fp to the last good
    # position and re-raise so the caller can stop or retry later.
    @staticmethod
    def _read_from(fp, seek=None):
        if seek is not None:
            fp.seek(seek)

        unpacker = msgpack.Unpacker(fp, raw=False)

        while True:
            pos = fp.tell()

            try:
                result = unpacker.unpack()
                cur = unpacker.tell()

                yield result, cur
            except Exception:
                fp.seek(pos)
                raise

    def __init__(self, path):
        self.write_lock = RLock()
        self.meta_lock = RLock()

        self.path = path
        # The data file is append-only; the companion ".pos" file stores
        # the offset of the last successfully read record.
        self.write_fp = open(path, "ab")
        self.pos_fp = open("%s.pos" % path, "ab+")

        self.packer = msgpack.Packer(use_bin_type=True)

    def write(self, obj):
        with self.write_lock:
            return self._write_to(self.write_fp, obj)

    def read(self):
        # Resume from the offset stored in the ".pos" file, persist the new
        # offset after each yielded item, and rotate the data file once it
        # grows past ROTATE_SIZE.
        with self.meta_lock:
            with open(self.path, 'rb') as fp:
                self.write_fp.flush()

                try:
                    start_pos, _ = next(self._read_from(self.pos_fp, seek=0))
                except msgpack.OutOfData:
                    start_pos = 0

                try:
                    for item, pos in self._read_from(fp, seek=start_pos):
                        self._write_to(
                            self.pos_fp,
                            start_pos + pos,
                            seek=0,
                            truncate=True,
                        )
                        yield item
                except msgpack.OutOfData:
                    if self.size() > self.ROTATE_SIZE:
                        self.clear()

                    return
                finally:
                    self.pos_fp.flush()

    def size(self):
        return os.stat(self.path).st_size

    def clear(self):
        with self.write_lock, self.meta_lock:

            self.write_fp.seek(0)
            self.write_fp.truncate(0)

            self._write_to(self.pos_fp, 0, seek=0, truncate=True)

            log.info('Truncated file %r, new size is %s', self.path,
                     self.size())

    # Async facades: each call runs the blocking method in the thread pool.
    write_async = threaded(write)
    clear_async = threaded(clear)
    size_async = threaded(size)

    @threaded
    def read_async(self, chunk_size=2000):
        # Collect at most chunk_size items per call; append before checking
        # so an item whose position has already been persisted by read()
        # is never silently dropped.
        result = []
        for item in self.read():
            result.append(item)
            if len(result) >= chunk_size:
                break

        return result
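
A hypothetical usage sketch of the Storage class above (the file name and payload are illustrative, not taken from the original code): the *_async wrappers submit the blocking msgpack/file I/O to the thread pool, so they can be awaited from coroutine code without stalling the event loop.

import aiomisc


async def main():
    storage = Storage("queue.msgpack")  # illustrative path

    # Blocking file I/O runs in a worker thread, not on the event loop.
    await storage.write_async({"event": "started"})

    for item in await storage.read_async(chunk_size=100):
        print(item)


with aiomisc.entrypoint() as loop:
    loop.run_until_complete(main())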