Example #1
async def report_worker_status(owner: github_types.GitHubLogin) -> None:
    stream_name = f"stream~{owner}".encode()
    r = utils.create_aredis_for_stream()
    streams = await r.zrangebyscore("streams",
                                    min=0,
                                    max="+inf",
                                    withscores=True)

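    # scan the "streams" sorted set for this owner's stream; the for/else falls through when it is not queued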
    for pos, item in enumerate(streams):  # noqa: B007
        if item[0] == stream_name:
            break
    else:
        print("* WORKER: Installation not queued to process")
        return

    # streams holds (name, score) pairs; the score is the scheduled UTC timestamp
    planned = datetime.datetime.utcfromtimestamp(streams[pos][1]).isoformat()

    attempts = await r.hget("attempts", stream_name) or 0
    print("* WORKER: Installation queued, "
          f" pos: {pos}/{len(streams)},"
          f" next_run: {planned},"
          f" attempts: {attempts}")

    size = await r.xlen(stream_name)
    print(f"* WORKER PENDING EVENTS for this installation: {size}")
Example #2
async def async_status() -> None:
    worker_per_process: int = config.STREAM_WORKERS_PER_PROCESS
    process_count: int = config.STREAM_PROCESSES
    worker_count: int = worker_per_process * process_count

    redis_stream = utils.create_aredis_for_stream()
    stream_selector = StreamSelector(redis_stream, 0, worker_count)

    def sorter(item):
        stream, score = item
        return stream_selector.get_worker_id_for(stream)

    streams = sorted(
        await redis_stream.zrangebyscore("streams",
                                         min=0,
                                         max="+inf",
                                         withscores=True),
        key=sorter,
    )

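    # itertools.groupby only groups consecutive items, hence the prior sort with the same key function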
    for worker_id, streams_by_worker in itertools.groupby(streams, key=sorter):
        for stream, score in streams_by_worker:
            owner = stream.split(b"~")[1]
            date = datetime.datetime.utcfromtimestamp(score).isoformat(
                " ", "seconds")
            items = await redis_stream.xlen(stream)
            print(
                f"{{{worker_id:02}}} [{date}] {owner.decode()}: {items} events"
            )
Example #3
    async def start(self):
        self._stopping.clear()

        self._redis_stream = utils.create_aredis_for_stream()
        self._redis_cache = utils.create_aredis_for_cache()

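        # spawn one asyncio task per stream worker when the "stream" service is enabled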
        if "stream" in self.enabled_services:
            worker_ids = self.get_worker_ids()
            LOG.info("workers starting", count=len(worker_ids))
            for worker_id in worker_ids:
                self._worker_tasks.append(
                    asyncio.create_task(self.stream_worker_task(worker_id)))
            LOG.info("workers started", count=len(worker_ids))

        if "stream-monitoring" in self.enabled_services:
            LOG.info("monitoring starting")
            self._stream_monitoring_task = asyncio.create_task(
                self.monitoring_task())
            LOG.info("monitoring started")
Example #4
async def async_reschedule_now() -> int:
    parser = argparse.ArgumentParser(description="Rescheduler for Mergify")
    parser.add_argument("org", help="Organization")
    args = parser.parse_args()

    redis = utils.create_aredis_for_stream()
    streams = await redis.zrangebyscore("streams", min=0, max="+inf")
    expected_stream = f"stream~{args.org.lower()}~"
    for stream in streams:
        if stream.decode().lower().startswith(expected_stream):
            scheduled_at = utils.utcnow()
            score = scheduled_at.timestamp()
            transaction = await redis.pipeline()
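            # drop the retry counter and reset the stream's score to "now" so a worker picks it up immediately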
            await transaction.hdel("attempts", stream)
            await transaction.zadd("streams", **{stream.decode(): score})
            # NOTE(sileht): Do we need to cleanup the per PR attempt?
            # await transaction.hdel("attempts", attempts_key)
            await transaction.execute()
            return 0
    else:
        print(f"Stream for {args.org} not found")
        return 1
Example #5
async def startup():
    global _AREDIS_STREAM, _AREDIS_CACHE
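    # module-level Redis clients, created once when the web application starts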
    _AREDIS_STREAM = utils.create_aredis_for_stream(
        max_connections=config.REDIS_STREAM_WEB_MAX_CONNECTIONS)
    _AREDIS_CACHE = utils.create_aredis_for_cache(
        max_connections=config.REDIS_CACHE_WEB_MAX_CONNECTIONS)