async def create_rule(ctx: object, db: AioRedis):
    """
    Creates a rate limiter rule.

    @param ctx: (object) data to be inserted
    @param db: (object) db connection
    """
    ctx['_id'] = str(bson.ObjectId())
    await asyncio.gather(
        RateLimiter._set_indexes(ctx, db),
        db.hmset_dict(ctx['_id'], ctx),
        db.sadd(rules_set, ctx['_id']),
    )
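# Usage sketch for create_rule (not part of the module): assumes a local Redis
# reachable through aioredis v1's create_redis_pool and that create_rule is
# callable as shown; the rule fields below are illustrative only.
async def _example_create_rule():
    db = await aioredis.create_redis_pool('redis://localhost', encoding='utf-8')
    rule = {'path': '/api/v1/items', 'status_code': '429', 'message': 'Too Many Requests'}
    await create_rule(rule, db)  # assigns rule['_id'], stores the hash, registers the id
    db.close()
    await db.wait_closed()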
async def create_entry(ctx, db: AioRedis):
    """
    Creates a rate limiter entry.

    @param ctx: (object) data to be inserted
    @param db: (object) db connection
    """
    ctx['_id'] = str(bson.ObjectId())
    await asyncio.gather(
        RateLimiter._set_indexes(ctx, db),
        db.hmset_dict(ctx['_id'], ctx),
        db.sadd(entry_set, ctx['_id']),
        db.expire(ctx['_id'], int(ctx['timeout'])),
    )
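# Usage sketch for create_entry (not part of the module): the 'timeout' field is
# required because create_entry sets a TTL from it; the connection URL and the
# other entry fields are assumptions.
async def _example_create_entry():
    db = await aioredis.create_redis_pool('redis://localhost', encoding='utf-8')
    entry = {'host': '127.0.0.1', 'count': '1', 'timeout': '60'}
    await create_entry(entry, db)  # entry hash expires after 60 seconds
    db.close()
    await db.wait_closed()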
async def reset_redis(redis: Redis, loop: AbstractEventLoop, hard: bool = False) -> None:
    """Resets crawl state in Redis; a hard reset flushes the entire instance."""
    if hard:
        await redis.flushall()
    else:
        await redis.delete(q_key, info_key, seen_key, scope_key, done_key)
        await redis.hset(info_key, "crawl_depth", 2)
        for url in default_seed_list:
            # re-seed the frontier queue and mark each seed URL as seen;
            # gather runs on the current event loop
            await asyncio.gather(
                redis.rpush(q_key, ujson.dumps({"url": url, "depth": 0})),
                redis.sadd(seen_key, url),
            )
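# Usage sketch for reset_redis (not part of the module): assumes the crawl keys
# (q_key, seen_key, ...) and default_seed_list are defined as in the original
# module; only the connection URL is an assumption here.
async def _example_reset_redis():
    loop = asyncio.get_running_loop()
    redis = await aioredis.create_redis_pool('redis://localhost', encoding='utf-8')
    await reset_redis(redis, loop)             # soft reset: clear crawl keys, re-seed frontier
    await reset_redis(redis, loop, hard=True)  # hard reset: flush the whole instance
    redis.close()
    await redis.wait_closed()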
async def create(ctx: object, endpoint_cacher_db: AioRedis, service_db):
    """
    Creates an endpoint cache.

    @param ctx: (object) data to be inserted
    @param endpoint_cacher_db: (object) db connection
    @param service_db: (object) db connection
    """
    ctx['_id'] = str(bson.ObjectId())
    if 'service_id' in ctx:
        await Service.check_exists(ctx['service_id'], service_db)
    if 'response_codes' in ctx:
        # store the configured response codes in their own set and
        # reference that set from the cache hash by its id
        response_codes = ctx['response_codes']
        response_codes_id = str(bson.ObjectId())
        for response_code in response_codes:
            await endpoint_cacher_db.sadd(response_codes_id, response_code)
        ctx['response_codes'] = response_codes_id
    await asyncio.gather(
        EndpointCacher._set_indexes(ctx, endpoint_cacher_db),
        endpoint_cacher_db.hmset_dict(ctx['_id'], ctx),
        endpoint_cacher_db.sadd(endpoint_cache_set, ctx['_id']),
    )
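# Usage sketch for create (not part of the module): 'service_id' is validated
# against service_db via Service.check_exists, and 'response_codes' is moved
# into its own Redis set; the two database URLs and the payload values are
# assumptions, and the service id is a placeholder.
async def _example_create_endpoint_cache():
    cacher_db = await aioredis.create_redis_pool('redis://localhost/1', encoding='utf-8')
    service_db = await aioredis.create_redis_pool('redis://localhost/2', encoding='utf-8')
    cache = {'service_id': '<existing-service-id>', 'timeout': '30', 'response_codes': [200, 301]}
    await create(cache, cacher_db, service_db)
    cacher_db.close()
    service_db.close()
    await asyncio.gather(cacher_db.wait_closed(), service_db.wait_closed())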