async def get(self, token: PipelineToken, session=None) -> Any:
    # A timeout of 0 means caching is disabled for this method.
    timeout = self._manager.get_timeout(token.method)
    if timeout == 0:
        raise NotFound
    # Run the blocking Redis GET off the event loop.
    item = await sync_to_async(self._cache.get)(token.stringify)
    if item is None:
        raise NotFound
    LOGGER.log(
        self._log_level,
        f"[Trace: {self._game.upper()} > RedisCache > {self._alias}] GET: {self._log_template(token)}"
    )
    return pytify(item)
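A minimal call-site sketch, assuming a configured RedisCache store and a PipelineToken produced elsewhere in the pipeline; fetch_or_none is a hypothetical helper, not part of the store API:

# Hypothetical helper: treat NotFound as an ordinary cache miss.
async def fetch_or_none(cache, token):
    try:
        return await cache.get(token)
    except NotFound:
        # Raised on a miss, or when caching is disabled (timeout == 0).
        return None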
async def get_limits(self, server: str, method: str):
    # The four keys line up positionally with self.defaults below.
    limits = [
        f"{self._api_key}_application_rates_{server}",
        f"{self._api_key}_application_times_{server}",
        f"{self._api_key}_methods_rates_{method}",
        f"{self._api_key}_methods_times_{method}",
    ]
    # Run the blocking MGET off the event loop, as in get() above.
    responses = enumerate(await sync_to_async(self._redis.mget)(limits))
    # Fall back to a copy of the matching default for any key that is missing.
    return [
        pytify(item) if item is not None else fast_copy(self.defaults[ind])
        for ind, item in responses
    ]
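A sketch of one way to consume the result, assuming a limiter instance exposing this method; the server and method strings are illustrative, and the positional unpacking relies on the key order built above:

# Hypothetical call site: the four entries come back in the same order
# as the keys (application rates/times, then method rates/times).
app_rates, app_times, method_rates, method_times = await limiter.get_limits(
    "na1", "match_v5"
)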
async def get(self, token: PipelineToken, session=None) -> Any:
    # A timeout of 0 means caching is disabled for this method.
    timeout = self._manager.get_timeout(token.method)
    if timeout == 0:
        raise NotFound
    await self.connect()
    # One collection per method, keyed by the stringified token.
    item = await self._cache[token.method].find_one({'token': token.stringify})
    if item is None:
        raise NotFound
    LOGGER.log(
        self._log_level,
        f"[Trace: {self._game.upper()} > MongoDB > {self._alias}] GET: {self._log_template(token)}"
    )
    return pytify(item["data"])
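For reference, a sketch of the document shape this lookup assumes; the field names come from the query above and the migration helper below, while the values are made up for illustration:

# Illustrative cached document in the per-method collection (values are made up):
example_doc = {
    "token": "...",        # token.stringify, the lookup key used by find_one
    "data": {},            # payload returned to the caller via pytify(item["data"])
    "dataType": "bson",    # "pickle" for pre-migration documents (see below)
}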
from typing import List


async def migrate_all_to_bson(model: str):
    model = model.lower()
    pipeline = pipelines[model]
    # Collect only the MongoDB stores in the pipeline; other stores are untouched.
    stores: List[MongoDB] = []
    for store in pipeline:
        if isinstance(store, MongoDB):
            stores.append(store)
    for store in stores:
        await store.connect()
        for method in store._manager:
            col = store._cache[method]
            cursor = col.find({})
            # Drain the cursor in batches of 10 documents.
            docs = await cursor.to_list(length=10)
            while docs:
                for doc in docs:
                    # Documents written before the migration default to pickle.
                    data_type = doc.get("dataType", "pickle")
                    if data_type == "pickle":
                        await col.update_one(
                            {"_id": doc["_id"]},
                            {"$set": {"data": pytify(doc["data"]), "dataType": "bson"}},
                        )
                docs = await cursor.to_list(length=10)
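A sketch of how the migration might be driven from a script, assuming a registered pipeline named "lol"; any activated model name would work:

import asyncio

# Hypothetical one-off driver for the pickle -> BSON migration.
if __name__ == "__main__":
    asyncio.run(migrate_all_to_bson("lol"))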