def __init__(self):
    """Create the four LFU caches tracking block-wise transfers.

    Separate caches are kept for Block1/Block2 options in each
    direction (sent vs. received), all bounded by the same limit.
    """
    limit = defines.TRANSACTION_LIST_MAX_SIZE
    self._block1_sent = cachetools.LFUCache(maxsize=limit)
    self._block2_sent = cachetools.LFUCache(maxsize=limit)
    self._block1_receive = cachetools.LFUCache(maxsize=limit)
    self._block2_receive = cachetools.LFUCache(maxsize=limit)
def init_app(self, app):
    """A method to lazily initialize the application.

    Use this when you're using flask factory pattern to create your
    instances of your flask application.

    Parameters
    ----------
    app : Flask
        An instance of your `flask application
        <http://flask.pocoo.org/docs/1.0/api/#flask.Flask>`_.
    """
    conf = app.config
    # Explicit settings win; app config is only a fallback.
    if not self.client_id:
        self.client_id = conf["DISCORD_CLIENT_ID"]
    if not self.__client_secret:
        self.__client_secret = conf["DISCORD_CLIENT_SECRET"]
    if not self.redirect_uri:
        self.redirect_uri = conf["DISCORD_REDIRECT_URI"]
    if not self.__bot_token:
        self.__bot_token = conf.get("DISCORD_BOT_TOKEN", str())
    if self.users_cache is None:
        max_limit = conf.get("DISCORD_USERS_CACHE_MAX_LIMIT",
                             configs.DISCORD_USERS_CACHE_DEFAULT_MAX_LIMIT)
        self.users_cache = cachetools.LFUCache(max_limit)
    if not issubclass(self.users_cache.__class__, Mapping):
        raise ValueError(
            "Instance users_cache must be a mapping like object.")
    if not self.proxy:
        self.proxy = conf.get("DISCORD_PROXY_SETTINGS")
    if not self.proxy_auth:
        self.proxy_auth = conf.get("DISCORD_PROXY_AUTH_SETTINGS")
    # Expose the extension on the app object for later retrieval.
    app.discord = self
class LocationCache():
    # Class-level cache: every instance shares the same 50-entry LFU store.
    cache = cachetools.LFUCache(maxsize=50)

    @staticmethod
    def _clean(value):
        # Coerce to unicode and strip smart quotes so equivalent inputs
        # produce the same cache key.
        cleaned = unicode(value)
        for quote in (u"\u2018", u"\u2019", u"\u201c", u"\u201d"):
            cleaned = cleaned.replace(quote, "")
        return cleaned

    def _cache_key(self, location_code, location_type, location_name):
        """Return a stable sha256 key derived from the three attributes."""
        parts = (self._clean(location_code),
                 self._clean(location_type),
                 self._clean(location_name))
        return sha256('{}:{}:{}'.format(*parts)).hexdigest()

    def get(self, location_code, location_type, location_name):
        """Return the cached location object, or None when absent."""
        key = self._cache_key(location_code, location_type, location_name)
        return self.cache.get(key, None)

    def has(self, location_code, location_type, location_name):
        """Return True when a location with these attributes is cached."""
        key = self._cache_key(location_code, location_type, location_name)
        return key in self.cache

    def set(self, location_obj):
        """Store *location_obj*, keyed by its code, type and name."""
        key = self._cache_key(location_obj.code or '',
                              location_obj.location_type or '',
                              location_obj.name or '')
        self.cache[key] = location_obj
def __init__(self, storage, cache_size=5, debug=False):
    # type: (StorageInterface, int, bool) -> None
    """Initialise both interface bases and an LFU cache of *cache_size* entries."""
    StorageInterface.__init__(self, debug)
    CacheInterface.__init__(self, storage)
    self._cache_size = cache_size
    self._cache = cachetools.LFUCache(cache_size)
    # Not loaded yet; presumably populated lazily elsewhere — see callers.
    self._resource_names = None  # type: Optional[Set[ResourceName]]
def __init__(self, app, client_id=None, client_secret=None,
             redirect_uri=None, bot_token=None, users_cache=None,
             locks_cache=None):
    """Configure the Discord OAuth2 client for *app*.

    Every credential parameter falls back to the corresponding
    ``app.config`` key when not given explicitly.

    :param app: Flask application to read config from and attach to.
    :param users_cache: mapping used as the user cache; a bounded
        LFUCache is created when None.
    :param locks_cache: optional mapping of locks; when given, a
        module-wide asyncio lock is also created.
    :raises ValueError: if users_cache is not a mapping-like object.
    """
    self.client_id = client_id or app.config["DISCORD_CLIENT_ID"]
    self.__client_secret = client_secret or app.config[
        "DISCORD_CLIENT_SECRET"]
    self.redirect_uri = redirect_uri or app.config["DISCORD_REDIRECT_URI"]
    self.__bot_token = bot_token or app.config.get("DISCORD_BOT_TOKEN",
                                                   str())
    self.users_cache = cachetools.LFUCache(
        app.config.get("DISCORD_USERS_CACHE_MAX_LIMIT",
                       configs.DISCORD_USERS_CACHE_DEFAULT_MAX_LIMIT)
    ) if users_cache is None else users_cache
    self.locks_cache = locks_cache
    self.locksmith_lock = asyncio.Lock(
    ) if locks_cache is not None else None
    if not issubclass(self.users_cache.__class__, Mapping):
        raise ValueError(
            "Instance users_cache must be a mapping like object.")
    # BUG FIX: the original used `"http://" in self.redirect_uri`, which also
    # matches a URI that merely *contains* "http://" (e.g. inside a query
    # parameter) and would wrongly disable OAuth transport security.  Only a
    # redirect URI whose scheme is plain HTTP should relax it.
    if self.redirect_uri.startswith("http://"):
        os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "true"
    app.discord = self
def reset(self):
    """Replace the backing store with a fresh, empty cache.

    A zero size means "unbounded", backed by a plain dict; otherwise the
    configured eviction strategy selects the cachetools class.  An
    unknown strategy with a non-zero size leaves the store untouched
    (preserved from the original behaviour).
    """
    if self._size == 0:
        self._dict = {}
        return
    strategies = {
        "lru": cachetools.LRUCache,
        "lfu": cachetools.LFUCache,
    }
    cache_cls = strategies.get(self._strategy)
    if cache_cls is not None:
        self._dict = cache_cls(self._size)
def __init__(self, limitbytes, method="LRU"):
    """Create a byte-bounded cache.

    :param limitbytes: maximum total size of cached values.
    :param method: eviction policy, "LRU" or "LFU".
    :raises ValueError: on any other method name.
    """
    cache_classes = {"LRU": cachetools.LRUCache, "LFU": cachetools.LFUCache}
    cache_cls = cache_classes.get(method)
    if cache_cls is None:
        raise ValueError("unrecognized method: {0}".format(method))
    self._cache = cache_cls(limitbytes, getsizeof=self.getsizeof)
def resize(self, newsize):
    """Swap in a new LFU cache of *newsize* bytes, keeping as many
    existing entries as fit.

    Entries are re-inserted most-frequently-used first so that, when the
    new cache is smaller, the hottest entries are the ones preserved.
    """
    new_cache = cachetools.LFUCache(maxsize=newsize, getsizeof=sys.getsizeof)
    # LFUCache.popitem() evicts the *least* frequently used entry first,
    # so draining yields entries in ascending order of use.
    drained = [self._cache.popitem() for _ in range(len(self._cache))]
    # BUG FIX: the original iterated in drain order, filling the new cache
    # with the least-used entries and dropping the most-used ones once full.
    # Iterating in reverse gives the most-used entries priority.
    for key, val in reversed(drained):
        if sys.getsizeof(val) + new_cache.currsize <= new_cache.maxsize:
            new_cache[key] = val
    self._cache = new_cache
def __init__(self):
    """Build the AST-node dispatch table and the parse cache."""
    handlers = {}
    handlers[ast.Expression] = self._eval_expression
    handlers[ast.Dice] = self._eval_dice
    self._nodes = handlers
    # Parsed expressions are memoized, bounded to 256 entries.
    self._parse_cache = cachetools.LFUCache(256)
def __init__(self, app, users_cache=None):
    """Configure the Discord OAuth2 client from *app*'s config.

    :param app: Flask application providing DISCORD_* config keys.
    :param users_cache: mapping used as the user cache; a bounded
        LFUCache is created when None.
    :raises ValueError: if users_cache is not a mapping-like object.
    """
    self.client_id = app.config["DISCORD_CLIENT_ID"]
    self.client_secret = app.config["DISCORD_CLIENT_SECRET"]
    self.redirect_uri = app.config["DISCORD_REDIRECT_URI"]
    self.users_cache = cachetools.LFUCache(
        app.config.get("DISCORD_USERS_CACHE_MAX_LIMIT",
                       configs.DISCORD_USERS_CACHE_DEFAULT_MAX_LIMIT)
    ) if users_cache is None else users_cache
    if not issubclass(self.users_cache.__class__, Mapping):
        raise ValueError("Instance users_cache must be a mapping like object.")
    # BUG FIX: the original used `"http://" in self.redirect_uri`, which also
    # matches a URI that merely *contains* "http://" (e.g. inside a query
    # parameter) and would wrongly disable OAuth transport security.
    if self.redirect_uri.startswith("http://"):
        os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "true"
    app.discord = self
def __init__(self, limitbytes, method="LRU"):
    """Create a byte-bounded cache, accepting human-readable size strings.

    :param limitbytes: byte limit; strings such as "100 MB" are parsed
        via uproot's ``_memsize`` helper.
    :param method: eviction policy, "LRU" or "LFU".
    :raises ValueError: on any other method name.
    """
    from uproot.rootio import _memsize
    parsed = _memsize(limitbytes)
    if parsed is not None:
        limitbytes = int(math.ceil(parsed))
    cache_classes = {"LRU": cachetools.LRUCache, "LFU": cachetools.LFUCache}
    if method not in cache_classes:
        raise ValueError("unrecognized method: {0}".format(method))
    self._cache = cache_classes[method](limitbytes, getsizeof=self.getsizeof)
def __init__(self, context=None):
    """Build the node-type -> evaluator dispatch table.

    :param context: roll context to evaluate under; a fresh
        ``RollContext`` is created when None.
    """
    self.context = RollContext() if context is None else context
    handler_pairs = (
        (ast.Expression, self._eval_expression),
        (ast.AnnotatedNumber, self._eval_annotatednumber),
        (ast.Literal, self._eval_literal),
        (ast.Parenthetical, self._eval_parenthetical),
        (ast.UnOp, self._eval_unop),
        (ast.BinOp, self._eval_binop),
        (ast.OperatedSet, self._eval_operatedset),
        (ast.NumberSet, self._eval_numberset),
        (ast.OperatedDice, self._eval_operateddice),
        (ast.Dice, self._eval_dice),
    )
    self._nodes = dict(handler_pairs)
    # Parsed expressions are memoized, bounded to 256 entries.
    self._parse_cache = cachetools.LFUCache(256)
def __init__(self, read_cache=False):
    """Load the pickled query cache from disk when *read_cache* is true.

    Falls back to a brand-new LFU cache when the file is missing or
    cannot be unpickled.
    """
    self._cache = None
    self._backup = None
    if read_cache:
        path = os.path.join(options.cfg.cache_path, "coq_cache.db")
        if os.path.exists(path):
            try:
                # BUG FIX: use a context manager so the file handle is
                # closed (the original leaked it via pickle.load(open(...))).
                with open(path, "rb") as cache_file:
                    self._cache = pickle.load(cache_file)
                s = "Using query cache (current size: {}, max size: {})."
                # NOTE(review): when not verbose the raw template string is
                # logged unformatted — behaviour preserved from the original.
                if options.cfg.verbose:
                    s = s.format(self._cache.currsize, self._cache.maxsize)
                logger.info(s)
                print(s)
            except (IOError, ValueError, EOFError):
                s = "Cannot read query cache, creating a new one (size: {})."
                s = s.format(options.cfg.query_cache_size)
                logger.warning(s)
    if self._cache is None:
        self._cache = cachetools.LFUCache(
            maxsize=options.cfg.query_cache_size, getsizeof=sys.getsizeof)
def __init__(self, routes: typing.Optional[typing.List[Route]] = None):
    """Create the router with an optional route list and a bounded lookup cache."""
    # A falsy argument (None or an empty list) starts with a fresh list.
    self.routes: typing.List[Route] = routes or []
    # NOTE(review): annotated as Mapping although LFUCache is mutable;
    # presumably route lookups are memoized here by other methods — verify
    # against callers.
    self._cache: typing.Mapping[str, Route] = cachetools.LFUCache(256)
def clear(self):
    """Discard all cached entries.

    The previous cache object stays reachable via ``self._backup``; the
    replacement keeps the same maximum size.
    """
    old_cache = self._cache
    self._backup = old_cache
    self._cache = cachetools.LFUCache(maxsize=old_cache.maxsize,
                                      getsizeof=sys.getsizeof)
import cachetools
import falcon
import pytube
import config
import framework
from falcon_cors import CORS
from wsgiref.simple_server import make_server
from gevent.pywsgi import WSGIServer
from pytube.extract import watch_url
from pytube.helpers import safe_filename

# Best soundtrack: BJhF0L7pfo8

# Module-level LFU cache, bounded by config.CACHE_SIZE.
video_cache = cachetools.LFUCache(maxsize=config.CACHE_SIZE)


class Track:
    # Falcon-style resource; on_get handles GET requests for a track.

    @staticmethod
    def on_get(request, response):
        # Collect query parameters; `id` is mandatory, `type` and `format`
        # default to an "audio" stream in "mp4" container.
        variables = {}
        video_id = request.get_param("id", required=True, store=variables)
        _type = request.get_param("type", required=False, store=variables,
                                  default="audio")
        _format = request.get_param("format", required=False, store=variables,
                                    default="mp4")
        # TODO: Handle quality filtering
        # quality = request.get_param_as_int("quality", required=False, store=variables, default="128")
# -*- coding: utf-8 -*- from __future__ import annotations import cachetools from typing import List, Optional from . import db from .client import bot # Internal cache of botbanned users, to reduce unnecessary database queries _botban_cache = cachetools.LFUCache(100) async def is_botbanned(user_id: int, guild_id: Optional[int]) -> bool: """ Return if this user is botbanned in this guild. :param user_id: ID of user :param guild_id: ID of guild :return: boolean of whether the user is botbanned """ if guild_id is None: return False return user_id in await get_guild_botbans(guild_id) async def get_user_botbans(user_id: int) -> List[int]: """ Retrieve a list of all guilds this user is botbanned in.
def __init__(self):
    """Create the bounded LFU cache backing ``self._relations``."""
    # Bounded by the transaction-list limit so the store cannot grow unbounded.
    self._relations = cachetools.LFUCache(
        maxsize=defines.TRANSACTION_LIST_MAX_SIZE)
# -*- coding: utf-8 -*- from __future__ import annotations import cachetools from typing import List, Optional from . import db from .client import bot # Internal cache of guild invokers, to reduce unnecessary database queries _invoker_cache = cachetools.LFUCache(100) async def get_alias(guild_id: int) -> List[str]: """ Search the database for any invoker aliases in this guild. :param guild_id: ID of guild to search :return: List of all accepted invokers, including the default invoker and ping invokers """ if guild_id not in _invoker_cache: ret = await db.fetchall( """ SELECT callstr FROM invokers WHERE guild_id = %s; """, guild_id) ret = bot.ping_invokers + [row[0] for row in ret] if None in ret:
# -*- coding: utf-8 -*- from typing import List, Optional import cachetools from . import db _toggle_cache = cachetools.LFUCache(100) async def is_toggled(guild_id: Optional[int], path: str) -> bool: """ Return if the given command path is disabled in the given guild ID. :param guild_id: ID of guild to search in :param path: Command path :return: Boolean of if the command is disabled """ if guild_id is None: return False try: return guild_id in _toggle_cache[path] except KeyError: ret = await db.fetchall( """ SELECT guild_id FROM toggles WHERE command = %s """, path) ret = [row[0] for row in ret]
""" output = [command.id, command.id + "_"] # Underscore to access ID if command is shadowed by language. for language in LanguageManager.data: element = LanguageManager.get_language_element(command.path, language) names = [element.get("name")] alias = element.get("alias") if alias is not None: names.extend(alias.split()) output.extend(f"{language} {key}" for key in names) return output _guild_cache = cachetools.LFUCache(100) _channel_cache = cachetools.LFUCache(500) async def get_lang(guild_id, channel_id): """ Retrieve the language for a channel, taking into account channel overrides. :param guild_id: Guild ID :param channel_id: Channel ID :return: Language name """ try: channel_lang = _channel_cache[channel_id] except KeyError: channel_lang = await db.fetchone(
def cache(self):
    """Return a fresh, ten-entry LFU cache."""
    return cachetools.LFUCache(maxsize=10)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import pickle, os, time, asyncio, concurrent, functools, bisect, random, shutil from glob import glob from itertools import accumulate from collections import defaultdict import unicodedataplus as unicodedata import regex as re import config import cachetools import aiopg model_cache = cachetools.LFUCache(10) BEGIN = "\u0002" # Start of Text END = "\u0003" # End of Text async def init(): global pool, executor pool = await aiopg.create_pool("dbname={} user={}".format(config.DB_NAME, config.DB_USER), minsize=config.DB_POOL_MIN, maxsize=config.DB_POOL_MAX) executor = concurrent.futures.ProcessPoolExecutor(max_workers=config.MAX_WORKERS) async def run_in_process(*args, **kwargs): loop = asyncio.get_event_loop() return await loop.run_in_executor(executor, functools.partial(*args, **kwargs)) def _pickle_load(path): return pickle.load(open(path, 'rb')) async def pickle_load(*args):
""" formatter = _logging.Formatter( fmt= "%(asctime)s — %(name)s — %(levelname)s — %(message)s — %(funcName)s:%(lineno)d" ) handler = _logging.FileHandler(_log_file, mode="a") handler.setFormatter(formatter) logger = _logging.getLogger(name) logger.setLevel(_logging.DEBUG) logger.addHandler(handler) return logger _logger = _setup_logger("default") cache = _cachetools.LFUCache(32) class BakalibError(Exception): """:class:`Exception` subclass, used for differentiating between Python exceptions and bakalib exceptions """ pass class Base: """Base class for most of the classes present in this library """ @_cachetools.cached(cache) def request(self, **kwargs: str) -> dict: """Generic request method