def test_custom_client(self):
    """A PooledClient must build pool members from a user-supplied client_class."""
    class CustomClient(Client):
        pass

    pooled = PooledClient(('host', 11211))
    pooled.client_class = CustomClient
    # Fetching a connection from the pool must yield the custom subclass.
    assert isinstance(pooled.client_pool.get(), CustomClient)
def make_client_pool(self, hostname, mock_socket_values, serializer=None, **kwargs):
    """Build a Client wired to a MockSocket and expose it through a PooledClient.

    The pool's factory always hands back the same mocked connection, so every
    pooled operation is served from ``mock_socket_values``.

    :param hostname: server address passed to both clients.
    :param mock_socket_values: canned byte responses for the mock socket.
    :param serializer: optional serializer shared by both clients.
    :param kwargs: extra keyword arguments forwarded to the inner ``Client``.

    NOTE(review): this returns the raw ``mock_client``, not the ``PooledClient``
    it just configured — the sibling ``make_client`` helpers return the pooled
    wrapper instead.  Confirm callers really want the inner mock here.
    """
    mock_client = Client(hostname, serializer=serializer, **kwargs)
    mock_client.sock = MockSocket(mock_socket_values)
    client = PooledClient(hostname, serializer=serializer)
    client.client_pool = pool.ObjectPool(lambda: mock_client)
    return mock_client
def __init__(self, server=('localhost', 11211), key_prefix='',
             del_on_server=False, raise_on_key=False, raise_on_none=True):
    """Set up the pooled memcached client and local key bookkeeping.

    :param server: (host, port) of the memcached server.
    :param key_prefix: prefix the client prepends to every key.
    :param del_on_server: if True, deleting a key also deletes it server-side.
    :param raise_on_key: if True, raise KeyError for keys not stored via this
        wrapper (checked against the local key set).
    :param raise_on_none: if True, treat a None (cache miss) as KeyError.
    """
    # serialize/deserialize are methods expected to exist on this class.
    self._client = PooledClient(server, key_prefix=key_prefix,
                                serializer=self.serialize,
                                deserializer=self.deserialize)
    self._keys = set()  # local record of keys written through this wrapper
    self._del_on_server = del_on_server
    self._raise_on_key = raise_on_key
    self._raise_on_none = raise_on_none
def _get_client_pool(self, max_pool_size=None):
    """Create a PooledClient for ``self.endpoint`` with JSON (de)serialization.

    :param max_pool_size: optional int, maximum number of pooled connections.
    :returns: a configured PooledClient, or None if construction failed.
    """
    try:
        # Copied from the doc comment for Client.
        def serialize_json(key, value):
            # Exact type check (not isinstance): str subclasses fall through
            # to the JSON branch, matching the upstream doc example.
            if type(value) == str:
                return value, _STRING_TYPE
            return json.dumps(value), _JSON_TYPE

        def deserialize_json(key, value, flags):
            if flags == _STRING_TYPE:
                return value
            if flags == _JSON_TYPE:
                return json.loads(value)
            # BUG FIX: the original used "{1}" with a single positional
            # argument, so this raise itself crashed with IndexError.
            raise Exception("Unknown flags for value: {}".format(flags))

        return PooledClient(
            self.endpoint,
            no_delay=True,
            timeout=self.timeout,
            connect_timeout=self.connect_timeout,
            key_prefix="data_model_cache__",
            serializer=serialize_json,
            deserializer=deserialize_json,
            max_pool_size=max_pool_size,
            ignore_exc=False,
        )
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt escape.
        logger.exception(
            "Got exception when creating memcached client to %s", self.endpoint)
        return None
def pool_from_config(app_config, prefix="memcache.", serializer=None, deserializer=None):
    """Build a PooledClient from prefixed keys in a configuration dictionary.

    Keys under ``prefix`` (default ``memcache.``), e.g. ``memcache.endpoint``
    or ``memcache.max_pool_size``, map to keyword arguments of the
    :py:class:`~pymemcache.client.base.PooledClient` constructor.

    Supported keys:

    * ``endpoint`` (required): ``host:port`` of the memcached service,
      e.g. ``localhost:11211`` or ``127.0.0.1:11211``.
    * ``max_pool_size``: maximum pool size; client default ``2147483648``.
    * ``connect_timeout``: seconds to wait for a connection; defaults to the
      underlying socket default timeout.
    * ``timeout``: seconds to wait for socket calls; defaults to the
      underlying socket default timeout.

    :param dict app_config: the config dictionary
    :param str prefix: prefix used to filter config keys
    :param callable serializer: converts values to strings for memcached, e.g.
        :py:func:`~baseplate.context.memcache.lib.make_dump_and_compress_fn`.
    :param callable deserializer: inverse of ``serializer``, e.g.
        :py:func:`~baseplate.context.memcache.lib.decompress_and_load`.
    :returns: :py:class:`pymemcache.client.base.PooledClient`
    """
    assert prefix.endswith(".")
    spec = {
        "endpoint": config.Endpoint,
        "max_pool_size": config.Optional(config.Integer, default=None),
        "connect_timeout": config.Optional(config.Float, default=None),
        "timeout": config.Optional(config.Float, default=None),
        "no_delay": config.Optional(config.Boolean, default=True),
    }
    options = config.SpecParser(spec).parse(prefix[:-1], app_config)
    return PooledClient(
        server=options.endpoint.address,
        connect_timeout=options.connect_timeout,
        timeout=options.timeout,
        serializer=serializer,
        deserializer=deserializer,
        no_delay=options.no_delay,
        max_pool_size=options.max_pool_size,
    )
def init_app(self, app):
    """Initializes the MemcachedClient with a Flask App."""
    host = app.config['THREAT_POLLER_MEMCACHED_HOST']
    port = app.config['THREAT_POLLER_MEMCACHED_PORT']
    self.client = PooledClient(
        f"{host}:{port}",
        max_pool_size=app.config['MEMCACHED_POOL_SIZE'],
    )
def MEMCACHED_CLIENT():
    """Return (lazily creating) the process-wide memcached PooledClient.

    On first use this resolves the server address from the Odoo config
    parameter ``website_memcached.memcached_db``, records the server version,
    and rebuilds the per-database ``flush_types`` index by walking every key
    currently stored in memcached.

    :raises MemcacheServerError: if the client cannot be created.
    """
    global MEMCACHED__CLIENT__
    global MEMCACHED_SERVER
    global MEMCACHED_VERSION
    global flush_types
    if not MEMCACHED_SERVER:
        try:
            # SECURITY: eval() of a config parameter executes arbitrary code;
            # anyone who can edit ir.config_parameter controls this process.
            # Consider ast.literal_eval for a plain tuple literal.
            MEMCACHED_SERVER = eval(request.env['ir.config_parameter'].get_param('website_memcached.memcached_db') or '("localhost",11211)')
        except Exception:
            # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt escape.
            MEMCACHED_SERVER = ("localhost", 11211)
    if not MEMCACHED__CLIENT__:
        try:
            #~ if type(servers) == list:
            #~     MEMCACHED__CLIENT__ = HashClient(servers, serializer=serialize_pickle, deserializer=deserialize_pickle)
            #~ else:
            MEMCACHED__CLIENT__ = PooledClient(
                MEMCACHED_SERVER,
                serializer=serialize_pickle,
                deserializer=deserialize_pickle,
                no_delay=MEMCACHE_NODELAY,
                connect_timeout=MEMCACHE_CONNECT_TIMEOUT,
                timeout=MEMCACHE_TIMEOUT)
            MEMCACHED_VERSION = MEMCACHED__CLIENT__.version()
            ## Retrieve all flush_types per database
            # https://www.tutorialspoint.com/memcached/memcached_stats_items.htm
            # echo "stats items"|nc localhost 11211|grep number
            # STAT items:11:number 3
            # STAT items:15:number 1212
            items = MEMCACHED__CLIENT__.stats('items')
            # FIX: reuse `items` instead of issuing a second identical
            # stats('items') round-trip (the original assigned it and then
            # fetched the stats again).
            slab_limit = {k.split(':')[1]: v
                          for k, v in items.items()
                          if k.split(':')[2] == 'number'}  # slab -> limit
            # echo "stats cachedump 15 1212 "|nc localhost 11211
            #                        slab limit
            # ITEM 4092067750 [2231 b; 1561018218 s]
            # ITEM 3699334878 [1974 b; 1560964179 s]
            key_lists = [MEMCACHED__CLIENT__.stats('cachedump', slab, str(limit))
                         for slab, limit in slab_limit.items()]  # list of key->info dicts
            # [4092067750, 3699334878, ...] flattened list of all cached keys
            keys = [key for sublist in key_lists for key in sublist.keys()]
            for key in keys:
                # echo "get 4092067750"|nc localhost 11211 -> dict with page data
                page = MEMCACHED__CLIENT__.get(key)
                if page and page.get('db'):
                    if not flush_types.get(page['db'], None):
                        flush_types[page['db']] = set()
                    flush_types[page['db']].add(page.get('flush_type'))
        except TypeError as e:
            # BUG FIX: this clause used to follow `except Exception` and was
            # therefore unreachable; it must come first to ever match.
            _logger.info('Type error MEMCACHED CLIENT %s.' % e)
            raise MemcacheServerError(e)
        except Exception as e:
            err = sys.exc_info()
            error = ''.join(traceback.format_exception(err[0], err[1], err[2]))
            _logger.info('Cannot instantiate MEMCACHED CLIENT\n%s' % error)
            raise MemcacheServerError(e)
    return MEMCACHED__CLIENT__
def add_server(self, server, port):
    """Register a memcached node: create its client and add it to the hash ring."""
    key = '%s:%s' % (server, port)
    # Pooled vs plain client is a ring-wide configuration choice.
    client_cls = PooledClient if self.use_pooling else Client
    self.clients[key] = client_cls((server, port), **self.default_kwargs)
    self.hasher.add_node(key)
def setUp(self):
    """Wire a memcache context factory into an observed test Baseplate."""
    pool = PooledClient(server=("localhost", 11211))
    factory = MemcacheContextFactory(pool)
    self.baseplate_observer = TestBaseplateObserver()
    baseplate = Baseplate()
    baseplate.register(self.baseplate_observer)
    # Requests made through context.memcache will be created by `factory`.
    baseplate.add_to_context("memcache", factory)
    self.context = mock.Mock()
    self.server_span = baseplate.make_server_span(self.context, "test")
class PymemcacheCacheBackend(NoCacheBackend):
    """Cache backend storing values in memcached via a pymemcache PooledClient.

    Requires ``config['cache_backend_settings']`` to provide ``host`` and
    ``port``.  All cache operations are best-effort: any exception is printed
    and swallowed, so a cache outage never breaks the caller.
    """

    def __init__(self, config):
        cache_settings = config['cache_backend_settings']
        host = cache_settings.get('host')
        port = cache_settings.get('port')
        # Fail fast if either mandatory setting is missing or empty.
        if not host or not port:
            raise ValueError(
                ('Must provide `config.cache_backend_settings.[host|port]`'
                 ' with `PymemcacheCacheBackend!'))
        self.host = host
        self.port = port
        self._make_client()

    def _make_client(self):
        # pickle_serde lets arbitrary picklable Python objects be cached.
        self.client = PooledClient(
            (self.host, self.port),
            serde=serde.pickle_serde,
        )

    def get(self, key):
        """Return the cached value for *key*; None if the client errors."""
        try:
            return self.client.get(key)
        except Exception as e:
            # Deliberate best-effort: log to stdout and fall through (None).
            print(f'FAILED CACHE GET: {e.__class__.__name__}({e.args})')

    def set(self, key, value):
        """Store *value* under *key*; returns the client result, or None on error."""
        try:
            return self.client.set(key, value)
        except Exception as e:
            print(f'FAILED CACHE SET: {e.__class__.__name__}({e.args})')

    def delete(self, key):
        """Delete *key*; returns the client result, or None on error."""
        try:
            return self.client.delete(key)
        except Exception as e:
            print(f'FAILED CACHE DELETE: {e.__class__.__name__}({e.args})')
def pool_from_config(app_config, prefix="memcache.", **kwargs):
    """Build a PooledClient from prefixed keys in a configuration dictionary.

    Keys are read under ``prefix`` (default ``memcache.``), e.g.
    ``memcache.endpoint`` or ``memcache.max_pool_size``, and forwarded to the
    `PooledClient <https://pymemcache.readthedocs.io/en/latest/apidoc/pymemcache.client.base.html#pymemcache.client.base.PooledClient>`_
    constructor.

    Supported keys:

    * ``endpoint`` (required): ``host:port`` of the memcached service,
      e.g. ``localhost:11211`` or ``127.0.0.1:11211``.
    * ``max_pool_size``: maximum pool size; client default ``2147483648``.
    * ``connect_timeout``: seconds to wait for a connection; defaults to the
      underlying socket default timeout.
    * ``timeout``: seconds to wait for socket calls; defaults to the
      underlying socket default timeout.

    Explicit ``**kwargs`` take precedence over values from the config.
    """
    assert prefix.endswith(".")
    config_prefix = prefix[:-1]
    spec = {
        config_prefix: {
            "endpoint": config.Endpoint,
            "max_pool_size": config.Optional(config.Integer, default=None),
            "connect_timeout": config.Optional(config.Float, default=None),
            "timeout": config.Optional(config.Float, default=None),
        },
    }
    options = getattr(config.parse_config(app_config, spec), config_prefix)
    # Config values fill in only kwargs the caller did not supply explicitly.
    for name in ("max_pool_size", "connect_timeout", "timeout"):
        value = getattr(options, name)
        if value is not None:
            kwargs.setdefault(name, value)
    return PooledClient(options.endpoint.address, **kwargs)
def MEMCACHED_CLIENT():
    """Return (lazily creating) the process-wide memcached PooledClient.

    On first use this resolves the server address from the Odoo config
    parameter ``website_memcached.memcached_db``, records the server version,
    and rebuilds the per-database ``flush_types`` index by walking every key
    currently stored in memcached.

    :raises MemcacheServerError: if the client cannot be created.
    """
    global MEMCACHED__CLIENT__
    global MEMCACHED_SERVER
    global MEMCACHED_VERSION
    global flush_types
    if not MEMCACHED_SERVER:
        try:
            # SECURITY: eval() of a config parameter executes arbitrary code;
            # consider ast.literal_eval for a plain tuple literal.
            MEMCACHED_SERVER = eval(request.env['ir.config_parameter'].get_param('website_memcached.memcached_db') or '("localhost",11211)')
        except Exception:
            # Narrowed from bare `except:` so SystemExit/KeyboardInterrupt escape.
            MEMCACHED_SERVER = ("localhost", 11211)
    if not MEMCACHED__CLIENT__:
        try:
            #~ if type(servers) == list:
            #~     MEMCACHED__CLIENT__ = HashClient(servers, serializer=serialize_pickle, deserializer=deserialize_pickle)
            #~ else:
            # Constructor parameters (server, serializer, timeouts, no_delay,
            # ignore_exc, key_prefix, ...) are documented at:
            # http://pymemcache.readthedocs.io/en/latest/getting_started.html
            MEMCACHED__CLIENT__ = PooledClient(
                MEMCACHED_SERVER,
                serializer=serialize_pickle,
                deserializer=deserialize_pickle,
                no_delay=MEMCACHE_NODELAY,
                connect_timeout=MEMCACHE_CONNECT_TIMEOUT,
                timeout=MEMCACHE_TIMEOUT)
            MEMCACHED_VERSION = MEMCACHED__CLIENT__.version()
            items = MEMCACHED__CLIENT__.stats('items')
            # FIX: reuse `items` instead of issuing a second identical
            # stats('items') round-trip (the original fetched the stats twice).
            slab_limit = {k.split(':')[1]: v
                          for k, v in items.items()
                          if k.split(':')[2] == 'number'}  # slab -> item count
            key_lists = [MEMCACHED__CLIENT__.stats('cachedump', slab, str(limit))
                         for slab, limit in slab_limit.items()]
            keys = [key for sublist in key_lists for key in sublist.keys()]
            for key in keys:
                page = MEMCACHED__CLIENT__.get(key)
                if page and page.get('db'):
                    if not flush_types.get(page['db'], None):
                        flush_types[page['db']] = set()
                    flush_types[page['db']].add(page.get('flush_type'))
        except TypeError as e:
            # BUG FIX: this clause used to follow `except Exception` and was
            # therefore unreachable; it must come first to ever match.
            _logger.info('Type error MEMCACHED CLIENT %s.' % e)
            raise MemcacheServerError(e)
        except Exception as e:
            err = sys.exc_info()
            error = ''.join(traceback.format_exception(err[0], err[1], err[2]))
            _logger.info('Cannot instantiate MEMCACHED CLIENT\n%s' % error)
            raise MemcacheServerError(e)
    return MEMCACHED__CLIENT__
def get(self, blueprint): u = ct.find_one({'email': self.email}) if u is None: return None else: return u def set(self, blueprint, token): ct.update_one({'email': self.email}, {'$set': {'token': token}}) def delete(self, blueprint): ct.update({'email': self.email}, {'$pull': {'email': self.email}}) client = PooledClient('localhost') app = Flask(__name__) app.config['MONGO_DBNAME'] = 'choosytable' app.config['MONGO_URI'] = 'mongodb://localhost:27017/choosytable' os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1' os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = '1' app.secret_key = os.urandom(24).hex() app.config['GOOGLE_OAUTH_CLIENT_ID'] = os.environ.get("GOOGLE_CLIENT_ID") app.config['GOOGLE_OAUTH_CLIENT_SECRET'] = os.environ.get( "GOOGLE_CLIENT_SECRET") blueprint = make_google_blueprint( client_id=os.environ.get("GOOGLE_CLIENT_ID"), client_secret=os.environ.get("GOOGLE_CLIENT_SECRET"), scope=["profile", "email"], offline=True,
from flask import Flask
from flask_cors import CORS
import os
from pymemcache.client.base import PooledClient

# Shared memcached connection pool; 'memcached:11211' is presumably the
# compose/k8s service name -- TODO confirm.  encoding="utf-8" makes get()
# return str instead of bytes.
cache = PooledClient('memcached:11211', max_pool_size=10, encoding="utf-8")

app = Flask(__name__)
CORS(app)

# Pick the backend host by deployment stage (env var STAGE).
if os.getenv('STAGE') == 'test':
    app.config['BACKEND_URL'] = 'http://localhost:8008'
else:
    app.config['BACKEND_URL'] = 'http://backend:8001'

# REST endpoint templates derived from the backend URL; the '{}' slots are
# placeholders for str.format-style substitution.
app.config['API_URL'] = app.config['BACKEND_URL'] + '/api'
app.config['TOKEN_URL'] = app.config['API_URL'] + '/token/'
app.config['ALUMNOS_URL'] = app.config['API_URL'] + '/alumnos/'
app.config['CARRERAS_URL'] = app.config['API_URL'] + '/carreras/{}'
app.config['MATERIASCURSADAS_URL'] = app.config['CARRERAS_URL'] + \
    '/materiascursadas/'
app.config['INSCRIPCIONES_URL'] = app.config['CARRERAS_URL'] + \
    '/inscripciones/'
app.config['PLAN_URL'] = app.config['CARRERAS_URL'] + '/planes/{}/'
app.config['ALUMNOS_CARRERA_URL'] = app.config['CARRERAS_URL'] + '/alumnos/'
app.config['CURSANTES_URL'] = app.config['CARRERAS_URL'] + \
    '/cantidad-cursantes/'
app.config['INGRESANTES_URL'] = app.config['CARRERAS_URL'] + \
    '/cantidad-ingresantes/'
app.config['GRADUADOS_URL'] = app.config['CARRERAS_URL'] + \
    '/cantidad-graduados/'
app.config['POSTULANTES_URL'] = app.config['CARRERAS_URL'] + \
with open(DEFAULT_CONFIG_PATH, "rt") as f: logger_config = json.load(f) logger_config["loggers"]["main"]["level"] = args.logging_level logging.config.dictConfig(logger_config) else: logging.basicConfig(level=logging.INFO) logger = logging.getLogger("main") max_workers = int(args.max_workers or config["file_handler"]["max_workers"]) number_of_links = int(args.number_of_links or config["file_handler"]["number_of_links"]) directory = args.directory or config["file_handler"]["default_directory"] path_to_file_save = os.path.join("..", directory) url_link = args.link or config["file_handler"]["url_link"] cache = PooledClient(config["memcached"]["ip"], max_pool_size=max_workers) path_to_db = config["db"]["path_to_db"] db = sqlite3.connect(path_to_db) initial_db(db, logger) wiki = ThreadPoolLinkHandler(url_link, max_workers) wiki.runner()
def setUp(self):
    """Create the memcache-backed rate-limit backend factory for the tests."""
    pool = PooledClient(server=memcached_endpoint.address)
    self.backend_factory = MemcacheRateLimitBackendContextFactory(pool)
    super(MemcacheRateLimitBackendTests, self).setUp()
from pymemcache.client import base
from pymemcache.client.base import PooledClient
import time
import threading
import logging
from random import seed
from random import choice

# client = base.Client(('localhost', 11211), connect_timeout=1, timeout=0.5)
# client = PooledClient(('127.0.0.1', 11211), max_pool_size=20, connect_timeout=1, timeout=0.5)
# m=no maxpool means its higher limit - 2147483648L
client = PooledClient(('127.0.0.1', 11211), connect_timeout=1, timeout=0.5)


def test_memcached(id=0, fr=0, lim=10000):
    """Issue sequential GETs for keys some_key_{fr} .. some_key_{lim-1}.

    :param id: label printed when this run finishes (worker id).
    :param fr: first key index, inclusive.
    :param lim: end key index, exclusive.
    """
    print(f'throttling for {fr}+{lim - 1} sequential requests')
    # NOTE(review): ms_start is captured but never read -- timing output
    # appears to be unfinished.
    ms_start = time.time() * 1000.0
    for i in range(fr, lim):
        val = client.get(f'some_key_{i}')
        # print(val)
        if val is None:
            print('cache miss')
    print(f'finished - {id}')


def fill_cache_entries(num_entries):
    # Guard against a missing client; the rest of the body is not visible in
    # this view and may be truncated.
    if not client:
        print("not connected")
def make_client(self, mock_socket_values, serializer=None):
    """Return a PooledClient whose pool always hands out one mocked Client."""
    backing = Client(None, serializer=serializer, key_prefix=b'xyz:')
    backing.sock = MockSocket(list(mock_socket_values))
    pooled = PooledClient(None, serializer=serializer, key_prefix=b'xyz:')
    # Every pool checkout yields the same mock-socket-backed connection.
    pooled.client_pool = pool.ObjectPool(lambda: backing)
    return pooled
class MemcachedCache(Cache): def serialize(self, key, value): print key, type(value) if type(value) == str: # only str, not unicode or extend classes #print 1, len(value) return value, 1 if isinstance(value, np.ndarray): # any ndarray #return zlib.compress(value.dumps()), 2 #print 2, key, len(value.dumps()) return value.dumps(), 2 # other types #print 3, len(json.dumps(value)) try: print json.dumps(value) except Exception as e: #print 4, e raise return json.dumps(value), 3 def deserialize(self, key, value, flags): if flags == 1: # str return value if flags == 2: # ndarray #return np.loads(zlib.decompress(value)) return np.loads(value) if flags == 3: # other return json.loads(value) raise TypeError("Unknown flags for value: %d" % flags) def __init__(self, server=('localhost', 11211), key_prefix='', del_on_server=False, raise_on_key=False, raise_on_none=True): self._client = PooledClient(server, key_prefix=key_prefix, serializer=self.serialize, deserializer=self.deserialize) self._keys = set() self._del_on_server = del_on_server self._raise_on_key = raise_on_key self._raise_on_none = raise_on_none def __getitem__(self, key): if self._raise_on_key and key not in self._keys: raise KeyError value = self._client.get(key) if self._raise_on_none and value is None: raise KeyError return value def __setitem__(self, key, value): self._client.set(key, value) self._keys.add(key) def __delitem__(self, key): if self._del_on_server: self._client.delete(key) self._keys.discard(key) def __len__(self): return len(self._keys) def __iter__(self): return self._keys.__iter__() def keys(self): return list(self._keys) def clear(self): for key in self._keys: del self[key] def __del__(self): self.clear() self._client.close()
def make_client(self, mock_socket_values, **kwargs):
    """Return a PooledClient backed by a single mocked Client connection."""
    backing = Client(None, **kwargs)
    backing.sock = MockSocket(list(mock_socket_values))
    pooled = PooledClient(None, **kwargs)
    # The pool's factory always returns the same mocked connection.
    pooled.client_pool = pool.ObjectPool(lambda: backing)
    return pooled
from django.core.management.base import BaseCommand
from core.models import MateriaCursada
from core.serializers import MateriaCursadaSerializer
from pymemcache.client.base import PooledClient
from django.conf import settings
import json

# Module-level connection pool shared by every run of this command.
cache = PooledClient(settings.MEMCACHED_URL, max_pool_size=4, encoding="utf-8")


class Command(BaseCommand):
    """Management command: cache all MateriaCursada rows of one carrera.

    Serializes the rows to JSON and stores the blob in memcached under the
    carrera code.
    """

    def handle(self, *args, **kwargs):
        # NOTE(review): --carrera is declared optional below, so
        # kwargs['carrera'] may be None here -- confirm callers always pass it.
        carrera = kwargs['carrera']
        materias_cursadas = MateriaCursada.objects.filter(carrera__codigo=carrera)
        data = MateriaCursadaSerializer(materias_cursadas, many=True).data
        # ensure_ascii=False keeps accented characters readable; encode to
        # utf8 bytes before handing the payload to memcached.
        cache.set(carrera, json.dumps(data, ensure_ascii=False).encode('utf8'))
        print('La caché fue actualizada')

    def add_arguments(self, parser):
        parser.add_argument(
            '--carrera',
            help='Asigno una carrera para ser guardada por memcached',
        )
def make_client(self, mock_socket_values, **kwargs):
    """Build a PooledClient whose pool yields one mock-socket-backed Client."""
    inner = Client(None, key_prefix=b'xyz:', **kwargs)
    inner.sock = MockSocket(list(mock_socket_values))
    outer = PooledClient(None, key_prefix=b'xyz:', **kwargs)
    # Checkouts from the pool always return the same mocked connection.
    outer.client_pool = pool.ObjectPool(lambda: inner)
    return outer
def _make_client(self):
    """Create the PooledClient used for all cache operations.

    pickle_serde lets arbitrary picklable Python objects be stored.
    """
    self.client = PooledClient(
        (self.host, self.port),
        serde=serde.pickle_serde,
    )
def __get_cache(self):
    """Lazily create and memoize the memcached client on this instance."""
    if not self.__cache:
        Logger.debug("__get_cache")
        # Local import -- presumably to defer loading the config module until
        # first use; TODO confirm.  NOTE(review): this always reads config.dev,
        # even outside development -- verify that is intended.
        from config.dev import config
        self.__cache = PooledClient(server=config['cache']['server'])
    return self.__cache
async def setup_session(request: Request) -> JSONResponse:
    """Write a marker value into the session and echo the session back."""
    request.session.update({"data": "session_data"})
    return JSONResponse({"session": request.session})


async def clear_session(request: Request) -> JSONResponse:
    """Empty the session and echo the (now empty) session back."""
    request.session.clear()
    return JSONResponse({"session": request.session})


def view_session(request: Request) -> JSONResponse:
    """Return the current session contents without modifying them."""
    return JSONResponse({"session": request.session})


routes = [
    Route("/setup_session", endpoint=setup_session),
    Route("/clear_session", endpoint=clear_session),
    Route("/view_session", endpoint=view_session),
]

# NOTE(review): pymemcache's PooledClient takes a single `server` argument,
# not host=/port= keywords -- confirm which library this PooledClient comes
# from (the import is not visible in this view).
memcache_client = PooledClient(host="localhost", port=11211)

app = Starlette(debug=True, routes=routes)
app.add_middleware(
    SessionMiddleware,
    secret_key="secret",
    cookie_name="cookie22",
    backend_type=BackendType.memcache,
    backend_client=memcache_client,
)