class MemcachedSessionStore(SessionStore):
    """A session store that saves sessions with memcache.

    :param session_class: The session class to use.  Defaults to
                          :class:`Session`.
    :param servers: a list or tuple of server addresses or a compatible client.
                    Defaults to `['127.0.0.1:11211']`.
    :param default_timeout: the default timeout that is used if no timeout is
                            specified.  A timeout of 0 indicates that the
                            cache never expires.
    :param key_prefix: a prefix that is added before all keys.  This makes it
                       possible to use the same memcached server for different
                       applications.
    """

    def __init__(self, session_class=None, servers=None, default_timeout=600,
                 key_prefix=None):
        SessionStore.__init__(self, session_class)
        self.mc = MemcachedCache(servers, default_timeout, key_prefix)

    def get(self, key):
        return self.mc.get(key)

    def set(self, key, value, timeout=None):
        self.mc.set(key, value, timeout)

    def get_session(self, sid):
        data = self.get(sid)
        if data is None:
            data = {}
        return self.session_class(dict(data), sid, False)
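# A minimal usage sketch for MemcachedSessionStore above, assuming
# SessionStore comes from werkzeug.contrib.sessions and a memcached server is
# reachable at the default address; the key prefix is illustrative.
store = MemcachedSessionStore(servers=['127.0.0.1:11211'],
                              default_timeout=600, key_prefix='myapp')
session = store.new()                     # fresh session with a generated sid
session['user_id'] = 42
store.set(session.sid, dict(session))     # persist the session payload
restored = store.get_session(session.sid)
assert restored['user_id'] == 42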
def test_default_werkzeug_cache(self):
    with self.settings(CELERY_ALWAYS_EAGER=False):
        app = Celery()
        app.config_from_object(settings)
        self.task.bind(app)
        app.finalize()
        async_task = self.task.delay(**self.kwargs)
        cache = MemcachedCache(app.backend.client)
        self.assertEqual(async_task.status, states.PENDING)
        self.assertNotEqual(cache.get("herd:%s" % self.key), None)
def get_sfc_uuid():
    """Retrieves a unique identifier in order to compose an SFC

    Retrieves a uuid4 identifier to compose an SFC and gets a copy of the
    vnffg template.

    :return: a unique identifier str.
    """
    vnffgd = deepcopy(vnffgd_template)
    sfc_uuid = str(uuid.uuid4())

    cache = MemcachedCache()
    cache.set(sfc_uuid, vnffgd)

    return jsonify({'sfc_uuid': sfc_uuid})
def _memcache(self, **kwargs):
    """Returns a :class:`MemcachedCache` instance"""
    kwargs.update(dict(
        servers=self._config('MEMCACHED_SERVERS', None),
        key_prefix=self._config('key_prefix', None),
    ))
    return MemcachedCache(**kwargs)
def setup_cache(app):
    app.cache = None
    servers = app.config.get('MEMCACHED_SERVERS')
    key_prefix = app.config.get('MEMCACHED_PREFIX')
    if servers:
        app.cache = MemcachedCache(servers=[servers], key_prefix=key_prefix)
        app.cache.set('sc-test', 'sc-value')
def __set_cache(self):
    self.logger.debug("Setting up the cache")
    if hasattr(self.config, 'cache') and "MEMCACHE_BACKENDS" in self.config.cache:
        self.cache = MemcachedCache(self.config.cache["MEMCACHE_BACKENDS"])
    else:
        self.cache = SimpleCache()
def prepare(self):
    self.prepared = True

    # Cache
    if self.config.CACHE_TYPE == 'redis':
        self.cache = RedisCache(host=self.config.CACHE_SERV)
    elif self.config.CACHE_TYPE == 'memcached':
        self.cache = MemcachedCache(servers=[self.config.CACHE_SERV])
    else:
        self.cache = FileSystemCache(self.config.CACHE_SERV)

    # Options
    from .admin import Option
    self.options = Option.auto_load()

    # Timer
    @self.app.before_request
    def before_request():
        g.start = time.time()

    # Medias
    self.app.add_url_rule(self.app.config['UPLOAD_DIRECTORY_URL'] + '<filename>',
                          'FyPress.uploaded_file', build_only=True)
    self.app.wsgi_app = SharedDataMiddleware(self.app.wsgi_app, {
        self.app.config['UPLOAD_DIRECTORY_URL']: self.app.config['UPLOAD_DIRECTORY']
    })
def init_cache(app):
    servers = app.config.get('MEMCACHED_SERVERS', ['127.0.0.1:11211'])
    if not servers:
        servers = ['localhost:11211']
    prefix = app.config.get('MEMCACHED_PREFIX', '')
    app.cache = MemcachedCache(servers=servers, key_prefix=prefix)
def get_the_cache():
    if app.config.get('CACHE_SERVER', None):
        from werkzeug.contrib.cache import MemcachedCache
        cache = MemcachedCache(app.config['CACHE_SERVER'])
    else:
        from werkzeug.contrib.cache import SimpleCache
        cache = SimpleCache()
    return cache
def connect(self):
    try:
        conf = app.config.get('MEMCACHED')
        cache = MemcachedCache(**conf)
    except Exception:
        cache = NullCache()
    return cache
class Cache():
    def __init__(self):
        if webapp.config['APP_ENV'] == 'dev':
            from werkzeug.contrib.cache import SimpleCache
            self.cache = SimpleCache()
        else:
            from werkzeug.contrib.cache import MemcachedCache
            self.cache = MemcachedCache(['127.0.0.1:11211'])

    def get(self, cache_key=''):
        if 'cache' in request.args and request.args.get('cache') == 'clear':
            user_data = session.get('_user', None)
            if user_data and user_data['is_admin']:
                return None
        return self.cache.get(cache_key)

    def set(self, cache_key='', data=None, timeout=1000):
        self.cache.set(cache_key, data, timeout)
        return True
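# A hedged usage sketch of the Cache wrapper above; it assumes a Flask
# request context (get() consults request.args and session), and
# render_home() is a hypothetical page renderer.
page_cache = Cache()
html = page_cache.get('home:rendered')
if html is None:
    html = render_home()
    page_cache.set('home:rendered', html, timeout=1000)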
def get_connection(self, app):
    servers = app.config.get('MEMCACHED_SERVERS')
    username = app.config.get('MEMCACHED_USERNAME', None)
    password = app.config.get('MEMCACHED_PASSWORD', None)
    cache = MemcachedCache(servers, username=username, password=password)
    if not cache:
        raise Exception(
            'Memcached session cannot connect to memcached server.')
    return cache
def setup_cache(app):
    """
    Setup ``app.cache``.
    """
    # TODO: Support other cache type.
    servers = app.config.get('MEMCACHED_SERVERS', '').split()
    if not servers:
        servers = ['localhost:11211']
    servers = memcache.LockingClient(servers)
    prefix = app.config.get('MEMCACHED_PREFIX', '')
    app.cache = MemcachedCache(servers=servers, key_prefix=prefix)
class Store(SessionStore):
    def __init__(self, session_class=None):
        super(Store, self).__init__(session_class)
        if settings.DATABASE_ENGINE == 'gae':
            self.cache = GAEMemcachedCache(default_timeout=0)
        else:
            servers = settings.SESSION_OPTIONS.get('memcached_servers', [])
            self.cache = MemcachedCache(servers, default_timeout=0)

    def save(self, session):
        self.cache.set(session.sid, dict(session))

    def delete(self, session):
        self.cache.delete(session.sid)

    def get(self, sid):
        if not self.is_valid_key(sid):
            return self.session_class.new()
        try:
            data = self.cache.get(sid)
        except Exception:
            data = {}
        return self.session_class(data, sid, False)

    def list(self):
        return self.cache.get_dict().keys()
class RepoCache:
    def __init__(self, timeout=18000, version=None):
        cache_host = os.environ.get('CACHE_HOST')
        cache_port = os.environ.get('CACHE_PORT')
        # cache_username = os.environ.get('CACHE_USERNAME')
        # cache_password = os.environ.get('CACHE_PASSWORD')
        self._cache = MemcachedCache(['{}:{}'.format(cache_host, cache_port)])
        # self._cache = bmemcached.Client(
        #     [cache_host], username=cache_username, password=cache_password)
        self._timeout = timeout
        self._version = version

    def get_cache(self, key):
        return self._cache.get(key)

    def set_cache(self, key, value):
        return self._cache.set(key, value, self._timeout)

    def delete_cache(self, key):
        return self._cache.delete(key, self._version)
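# Illustrative use of RepoCache above; it assumes CACHE_HOST and CACHE_PORT
# are set in the environment so the constructor can build a valid
# 'host:port' server address.
repo_cache = RepoCache(timeout=18000)
repo_cache.set_cache('repo:stats', {'stars': 10})
stats = repo_cache.get_cache('repo:stats')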
def include_sfc_acl():
    """Includes ACL criteria in VNFFGD

    JSON arguments are:
        - sfc_uuid: the unique identifier of the SFC being composed
        - acl: a dict containing the acl criteria to be added into the
          vnffgd template

    :return: OK if success, or ERROR and its reason if not
    """
    vnffgd = None
    cache = MemcachedCache()

    if 'sfc_uuid' in request.json:
        vnffgd = cache.get(request.json['sfc_uuid'])

    if not vnffgd:
        return jsonify({'status': ERROR, 'reason': 'SFC UUID not found!'})

    acl = request.json['acl']

    topology_template = vnffgd['vnffgd']['template']['vnffgd']['topology_template']
    criteria = topology_template['node_templates']['Forwarding_path1'] \
        ['properties']['policy']['criteria']

    res, acl = acl_criteria_parser(acl)

    if res != OK:
        return jsonify({'status': ERROR, 'reason': acl})

    for rule in acl:
        criteria.append(rule)

    # debug
    logger.debug('VNFFGD Template UUID: %s\n%s', request.json['sfc_uuid'],
                 json.dumps(vnffgd, indent=4, sort_keys=True))

    cache.set(request.json['sfc_uuid'], vnffgd)

    return jsonify({'status': OK})
def checkAuth(auth):
    (auth_type, auth_value, request) = auth
    cache = MemcachedCache(app.config['MEMCACHE_SERVERS'].split(","))
    cache_key = "AUTH-%s+%s" % (auth_type, auth_value)

    # Auth is in the cache, go ahead sir
    if auth_type == 'APIKEY':
        return checkAuthAPI(cache, cache_key, auth_value, request)
    elif auth_type == 'LDAP':
        return checkAuthLDAP(cache, cache_key, auth_value, request)
    else:
        return False
def decorated_function(*args, **kwargs):
    cache = MemcachedCache(['127.0.0.1:11211'])
    key = '{url}{data}'.format(url=args[0], data=pickle.dumps(args))
    hash_ = hashlib.md5(key.encode()).hexdigest()
    if not cache.has(hash_):
        status_code, cached_data = f(*args, **kwargs)
        if not kwargs.get('nocache', False) and status_code == 200:
            cache.set(hash_, (status_code, cached_data), timeout=5 * 60)
    else:
        status_code, cached_data = cache.get(hash_)
    return status_code, cached_data
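# The snippet above is only the inner wrapper of a caching decorator. Below is
# a minimal sketch of the enclosing decorator it presumably belongs to; the
# decorator name `cached_request` and the use of functools.wraps are
# assumptions, not taken from the source.
import functools
import hashlib
import pickle

from werkzeug.contrib.cache import MemcachedCache


def cached_request(f):
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        cache = MemcachedCache(['127.0.0.1:11211'])
        # Key on the URL (first positional arg) plus all arguments.
        key = '{url}{data}'.format(url=args[0], data=pickle.dumps(args))
        hash_ = hashlib.md5(key.encode()).hexdigest()
        if not cache.has(hash_):
            status_code, cached_data = f(*args, **kwargs)
            if not kwargs.get('nocache', False) and status_code == 200:
                cache.set(hash_, (status_code, cached_data), timeout=5 * 60)
        else:
            status_code, cached_data = cache.get(hash_)
        return status_code, cached_data
    return decorated_function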
def init_cache(cache_type="simple", memcached_servers=[], cache_dir=None,
               timeout=259200):
    '''
    init_cache creates the oembed cache with the given cache type

    cache_type - 'simple', 'memcached', or 'file'. Determines which type of
                 cache to use.
    memcached_servers - List of memcached servers. Must be set if cache_type
                        is 'memcached'.
    cache_dir - Directory for a file system cache. Must be set if cache_type
                is 'file'.
    timeout - Timeout in seconds. Default is 3 days.
    '''
    global cache
    if cache_type == 'simple':
        cache = SimpleCache(default_timeout=timeout)
    elif cache_type == 'memcached':
        cache = MemcachedCache(servers=memcached_servers, default_timeout=timeout)
    elif cache_type == 'file':
        cache = FileSystemCache(cache_dir, default_timeout=timeout)
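# Illustrative calls to init_cache() above; the server address and cache
# directory are assumptions.
init_cache(cache_type='memcached', memcached_servers=['127.0.0.1:11211'])
init_cache(cache_type='file', cache_dir='/tmp/oembed-cache', timeout=3600)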
def get_cache(app):
    """
    Attempt to find a valid cache from the Celery configuration.

    If the setting is a valid cache, just use it.
    Otherwise, if Django is installed, then:
        If the setting is a valid Django cache entry, then use that.
        If the setting is empty, use the default cache.
    Otherwise, if Werkzeug is installed, then:
        If the setting is a valid Celery Memcache or Redis Backend, then use
        that.
        If the setting is empty and the default Celery Result Backend is
        Memcache or Redis, then use that.
    Otherwise fail.
    """
    jobtastic_cache_setting = app.conf.get('JOBTASTIC_CACHE')

    if isinstance(jobtastic_cache_setting, BaseCache):
        return jobtastic_cache_setting

    if 'Django' in CACHES:
        if jobtastic_cache_setting:
            try:
                return WrappedCache(get_django_cache(jobtastic_cache_setting))
            except InvalidCacheBackendError:
                pass
        else:
            return WrappedCache(get_django_cache('default'))

    if 'Werkzeug' in CACHES:
        if jobtastic_cache_setting:
            backend, url = get_backend_by_url(jobtastic_cache_setting)
            backend = backend(app=app, url=url)
        else:
            backend = app.backend
        if isinstance(backend, CacheBackend):
            return WrappedCache(MemcachedCache(backend.client))
        elif isinstance(backend, RedisBackend):
            return WrappedCache(RedisCache(backend.client))

    # Give up
    raise RuntimeError('Cannot find a suitable cache for Jobtastic')
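# Illustrative configuration consumed by get_cache() above; the exact URL
# scheme accepted by get_backend_by_url is an assumption (Celery-style cache
# backend URLs look like the one below).
app.conf.JOBTASTIC_CACHE = 'cache+memcached://127.0.0.1:11211/'
cache = get_cache(app)
cache.set('jobtastic:example', 'value', timeout=60)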
def __init__(self, app, config_prefix='AUTHLIB', **kwargs):
    deprecate(DEPRECATE_MESSAGE, 0.7)
    self.config_prefix = config_prefix
    self.config = app.config

    cache_type = self._config('type')
    kwargs.update(dict(default_timeout=self._config('DEFAULT_TIMEOUT', 100)))

    if cache_type == 'null':
        self.cache = NullCache()
    elif cache_type == 'simple':
        kwargs.update(dict(threshold=self._config('threshold', 500)))
        self.cache = SimpleCache(**kwargs)
    elif cache_type == 'memcache':
        kwargs.update(dict(
            servers=self._config('MEMCACHED_SERVERS'),
            key_prefix=self._config('KEY_PREFIX', None),
        ))
        self.cache = MemcachedCache(**kwargs)
    elif cache_type == 'redis':
        kwargs.update(dict(
            host=self._config('REDIS_HOST', 'localhost'),
            port=self._config('REDIS_PORT', 6379),
            password=self._config('REDIS_PASSWORD', None),
            db=self._config('REDIS_DB', 0),
            key_prefix=self._config('KEY_PREFIX', None),
        ))
        self.cache = RedisCache(**kwargs)
    elif cache_type == 'filesystem':
        kwargs.update(dict(threshold=self._config('threshold', 500)))
        self.cache = FileSystemCache(self._config('DIR'), **kwargs)
    else:
        raise RuntimeError('`%s` is not a valid cache type!' % cache_type)

    app.extensions[config_prefix.lower() + '_cache'] = self.cache
def _setup_cache(app):
    """
    If a test is being run or we don't want cache, NullCache will be
    initialized just as a dummy.

    If running locally without the 'DISABLE_CACHE' env variable and without a
    memcached instance running, MemcachedCache and its underlying pylibmc
    will give no warning on connection, but will throw exceptions when trying
    to work with the cache. A few connection retries will be made in that
    scenario, and eventually the cache will be replaced with a NullCache.

    Binary communication must be used for SASL.
    """
    # initialize the retry count if it's our first time here
    if not hasattr(app, 'cache_retry'):
        app.cache_retry = 0

    # Setup cache
    if app.config['TESTING'] or os.environ.get('DISABLE_CACHE', None) is not None:
        app.cache = NullCache()
        app.logger.debug('Cache initialized as NullCache')
    else:
        MEMCACHED_SERVERS = os.environ.get('MEMCACHEDCLOUD_SERVERS', '127.0.0.1:11211')
        try:
            memcached_client = Client(
                servers=MEMCACHED_SERVERS.split(','),
                username=os.environ.get('MEMCACHEDCLOUD_USERNAME'),
                password=os.environ.get('MEMCACHEDCLOUD_PASSWORD'),
                binary=True)
            app.cache = MemcachedCache(memcached_client)
            app.logger.debug(
                'Cache initialized as MemcachedCache with servers: %s',
                MEMCACHED_SERVERS)
        except Exception as e:
            # very unlikely to have an exception here. pylibmc mostly throws
            # when trying to communicate, not connect
            app.logger.error('Error initializing MemcachedCache: %s', e)
            app.logger.error('Initializing cache as NullCache. Fix ASAP!')
            app.cache = NullCache()
def test_sqlalchemy_auth_datastore_is_permission_in_roles_faster_using_memcached_cache(
        datastore, fixtures):
    # change cache class
    datastore.cache = MemcachedCache(servers=[environ['MEMCACHED_HOST']],
                                     key_prefix=environ['MEMCACHED_KEY'])

    roles = [
        fixtures.Role(permissions=[fixtures.Permission() for _ in range(15)])
        for _ in range(150)
    ]
    permission = fixtures.Permission(roles=roles[50:75])

    assert not datastore.cache.get(str(permission.uuid))

    start = datetime.now()
    assert datastore.is_permission_in_roles(permission_uuid=permission.uuid,
                                            role_uuids=[r.uuid for r in roles])
    no_cache = datetime.now() - start
    assert datastore.cache.get(str(permission.uuid)) == {str(r.uuid) for r in roles[50:75]}

    start = datetime.now()
    assert datastore.is_permission_in_roles(permission_uuid=permission.uuid,
                                            role_uuids=[r.uuid for r in roles])
    cache = datetime.now() - start
    assert datastore.cache.get(str(permission.uuid)) == {str(r.uuid) for r in roles[50:75]}

    print('MemcachedCache class - without cache: {} -- with cache: {}'.format(
        no_cache, cache))
    assert cache < no_cache
from ecomap.config import Config

_CONFIG = Config().get_config()
TEMPLATE_FOLDER = os.path.join(os.environ['PRODROOT'], 'www/templates/')

app = Flask(__name__, template_folder=TEMPLATE_FOLDER)
Triangle(app)
auto = Autodoc(app)

logging.config.fileConfig(os.path.join(os.environ['CONFROOT'], '_log.conf'))
logger = logging.getLogger('flask_app')

app.config['SECRET_KEY'] = 'a7c268ab01141868811c070274413ea3c588733241659fcb'
app.config["REMEMBER_COOKIE_DURATION"] = timedelta(days=14)  # user time lib
app.config['SECRET_KEY'] = _CONFIG['ecomap.secret_key']
app.config['CACHE_TYPE'] = 'memcached'
app.config['SESSION_TYPE'] = 'memcached'
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=14)
app.config['SESSION_MEMCACHED'] = MemcachedCache(
    _CONFIG['ecomap.memcached_servers'])
app.config['CACHE_MEMCACHED_SERVERS'] = _CONFIG['ecomap.memcached_servers']
app.config['OAUTH_CREDENTIALS'] = {
    'facebook': {
        'id': _CONFIG['oauth.facebook_id'],
        'secret': _CONFIG['oauth.facebook_secret']
    }
}

Session(app)
app.cache = Cache(app)
from database import db_session
from models import State, City, Category, User
from werkzeug.contrib.cache import MemcachedCache

# Should rename objects to something less generic.
import Objects

cache = MemcachedCache(servers=['127.0.0.1:11211'], default_timeout=0)


def stateCache():
    res = db_session.query(State.name, State.abbr, State.id).filter_by(active=1).all()
    myids = set()
    for i in res:
        cache.set('states:' + str(i[2]) + ':abbr', i[1], 0)
        cache.set('states:' + str(i[2]) + ':name', i[0], 0)
        cache.set('abbr:' + str(i[1]), str(i[2]), 0)
        myids.add(i[2])
    cache.set('states:ids', myids, 0)


def cityCache(stateId):
    res = db_session.query(City.name, City.id).filter_by(active=1, state_id=stateId).all()
    mycityIds = set()
    for i in res:
        cache.set('city:' + str(i[1]) + ':name', i[0], 0)
        mycityIds.add(i[1])
    cache.set('states:' + str(stateId) + ':cities', mycityIds, 0)


def allStatesIndex():
    allstates = list()
    cachedstates = cache.get('states:ids')
    for id in cachedstates:
import requests
from urllib.parse import quote
from werkzeug.contrib.cache import SimpleCache, MemcachedCache

cache_timeout = 10
try:
    cache = MemcachedCache(default_timeout=cache_timeout)
    print("Started MemcachedCache")
    # Provoke failure if the service isn't running.
    cache.get('X')
    print("Managed to get from Memcached")
except Exception as error:
    cache = SimpleCache(default_timeout=cache_timeout)
    print('Fell back on SimpleCache due to', error)


class GoSource:  # pragma: no cover
    """ Performs REST API requests to Go-server """

    def __init__(self, base_go_url, auth):
        self.base_go_url = base_go_url
        self.auth = auth
        self.consecutive_cache_errors = 0

    def simple_api_request(self, url, headers=None):
        response = self.api_request(url, headers)
        return self.unwrap_response(response)

    def simple_request(self, url, headers=None):
        response = self.base_request(url, headers)
import settings
import memcache

from flask import Flask
from routes import routes
from werkzeug.contrib.cache import MemcachedCache

app = Flask(settings.APPLICATION_NAME, static_url_path='')
app.secret_key = 'openstack_monitoring'
app.register_blueprint(routes)

servers = ['%s:%s' % (settings.MEMCACHED_HOST, str(settings.MEMCACHED_PORT))]
memcache.SERVER_MAX_VALUE_LENGTH = 1024 * 1024 * 10
cache = MemcachedCache(servers)


def setup_app(app):
    caching.load_servers()

#setup_app(app)

if __name__ == '__main__':
    try:
        app.run(debug=True)
    except Exception, e:
        import sys, traceback
        traceback.print_exc(file=sys.stdout)
        print str(e)
def purge(path, params):
    """
    This is to invalidate cache items that are submitted.
    """
    cache = MemcachedCache(app.config['MEMCACHE_SERVERS'].split(","))

    # HORRIBLE HORRIBLE HORRIBLE
    # @todo the cache clearing needs reworked for a few reasons,
    # currently, only the entire cache can be purged, because of the cache
    # key used, on update, the entire cache will be removed.
    cache.clear()
    return True

    # Each of the containers so the top level containers can be purged
    path_parts = path.split("/")

    # The starting point for purge requests
    base_path = ""

    # Hold a list of the keys to purge
    purge_keys = []

    try:
        containers = params['containers']
    except KeyError:
        containers = list()

    # Purge keys for the items in the tree up to the final one submitted
    while len(path_parts) > 0:
        base_path = base_path + "/" + path_parts.pop(0).upper()
        purge_keys.append(base_path)

    # Generate the URI's to purge based on the keyvals updated
    try:
        keyvals = params['keyvals']
    except KeyError:
        keyvals = list()

    # Purge the container level items for the containers updated
    for item in containers:
        try:
            name = item['name']
        except TypeError:
            name = item
        purge_keys.append(base_path + "/" + name)
        purge_keys.append(base_path + "/" + name + "?RETURN=ALL")
        purge_keys.append(base_path + "/operations/container/list" + base_path + name)
        purge_keys.append(base_path + "/operations/container/list" + base_path + name + "?RECURSIVE=TRUE")
        for i in range(app.config['TREE_MAX_RECURSION']):
            purge_keys.append(base_path + "/operations/container/list" + base_path + name + "?DEPTH=%s" % (str(i)))
            purge_keys.append(base_path + "/operations/container/list" + base_path + name + "?DEPTH=%s&RECURSIVE=TRUE" % (str(i)))

    # Purge all the keyvals in the container, with the proper tags (and for
    # untagged items)
    for item in keyvals:
        param_path = base_path
        param_path = param_path + "?KEY=%s" % (item['key'].upper())
        purge_keys.append(param_path)
        try:
            purge_keys.append(param_path + "&TAG=" + item['tag'].upper())
        except KeyError:
            purge_keys.append(param_path + "&TAG=" + app.config['DEFAULT_TAG'])

    app.logger.debug("PURGING KEYS: %s" % ("|".join(purge_keys)))
    cache.delete_many(*purge_keys)
    return True
class Query(object):
    default_configuration = RecursiveDict({
        'query_directories': [],
        'cached': False,
        'execute_queries': True,
    })

    def __init__(self, query, params=None, from_file=True, model_class=None,
                 database='default', sessions=sessions, cached=False,
                 replace=None, config=None, cache_timeout=3600 * 12):
        self.config = self.default_configuration
        self.config.update_recursive(configs.get('mysql', {}))
        if config:
            self.config.update_recursive(config)

        # add project path queries directory if not already configured
        project_queries = os.path.join(get_project_path(), 'queries')
        if not project_queries in self.config['query_directories']:
            self.config['query_directories'].append(project_queries)

        if from_file:
            if query in file_queries:
                logger.debug('Getting query file from query files cache %s' % query)
                self.query = file_queries[query]
            else:
                logger.debug('Getting query file %s' % query)
                file_queries[query] = self._get_from_file(query)
                self.query = file_queries[query]
        else:
            self.query = query

        if replace:
            self.query = self.query % replace

        self.params = params
        self.session = sessions.get(database)
        self.result = None
        self.model_class = model_class
        self.cached = cached
        self.query_cache = MemcachedCache(['127.0.0.1:11211'])
        self.cache_timeout = cache_timeout
        self.database = database

    def _gen_mem_key(self):
        params_string = ''
        if self.params:
            ordered_values = OrderedDict(sorted(self.params.items(), key=lambda t: t[0]))
            for value in ordered_values:
                params_string = params_string + '_' + value
        return str(hash(self.database + '_' + self.query + params_string))

    def _get_from_file(self, query):
        for query_directory in self.config.get('query_directories'):
            for dirpath, dirnames, filenames in os.walk(query_directory, followlinks=True):
                for name in filenames:
                    if '%s.sql' % query in os.path.join(dirpath, name):
                        f = file(os.path.join(dirpath, name))
                        query = f.read()
                        f.close()
                        return query
        raise QueryFileNotFound('%s.sql not found in any of %s' %
                                (query, self.config.get('query_directories')))

    def _raw_query(self):
        if self.config.get('execute_queries'):
            self.result = self.session.execute(self.query, self.params)
        else:
            self.result = None

    def _raw_query_with_model(self):
        if self.config.get('execute_queries'):
            self.result = self.session.query(self.model_class).from_statement(self.query).params(**self.params)
        else:
            self.result = None

    def _query(self):
        if self.model_class:
            self._raw_query_with_model()
        else:
            self._raw_query()

    def execute(self):
        self._query()
        return self.result

    def one(self):
        cache_key = ''
        if self.cached:
            cache_key = self._gen_mem_key()
            cached_query = self.query_cache.get(cache_key)
            if cached_query:
                return cached_query
        self._query()
        if self.result:
            if self.model_class:
                _one = self.result.one()
                if self.cached:
                    self.query_cache.set(cache_key, _one, self.cache_timeout)
                return _one
            else:
                fetch_one = self.result.fetchone()
                if self.cached:
                    self.query_cache.set(cache_key, fetch_one, self.cache_timeout)
                return fetch_one
        if self.cached:
            self.query_cache.set(cache_key, self.result, self.cache_timeout)
        return self.result

    def all(self):
        cache_key = ''
        if self.cached:
            cache_key = self._gen_mem_key()
            cached_query = self.query_cache.get(cache_key)
            if cached_query:
                return cached_query
        self._query()
        if self.result:
            if self.model_class:
                _all = self.result.all()
                if self.cached:
                    self.query_cache.set(cache_key, _all, self.cache_timeout)
                return _all
            else:
                fetch_all = self.result.fetchall()
                if self.cached:
                    self.query_cache.set(cache_key, fetch_all, self.cache_timeout)
                return fetch_all
        if self.cached:
            self.query_cache.set(cache_key, self.result, self.cache_timeout)
        return self.result

    @staticmethod
    def last_insert_id():
        return Query('last_insert_id').one()[0]
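# A hedged usage sketch for the Query class above; the query file name
# 'select_active_users' and its parameter are hypothetical. With cached=True,
# the first call executes the SQL and stores the rows in memcached under a
# hash of database, query text, and parameters; later identical calls are
# served from the cache until cache_timeout expires.
rows = Query('select_active_users', params={'status': 'active'},
             cached=True, cache_timeout=3600).all()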
with open(config_file) as f:
    config = yaml.safe_load(f)

# echo stats | nc localhost 11211 | egrep 'cmd_.et|curr_items'

cfg = config.get("influxdb", {})
host = cfg.get("host", "localhost")
port = cfg.get("port", 8086)
user = cfg.get("user", "graphite")
passw = cfg.get("pass", "graphite")
db = cfg.get("db", "graphite")

client = InfluxDBClient(host, port, user, passw, db)

if config["cache"]["CACHE_TYPE"] == "memcached":
    cache = MemcachedCache(key_prefix=config["cache"]["CACHE_KEY_PREFIX"])
elif config["cache"]["CACHE_TYPE"] == "filesystem":
    cache = FileSystemCache(config["cache"]["CACHE_DIR"])
else:
    raise Exception("unsupported cache backend")

while True:
    print "BEGIN LOOP"
    start_loop = time.time()

    # first off, load series as quick as we can
    section = "influxdb:: list series"
    print section
    start = time.time()