Example #1
File: ninja.py Project: zinw/l2py
 def __init__(self,
              session_class=None,
              servers=None,
              default_timeout=600,
              key_prefix=None):
     SessionStore.__init__(self, session_class)
     self.mc = MemcachedCache(servers, default_timeout, key_prefix)
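Most of the examples on this page build a MemcachedCache from werkzeug.contrib.cache (a module that was later split out into the cachelib package). As a point of reference, here is a minimal sketch of the construct/set/get flow, assuming a memcached server listening on 127.0.0.1:11211:

from werkzeug.contrib.cache import MemcachedCache  # newer code: from cachelib import MemcachedCache

# key_prefix namespaces all keys written by this cache instance
cache = MemcachedCache(servers=['127.0.0.1:11211'],
                       default_timeout=600,
                       key_prefix='myapp')

cache.set('greeting', 'hello', timeout=60)  # expires after 60 seconds
value = cache.get('greeting')               # returns None on a cache miss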
Example #2
def setup_cache(app):
    app.cache = None
    servers = app.config.get('MEMCACHED_SERVERS')
    key_prefix = app.config.get('MEMCACHED_PREFIX')
    if servers:
        app.cache = MemcachedCache(servers=[servers], key_prefix=key_prefix)
        app.cache.set('sc-test', 'sc-value')
Example #3
 def _memcache(self, **kwargs):
     """Returns a :class:`MemcachedCache` instance"""
     kwargs.update(dict(
         servers=self._config('MEMCACHED_SERVERS', None),
         key_prefix=self._config('key_prefix', None),
     ))
     return MemcachedCache(**kwargs)
Example #4
 def __init__(self):
     if webapp.config['APP_ENV'] == 'dev':
         from werkzeug.contrib.cache import SimpleCache
         self.cache = SimpleCache()
     else:
         from werkzeug.contrib.cache import MemcachedCache
         self.cache = MemcachedCache(['127.0.0.1:11211'])
Example #5
    def prepare(self):
        self.prepared = True

        # Cache
        if self.config.CACHE_TYPE == 'redis':
            self.cache = RedisCache(host=self.config.CACHE_SERV)
        elif self.config.CACHE_TYPE == 'memcached':
            self.cache = MemcachedCache(servers=[self.config.CACHE_SERV])
        else:
            self.cache = FileSystemCache(self.config.CACHE_SERV)

        # Options
        from .admin import Option
        self.options = Option.auto_load()

        # Timer
        @self.app.before_request
        def before_request():
            g.start = time.time()

        # Medias
        self.app.add_url_rule(self.app.config['UPLOAD_DIRECTORY_URL'] +
                              '<filename>',
                              'FyPress.uploaded_file',
                              build_only=True)
        self.app.wsgi_app = SharedDataMiddleware(
            self.app.wsgi_app, {
                self.app.config['UPLOAD_DIRECTORY_URL']:
                self.app.config['UPLOAD_DIRECTORY']
            })
Example #6
 def __set_cache(self):
     self.logger.debug("Setting up the cache")
     if hasattr(self.config,
                'cache') and "MEMCACHE_BACKENDS" in self.config.cache:
         self.cache = MemcachedCache(self.config.cache["MEMCACHE_BACKENDS"])
     else:
         self.cache = SimpleCache()
Example #7
def init_cache(app):
    servers = app.config.get('MEMCACHED_SERVERS', ['127.0.0.1:11211'])
    if not servers:
        servers = ['localhost:11211']

    prefix = app.config.get('MEMCACHED_PREFIX', '')
    app.cache = MemcachedCache(servers=servers, key_prefix=prefix)
Example #8
 def __init__(self, session_class=None):
     super(Store, self).__init__(session_class)
     if settings.DATABASE_ENGINE == 'gae':
         self.cache = GAEMemcachedCache(default_timeout=0)
     else:
         servers = settings.SESSION_OPTIONS.get('memcached_servers', [])
         self.cache = MemcachedCache(servers, default_timeout=0)
Example #9
    def connect(self):
        try:
            conf = app.config.get('MEMCACHED')
            cache = MemcachedCache(**conf)
        except Exception:
            cache = NullCache()

        return cache
Example #10
def get_the_cache():
    if app.config.get('CACHE_SERVER', None):
        from werkzeug.contrib.cache import MemcachedCache
        cache = MemcachedCache(app.config['CACHE_SERVER'])
    else:
        from werkzeug.contrib.cache import SimpleCache
        cache = SimpleCache()
    return cache
Example #11
 def test_default_werkzeug_cache(self):
     with self.settings(CELERY_ALWAYS_EAGER=False):
         app = Celery()
         app.config_from_object(settings)
         self.task.bind(app)
         app.finalize()
         async_task = self.task.delay(**self.kwargs)
         cache = MemcachedCache(app.backend.client)
         self.assertEqual(async_task.status, states.PENDING)
         self.assertNotEqual(cache.get('herd:%s' % self.key), None)
Example #12
    def get_connection(self, app):
        servers = app.config.get('MEMCACHED_SERVERS')
        username = app.config.get('MEMCACHED_USERNAME', None)
        password = app.config.get('MEMCACHED_PASSWORD', None)

        cache = MemcachedCache(servers, username=username, password=password)

        if not cache:
            raise Exception(
                'Memcached session cannot connect to memcached server.')
        return cache
Example #13
 def decorated_function(*args, **kwargs):
     cache = MemcachedCache(['127.0.0.1:11211'])
     key = '{url}{data}'.format(url=args[0], data=pickle.dumps(args))
     hash_ = hashlib.md5(key.encode()).hexdigest()
     if not cache.has(hash_):
         status_code, cached_data = f(*args, **kwargs)
         if not kwargs.get('nocache', False) and status_code == 200:
             cache.set(hash_, (status_code, cached_data), timeout=5 * 60)
     else:
         status_code, cached_data = cache.get(hash_)
     return status_code, cached_data
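Only the inner function of the caching decorator is shown in Example #13. Below is a possible sketch of the surrounding decorator, under the assumption that the wrapped callable returns a (status_code, data) pair; the name memcached_response is illustrative, not from the original project. The sketch uses a plain get-and-check instead of cache.has() so a miss costs a single round trip:

import functools
import hashlib
import pickle

from werkzeug.contrib.cache import MemcachedCache


def memcached_response(f):
    """Hypothetical outer decorator for the pattern shown in Example #13."""
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        cache = MemcachedCache(['127.0.0.1:11211'])
        key = '{url}{data}'.format(url=args[0], data=pickle.dumps(args))
        hash_ = hashlib.md5(key.encode()).hexdigest()
        cached = cache.get(hash_)
        if cached is None:
            status_code, cached_data = f(*args, **kwargs)
            if not kwargs.get('nocache', False) and status_code == 200:
                cache.set(hash_, (status_code, cached_data), timeout=5 * 60)
        else:
            status_code, cached_data = cached
        return status_code, cached_data
    return decorated_function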
Example #14
def setup_cache(app):
    """
    Setup ``app.cache``.
    """
    # TODO: Support other cache type.
    servers = app.config.get('MEMCACHED_SERVERS', '').split()
    if not servers:
        servers = ['localhost:11211']
    servers = memcache.LockingClient(servers)

    prefix = app.config.get('MEMCACHED_PREFIX', '')
    app.cache = MemcachedCache(servers=servers, key_prefix=prefix)
Example #15
    def __init__(self, timeout=18000, version=None):
        cache_host = os.environ.get('CACHE_HOST')
        cache_port = os.environ.get('CACHE_PORT')
        # cache_username = os.environ.get('CACHE_USERNAME')
        # cache_password = os.environ.get('CACHE_PASSWORD')

        self._cache = MemcachedCache(['{}:{}'.format(cache_host, cache_port)])
        # self._cache = bmemcached.Client(
        # [cache_host], username=cache_username, password=cache_password)

        self._timeout = timeout
        self._version = version
Example #16
def get_sfc_uuid():
    """Retrieves a unique identifier in order to compose a SFC

        Retrieves a uuid4 identifier to compose a SFC and get a copy of the vnffg template.
    :return: a unique identifier str.
    """

    vnffgd = deepcopy(vnffgd_template)
    sfc_uuid = str(uuid.uuid4())

    cache = MemcachedCache()
    cache.set(sfc_uuid, vnffgd)

    return jsonify({'sfc_uuid': sfc_uuid})
Example #17
def checkAuth(auth):

    (auth_type, auth_value, request) = auth

    cache = MemcachedCache(app.config['MEMCACHE_SERVERS'].split(","))
    cache_key = "AUTH-%s+%s" % (auth_type, auth_value)
    # Auth is in the cache, go ahead sir
    if auth_type == 'APIKEY':
        return checkAuthAPI(cache, cache_key, auth_value, request)

    elif auth_type == 'LDAP':
        return checkAuthLDAP(cache, cache_key, auth_value, request)

    else:
        return False
Example #18
    def __init__(self,
                 query,
                 params=None,
                 from_file=True,
                 model_class=None,
                 database='default',
                 sessions=sessions,
                 cached=False,
                 replace=None,
                 config=None,
                 cache_timeout=3600 * 12):

        self.config = self.default_configuration
        self.config.update_recursive(configs.get('mysql', {}))
        if config:
            self.config.update_recursive(config)

        # add project path queries directory if not already configured
        project_queries = os.path.join(get_project_path(), 'queries')
        if not project_queries in self.config['query_directories']:
            self.config['query_directories'].append(project_queries)

        if from_file:
            if query in file_queries:
                logger.debug('Getting query file from query files cache %s' %
                             query)
                self.query = file_queries[query]
            else:
                logger.debug('Getting query file %s' % query)
                file_queries[query] = self._get_from_file(query)
                self.query = file_queries[query]
        else:
            self.query = query

        if replace:
            self.query = self.query % replace

        self.params = params
        self.session = sessions.get(database)
        self.result = None
        self.model_class = model_class
        self.cached = cached
        self.query_cache = MemcachedCache(['127.0.0.1:11211'])
        self.cache_timeout = cache_timeout
        self.database = database
Example #19
def init_cache(cache_type="simple",
               memcached_servers=[],
               cache_dir=None,
               timeout=259200):
    ''' init_cache creates the oembed cache with the given cache type

    cache_type - 'simple', 'memcached', or 'file'. Determines which type of cache to use
    memcached_servers - List of memcached servers. Must be set if cache_type is 'memcached'.
    cache_dir - Directory for a file system cache. Must be set if cache_type is 'file'.
    timeout - Timeout in seconds. Default is 3 days.
    '''
    global cache
    if cache_type == 'simple':
        cache = SimpleCache(default_timeout=timeout)
    elif cache_type == 'memcached':
        cache = MemcachedCache(servers=memcached_servers,
                               default_timeout=timeout)
    elif cache_type == 'file':
        cache = FileSystemCache(cache_dir, default_timeout=timeout)
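A hedged usage sketch for the helper above; the server address, timeout, and directory are placeholders, not values from the original project:

# cache oEmbed responses in memcached for one day
init_cache(cache_type='memcached',
           memcached_servers=['127.0.0.1:11211'],
           timeout=60 * 60 * 24)

# or keep them on disk instead
init_cache(cache_type='file', cache_dir='/tmp/oembed-cache')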
Example #20
def get_cache(app):
    """
    Attempt to find a valid cache from the Celery configuration

    If the setting is a valid cache, just use it.
    Otherwise, if Django is installed, then:
        If the setting is a valid Django cache entry, then use that.
        If the setting is empty use the default cache
    Otherwise, if Werkzeug is installed, then:
        If the setting is a valid Celery Memcache or Redis Backend, then use
            that.
        If the setting is empty and the default Celery Result Backend is
            Memcache or Redis, then use that
    Otherwise fail
    """
    jobtastic_cache_setting = app.conf.get('JOBTASTIC_CACHE')
    if isinstance(jobtastic_cache_setting, BaseCache):
        return jobtastic_cache_setting

    if 'Django' in CACHES:
        if jobtastic_cache_setting:
            try:
                return WrappedCache(get_django_cache(jobtastic_cache_setting))
            except InvalidCacheBackendError:
                pass
        else:
            return WrappedCache(get_django_cache('default'))

    if 'Werkzeug' in CACHES:
        if jobtastic_cache_setting:
            backend, url = get_backend_by_url(jobtastic_cache_setting)
            backend = backend(app=app, url=url)
        else:
            backend = app.backend

        if isinstance(backend, CacheBackend):
            return WrappedCache(MemcachedCache(backend.client))
        elif isinstance(backend, RedisBackend):
            return WrappedCache(RedisCache(backend.client))

    # Give up
    raise RuntimeError('Cannot find a suitable cache for Jobtastic')
Example #21
def include_sfc_acl():
    """Includes ACL criteria in VNFFGD

    JSON arguments are:
        - sfc_uuid: the unique identifier of the SFC being composed
        - acl: a dict containing the acl criteria to be added into the vnffgd template

    :return: OK if success, or ERROR and its reason if not
    """
    vnffgd = None

    cache = MemcachedCache()

    if 'sfc_uuid' in request.json:
        vnffgd = cache.get(request.json['sfc_uuid'])

    if not vnffgd:
        return jsonify({'status': ERROR, 'reason': 'SFC UUID not found!'})

    acl = request.json['acl']
    topology_template = vnffgd['vnffgd']['template']['vnffgd']['topology_template']

    criteria = topology_template['node_templates']['Forwarding_path1'] \
                                ['properties']['policy']['criteria']

    res, acl = acl_criteria_parser(acl)

    if res != OK:
        return jsonify({'status': ERROR, 'reason': acl})

    for rule in acl:
        criteria.append(rule)

    #debug
    logger.debug('VNFFGD Template UUID: %s\n%s', request.json['sfc_uuid'], json.dumps(vnffgd, indent=4, sort_keys=True))

    cache.set(request.json['sfc_uuid'], vnffgd)

    return jsonify({'status': OK})
Example #22
def _setup_cache(app):
    """
    If a test is being run or we don't want cache, NullCache will be initialized just as a dummy.
    If running locally without the 'DISABLE_CACHE' env variable and without a memcached instance running,
    MemcachedCache and its underlying pylibmc will give no warning on connection, but will throw
    exceptions when trying to work with the cache. A few connection retries will be made in that
    scenario, and eventually the cache will be replaced with a NullCache. Binary communications must
    be used for SASL.
    """

    # initialize the retry count if it's our first time here
    if not hasattr(app, 'cache_retry'):
        app.cache_retry = 0

    # Setup cache
    if app.config['TESTING'] or os.environ.get('DISABLE_CACHE',
                                               None) is not None:
        app.cache = NullCache()
        app.logger.debug('Cache initialized as NullCache')
    else:
        MEMCACHED_SERVERS = os.environ.get('MEMCACHEDCLOUD_SERVERS',
                                           '127.0.0.1:11211')

        try:
            memcached_client = Client(
                servers=MEMCACHED_SERVERS.split(','),
                username=os.environ.get('MEMCACHEDCLOUD_USERNAME'),
                password=os.environ.get('MEMCACHEDCLOUD_PASSWORD'),
                binary=True)
            app.cache = MemcachedCache(memcached_client)
            app.logger.debug(
                'Cache initialized as MemcachedCache with servers: %s',
                MEMCACHED_SERVERS)
        except Exception as e:
            # very unlikely to have an exception here. pylibmc mostly throws when trying to communicate, not connect
            app.logger.error('Error initializing MemcachedCache: %s', e)
            app.logger.error('Initializing cache as NullCache. Fix ASAP!')
            app.cache = NullCache()
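The retry behaviour mentioned in the docstring is not part of this function; it is driven elsewhere through the app.cache_retry counter initialized above. A sketch of what such a guard could look like is below (the threshold, helper name, and logging are assumptions, not code from the original project):

from werkzeug.contrib.cache import NullCache

MAX_CACHE_RETRIES = 3  # assumed threshold, not from the original project


def guarded_cache_get(app, key):
    """Hypothetical helper: fall back to NullCache after repeated memcached errors."""
    try:
        value = app.cache.get(key)
        app.cache_retry = 0                     # a successful call resets the counter
        return value
    except Exception as exc:
        app.cache_retry += 1
        app.logger.warning('Cache error %s/%s: %s',
                           app.cache_retry, MAX_CACHE_RETRIES, exc)
        if app.cache_retry >= MAX_CACHE_RETRIES:
            app.logger.error('Too many cache errors, switching to NullCache')
            app.cache = NullCache()
        return None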
Example #23
    def __init__(self, app, config_prefix='AUTHLIB', **kwargs):
        deprecate(DEPRECATE_MESSAGE, 0.7)

        self.config_prefix = config_prefix
        self.config = app.config

        cache_type = self._config('type')
        kwargs.update(
            dict(default_timeout=self._config('DEFAULT_TIMEOUT', 100)))

        if cache_type == 'null':
            self.cache = NullCache()
        elif cache_type == 'simple':
            kwargs.update(dict(threshold=self._config('threshold', 500)))
            self.cache = SimpleCache(**kwargs)
        elif cache_type == 'memcache':
            kwargs.update(
                dict(
                    servers=self._config('MEMCACHED_SERVERS'),
                    key_prefix=self._config('KEY_PREFIX', None),
                ))
            self.cache = MemcachedCache(**kwargs)
        elif cache_type == 'redis':
            kwargs.update(
                dict(
                    host=self._config('REDIS_HOST', 'localhost'),
                    port=self._config('REDIS_PORT', 6379),
                    password=self._config('REDIS_PASSWORD', None),
                    db=self._config('REDIS_DB', 0),
                    key_prefix=self._config('KEY_PREFIX', None),
                ))
            self.cache = RedisCache(**kwargs)
        elif cache_type == 'filesystem':
            kwargs.update(dict(threshold=self._config('threshold', 500), ))
            self.cache = FileSystemCache(self._config('DIR'), **kwargs)
        else:
            raise RuntimeError('`%s` is not a valid cache type!' % cache_type)
        app.extensions[config_prefix.lower() + '_cache'] = self.cache
Example #24
def test_sqlalchemy_auth_datastore_is_permission_in_roles_faster_using_memcached_cache(
        datastore, fixtures):
    # change cache class
    datastore.cache = MemcachedCache(servers=[environ['MEMCACHED_HOST']],
                                     key_prefix=environ['MEMCACHED_KEY'])

    roles = [
        fixtures.Role(permissions=[fixtures.Permission() for _ in range(15)])
        for _ in range(150)
    ]
    permission = fixtures.Permission(roles=roles[50:75])

    assert not datastore.cache.get(str(permission.uuid))

    start = datetime.now()
    assert datastore.is_permission_in_roles(permission_uuid=permission.uuid,
                                            role_uuids=[r.uuid for r in roles])
    no_cache = datetime.now() - start

    assert datastore.cache.get(str(
        permission.uuid)) == {str(r.uuid)
                              for r in roles[50:75]}

    start = datetime.now()
    assert datastore.is_permission_in_roles(permission_uuid=permission.uuid,
                                            role_uuids=[r.uuid for r in roles])
    cache = datetime.now() - start

    assert datastore.cache.get(str(
        permission.uuid)) == {str(r.uuid)
                              for r in roles[50:75]}

    print('MemcachedCache class - without cache: {} -- with cache: {}'.format(
        no_cache, cache))

    assert cache < no_cache
Example #25
from ecomap.config import Config

_CONFIG = Config().get_config()

TEMPLATE_FOLDER = os.path.join(os.environ['PRODROOT'], 'www/templates/')
app = Flask(__name__, template_folder=TEMPLATE_FOLDER)
Triangle(app)
auto = Autodoc(app)

logging.config.fileConfig(os.path.join(os.environ['CONFROOT'], '_log.conf'))
logger = logging.getLogger('flask_app')
app.config['SECRET_KEY'] = 'a7c268ab01141868811c070274413ea3c588733241659fcb'
app.config["REMEMBER_COOKIE_DURATION"] = timedelta(days=14)  # user time lib
app.config['SECRET_KEY'] = _CONFIG['ecomap.secret_key']
app.config['CACHE_TYPE'] = 'memcached'
app.config['SESSION_TYPE'] = 'memcached'
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=14)
app.config['SESSION_MEMCACHED'] = MemcachedCache(
    _CONFIG['ecomap.memcached_servers'])
app.config['CACHE_MEMCACHED_SERVERS'] = _CONFIG['ecomap.memcached_servers']
app.config['OAUTH_CREDENTIALS'] = {
    'facebook': {
        'id': _CONFIG['oauth.facebook_id'],
        'secret': _CONFIG['oauth.facebook_secret']
    }
}

Session(app)
app.cache = Cache(app)
Example #26
import settings
import memcache

from flask import Flask
from routes import routes
from werkzeug.contrib.cache import MemcachedCache

app = Flask(settings.APPLICATION_NAME, static_url_path='')
app.secret_key = 'openstack_monitoring'

app.register_blueprint(routes)

servers = ['%s:%s' % (settings.MEMCACHED_HOST, str(settings.MEMCACHED_PORT))]
memcache.SERVER_MAX_VALUE_LENGTH = 1024 * 1024 * 10

cache = MemcachedCache(servers)


def setup_app(app):
    caching.load_servers()


#setup_app(app)

if __name__ == '__main__':
    try:
        app.run(debug=True)
    except Exception, e:
        import sys, traceback
        traceback.print_exc(file=sys.stdout)
        print str(e)
Example #27
from werkzeug.contrib.cache import MemcachedCache

from boogs import BugBuilder

import requests

from stooge.frontend.persona import verify_assertion
from stooge.frontend import app
from stooge.frontend.mozillians import lookup_mozillian

client = MongoClient()
users = client.stooge.users
scans = client.stooge.scans
sites = client.stooge.sites

cache = MemcachedCache(['127.0.0.1:11211'])

# This is horrible. Just to be able to set a Cache-Control header on
# static content. Look for something better.


def root_dir():
    return os.path.abspath(os.path.dirname(__file__))


def get_static_file(filename):  # pragma: no cover
    src = os.path.join(root_dir(), 'static', filename)
    with open(src) as f:
        return f.read()

Example #28
import requests
from urllib.parse import quote
from werkzeug.contrib.cache import SimpleCache, MemcachedCache

cache_timeout = 10
try:
    cache = MemcachedCache(default_timeout=cache_timeout)
    print("Started MemchachedCache")
    # Provoke failure if the service isn't running.
    cache.get('X')
    print("Managed to get from Memchached")
except Exception as error:
    cache = SimpleCache(default_timeout=cache_timeout)
    print('Fell back on SimpleCache due to', error)


class GoSource:  # pragma: no cover
    """
    Performs REST API requests to Go-server
    """
    def __init__(self, base_go_url, auth):
        self.base_go_url = base_go_url
        self.auth = auth
        self.consecutive_cache_errors = 0

    def simple_api_request(self, url, headers=None):
        response = self.api_request(url, headers)
        return self.unwrap_response(response)

    def simple_request(self, url, headers=None):
        response = self.base_request(url, headers)
Example #29
with open(config_file) as f:
    config = yaml.safe_load(f)

# echo stats | nc localhost 11211 | egrep 'cmd_.et|curr_items'

cfg = config.get('influxdb', {})
host = cfg.get('host', 'localhost')
port = cfg.get('port', 8086)
user = cfg.get('user', 'graphite')
passw = cfg.get('pass', 'graphite')
db = cfg.get('db', 'graphite')

client = InfluxDBClient(host, port, user, passw, db)

if config['cache']['CACHE_TYPE'] == 'memcached':
    cache = MemcachedCache(key_prefix=config['cache']['CACHE_KEY_PREFIX'])
elif config['cache']['CACHE_TYPE'] == 'filesystem':
    from os import listdir
    from os.path import isfile, join
    cache_dir = "%s_tmp" % config['cache']['CACHE_DIR']
    cache_dir_real = config['cache']['CACHE_DIR']
    cache = FileSystemCache(cache_dir)
else:
    raise Exception("unsupported cache backend")


while True:

    print "BEGIN LOOP"
    start_loop = time.time()
Example #30
MEMCACHE_URL = os.environ.get('MEMCACHE_URL', '127.0.0.1:11211').split(',')
DEBUG = os.environ.get('DEBUG', False) in ('true', '1')
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI',
                                         'sqlite:////tmp/kanbanzilla.db')

DAY = 60 * 60 * 24
MONTH = DAY * 30

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = SQLALCHEMY_DATABASE_URI
db = SQLAlchemy(app)

login_url = 'https://bugzilla.mozilla.org/index.cgi'
bugzilla_url = 'https://api-dev.bugzilla.mozilla.org/latest'

cache = MemcachedCache(MEMCACHE_URL)

COLUMNS = [
    {
        "name": "Backlog",
        "statuses": ["NEW", "UNCONFIRMED"]
    },
    {
        "name": "Needs Investigation",
        "statuses": []
    },
    {
        "name": "Ready to work on",
        "statuses": []
    },
    {