예제 #1
0
    def test_client_behaviors(self):
        """Behavior flags given as cache.behavior.* options must survive
        Beaker's config parsing and reach the pylibmc client, coerced to
        the client's integer representation."""
        settings = {
            "cache.lock_dir": "./lock",
            "cache.data_dir": "./cache",
            "cache.type": "ext:memcached",
            "cache.url": mc_url,
            "cache.memcache_module": "pylibmc",
            "cache.protocol": "binary",
            "cache.behavior.ketama": "True",
            "cache.behavior.cas": False,
            "cache.behavior.receive_timeout": "3600",
            "cache.behavior.send_timeout": 1800,
            "cache.behavior.tcp_nodelay": 1,
            "cache.behavior.auto_eject": "0",
        }
        manager = CacheManager(**parse_cache_config_options(settings))
        behavior_cache = manager.get_cache("test_behavior", expire=6000)

        expected = {
            "ketama": 1,
            "cas": 0,
            "receive_timeout": 3600,
            "send_timeout": 1800,
            "tcp_nodelay": 1,
            "auto_eject": 0,
        }
        with behavior_cache.namespace.pool.reserve() as client:
            for name, value in expected.items():
                assert name in client.behaviors
                assert client.behaviors[name] == value
예제 #2
0
def make_app(global_conf,
             pub_key,
             key_type='RSA',
             cookie_name=None,
             hdr_prefix=None,
             log_name=None,
             **app_conf):
    """Paste application factory.

    :param global_conf: global Paste configuration (unused here).
    :param pub_key: path to the public key used to verify tickets.
    :param key_type: ``'RSA'`` (default) or ``'DSA'``.
    :param cookie_name: optional override for the ticket cookie name.
    :param hdr_prefix: optional override for the header prefix.
    :param log_name: optional logger name override.
    :param app_conf: remaining app options; Beaker cache settings are
        parsed out of these.
    :return: a configured :class:`AuthRequestApp`.
    """
    # Load the verification key with the configured algorithm.
    if key_type == 'RSA':
        pub_key = RSA.load_pub_key(pub_key)
    else:
        pub_key = DSA.load_pub_key(pub_key)

    params = {}
    if cookie_name is not None:
        params['cookie_name'] = cookie_name
    if hdr_prefix is not None:
        params['hdr_prefix'] = hdr_prefix
    if log_name is not None:
        params['log_name'] = log_name

    cache_opts = parse_cache_config_options(app_conf)
    # parse_cache_config_options() coerces 'enabled' to a bool, so a plain
    # truth test replaces the old `== True` comparison (PEP 8 E712).
    if cache_opts.get('enabled'):
        cache_mgr = CacheManager(**cache_opts)
        params['cache'] = cache_mgr.get_cache('tickets_cache')

    return AuthRequestApp(pub_key, **params)
예제 #3
0
    def test_client_behaviors(self):
        """Beaker should forward cache.behavior.* options to the pylibmc
        client, normalizing string/int/bool values to client integers."""
        options = {
            "cache.lock_dir": "./lock",
            "cache.data_dir": "./cache",
            "cache.type": "ext:memcached",
            "cache.url": mc_url,
            "cache.memcache_module": "pylibmc",
            "cache.protocol": "binary",
            "cache.behavior.ketama": "True",
            "cache.behavior.cas": False,
            "cache.behavior.receive_timeout": "3600",
            "cache.behavior.send_timeout": 1800,
            "cache.behavior.tcp_nodelay": 1,
            "cache.behavior.auto_eject": "0",
        }
        manager = CacheManager(**parse_cache_config_options(options))
        behavior_cache = manager.get_cache("test_behavior", expire=6000)

        checks = [
            ("ketama", 1),
            ("cas", 0),
            ("receive_timeout", 3600),
            ("send_timeout", 1800),
            ("tcp_nodelay", 1),
            ("auto_eject", 0),
        ]
        with behavior_cache.namespace.pool.reserve() as client:
            for behavior, wanted in checks:
                assert behavior in client.behaviors
                assert client.behaviors[behavior] == wanted
예제 #4
0
    def test_client_behaviors(self):
        """Verify every configured cache.behavior.* flag is visible on the
        reserved pylibmc connection with its coerced value."""
        conf = {
            'cache.lock_dir': './lock',
            'cache.data_dir': './cache',
            'cache.type': 'ext:memcached',
            'cache.url': mc_url,
            'cache.memcache_module': 'pylibmc',
            'cache.protocol': 'binary',
            'cache.behavior.ketama': 'True',
            'cache.behavior.cas': False,
            'cache.behavior.receive_timeout': '3600',
            'cache.behavior.send_timeout': 1800,
            'cache.behavior.tcp_nodelay': 1,
            'cache.behavior.auto_eject': "0"
        }
        manager = CacheManager(**parse_cache_config_options(conf))
        behavior_cache = manager.get_cache('test_behavior', expire=6000)

        wanted = {
            "ketama": 1,
            "cas": 0,
            "receive_timeout": 3600,
            "send_timeout": 1800,
            "tcp_nodelay": 1,
            "auto_eject": 0,
        }
        with behavior_cache.namespace.pool.reserve() as connection:
            for flag, value in wanted.items():
                assert flag in connection.behaviors
                assert connection.behaviors[flag] == value
예제 #5
0
	def init_cache(self):
		"""Return the file-backed 'schlagzeile' cache (10 minute expiry)."""
		options = {
			'cache.type': 'file',
			'cache.data_dir': '/tmp/cache/data',
			'cache.lock_dir': '/tmp/cache/lock',
		}
		manager = CacheManager(**parse_cache_config_options(options))
		return manager.get_cache('schlagzeile', expire=600)
예제 #6
0
 def test_client_behaviors(self):
     """Client behavior options set via cache.behavior.* must reach the
     underlying pylibmc connection."""
     opts = {
         'cache.lock_dir': './lock',
         'cache.data_dir': './cache',
         'cache.type': 'ext:memcached',
         'cache.url': mc_url,
         'cache.memcache_module': 'pylibmc',
         'cache.protocol': 'binary',
         'cache.behavior.ketama': 'True',
         'cache.behavior.cas': False,
         'cache.behavior.receive_timeout': '3600',
         'cache.behavior.send_timeout': 1800,
         'cache.behavior.tcp_nodelay': 1,
         'cache.behavior.auto_eject': "0",
     }
     manager = CacheManager(**parse_cache_config_options(opts))
     behavior_cache = manager.get_cache('test_behavior', expire=6000)

     expectations = {
         "ketama": 1,
         "cas": 0,
         "receive_timeout": 3600,
         "send_timeout": 1800,
         "tcp_nodelay": 1,
         "auto_eject": 0,
     }
     with behavior_cache.namespace.pool.reserve() as conn:
         for key, want in expectations.items():
             assert key in conn.behaviors
             assert conn.behaviors[key] == want
예제 #7
0
class BeakerCache(object):
    """Region-keyed key/value store backed by a Beaker CacheManager."""

    def __init__(self):
        # In-memory backend; switch 'cache.type' to 'file' (plus
        # data_dir/lock_dir entries) for a persistent cache.
        options = {
            'cache.type': 'memory',
        }
        self.cache = CacheManager(**parse_cache_config_options(options))

    def get(self, key, region):
        """Return the value stored under *key* in *region*."""
        region_cache = self.cache.get_cache(region)
        return region_cache.get(key)

    def put(self, key, value, region):
        """Store *value* under *key* in *region*."""
        region_cache = self.cache.get_cache(region)
        region_cache.put(key, value)

    def remove(self, key, region):
        """Delete the entry for *key* from *region*."""
        region_cache = self.cache.get_cache(region)
        region_cache.remove_value(key)

    def hasKey(self, key, region):
        """True if *region* currently holds an entry for *key*."""
        region_cache = self.cache.get_cache(region)
        return region_cache.has_key(key)

    def getRegion(self, region):
        """Return the underlying Beaker cache object for *region*."""
        return self.cache.get_cache(region)
예제 #8
0
파일: cache.py 프로젝트: Py-AMS/pyams-utils
def get_cache(name, region, namespace):
    """Get Beaker cache matching region and namespace"""
    # Caches are memoized as attributes on the module-level _CACHES
    # holder; build one from the region's settings on first access (EAFP).
    try:
        return getattr(_CACHES, name)
    except AttributeError:
        manager = CacheManager(**cache_regions[region])
        new_cache = manager.get_cache(namespace)
        setattr(_CACHES, name, new_cache)
        return new_cache
예제 #9
0
def get_cache(name, data_folder):
    """Return a file-backed Beaker cache named *name* with a 30s expiry.

    NOTE(review): the cache paths are built by plain string concatenation,
    so *data_folder* is presumably expected to end with a path separator —
    confirm with callers.
    """
    options = {
        'cache.type': 'file',
        'cache.data_dir': data_folder + 'cache',
        'cache.lock_dir': data_folder + 'cache-lock',
    }
    manager = CacheManager(**parse_cache_config_options(options))
    return manager.get_cache(name, type='file', expire=30)
예제 #10
0
 def test_flush_data(self):
     """cache_flush_data() must empty every managed cache so the next
     get() re-runs its createfunc instead of serving the stale value."""
     cache_opts = {
         'cache.type': 'memory',
         'cache.regions': 'public.data'
     }
     cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
     params = {}
     params['stateCode'] = 'NC'
     mycache = cache_manager.get_cache('my_namespace', **params)
     f = FakeFunc('hello')
     # Cache miss: the createfunc runs and 'hello' is stored and returned.
     cache = mycache.get("my_namespace {'stateCode': 'NC'}", createfunc=f.fake)
     self.assertEqual(cache, f.msg)
     f.msg = 'bye'
     # The cached value is still 'hello', so it no longer matches f.msg.
     self.assertNotEqual(cache, f.msg)
     cache_flush_data()
     f.msg = 'bye'
     mycache = cache_manager.get_cache('my_namespace', **params)
     # After the flush the same key misses again and caches the new 'bye'.
     cache = mycache.get("my_namespace {'stateCode': 'NC'}", createfunc=f.fake)
     self.assertEqual(cache, f.msg)
예제 #11
0
파일: session.py 프로젝트: shyba/lbryumx
def setup_caching(data_dir):
    """Build and return the dbm-backed short-term cache rooted at
    *data_dir* (4 minute expiry)."""
    options = {
        'cache.type': 'dbm',
        'cache.data_dir': data_dir,
        'cache.lock_dir': data_dir,
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': 'dbm',
    }
    manager = CacheManager(**parse_cache_config_options(options))
    return manager.get_cache('short_term', expire=240)
예제 #12
0
def setup_caching(config):
    """Create the short- and long-term Beaker caches described by *config*.

    Reads the [caching] section for backend type, storage directory and
    per-region expiry, and returns ``(short_term_cache, long_term_cache)``.
    """
    backend = config.get('caching', 'type')
    cache_dir = config.get('caching', 'data_dir')
    short_expire = config.get('caching', 'short_expire')
    long_expire = config.get('caching', 'long_expire')

    options = {
        'cache.type': backend,
        'cache.data_dir': cache_dir,
        'cache.lock_dir': cache_dir,
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': backend,
        'cache.short_term.expire': short_expire,
        'cache.long_term.type': backend,
        'cache.long_term.expire': long_expire,
    }

    manager = CacheManager(**parse_cache_config_options(options))
    short_term = manager.get_cache('short_term', expire=short_expire)
    long_term = manager.get_cache('long_term', expire=long_expire)
    return short_term, long_term
예제 #13
0
def setup_caching(config):
    """Initialise Beaker caching from the [caching] config section and
    return the (short_term, long_term) cache pair."""
    cache_type = config.get('caching', 'type')
    storage_dir = config.get('caching', 'data_dir')
    short_expire = config.get('caching', 'short_expire')
    long_expire = config.get('caching', 'long_expire')

    # Both regions share the backend and directory; only expiry differs.
    beaker_opts = {
        'cache.type': cache_type,
        'cache.data_dir': storage_dir,
        'cache.lock_dir': storage_dir,
        'cache.regions': 'short_term, long_term',
        'cache.short_term.type': cache_type,
        'cache.short_term.expire': short_expire,
        'cache.long_term.type': cache_type,
        'cache.long_term.expire': long_expire,
    }
    manager = CacheManager(**parse_cache_config_options(beaker_opts))

    return (manager.get_cache('short_term', expire=short_expire),
            manager.get_cache('long_term', expire=long_expire))
	def __init__(self, namespace, **nsargs):
		"""Initialise the namespace and attach a dbm-backed Beaker cache.

		:param namespace: cache namespace name, also forwarded to the
			parent constructor.
		:param nsargs: extra namespace arguments for the parent class.
		"""
		super().__init__(namespace, **nsargs)
		cache_opts = {
			'cache.type': 'file',
			'cache.data_dir': 'cache/data',
			'cache.lock_dir': 'cache/lock',
			'cache.short_term.type': 'ext:memcached',
			# host:port -- the previous value '127.0.0.1.11211' used a dot
			# instead of a colon and was not a valid memcached address.
			'cache.short_term.url': '127.0.0.1:11211',
			'cache.short_term.expire': '3600',
			'cache.long_term.type': 'file',
			'cache.long_term.expire': '86400'
		}
		cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
		self.cache = cache_manager.get_cache(namespace, type='dbm')
예제 #15
0
class SessionManager(object):
    """Hands out per-conversation Session objects backed by Beaker caches."""

    def __init__(self, type='memory', **kwargs):
        """Create the cache manager; *kwargs* override the default
        on-disk cache locations."""
        if kwargs:
            options = kwargs
        else:
            options = {
                'data_dir': '/tmp/messengerbot-cache/data',
                'lock_dir': '/tmp/messengerbot-cache/lock'
            }
        options['type'] = type
        self.cachemgr = CacheManager(**options)

    def get_session(self, event):
        """Return the Session for *event*'s (recipient, sender) pair."""
        # One cache namespace per conversation direction.
        namespace = '.'.join([event['recipient']['id'], event['sender']['id']])
        conversation_cache = self.cachemgr.get_cache(namespace)
        return Session(conversation_cache)
예제 #16
0
    def test_cache(self):
        """The middleware should accept a pre-built Beaker cache and serve
        repeated requests through it (second call hits the cached ticket)."""
        import time
        from beaker.cache import CacheManager

        manager = CacheManager()
        ticket_cache = manager.get_cache('auth_pubtkt_middleware', type='memory', expire=3600)
        app = AuthPubTKTMiddleware(DumbApp(), rsa_pub, cache=ticket_cache)
        env = {
            'REMOTE_ADDR': '192.168.1.10',
            'REQUEST_METHOD': 'GET',
            'HTTP_COOKIE': 'auth_pubtkt="uid=user1;validuntil=1277190189;cip=192.168.1.10;tokens=editor,moderator;graceperiod=3600;udata=custom data;[email protected];display_name=John;sig=YaMhb5yXkfqOtQ87P5gYeh4kSgQev1c6XjqT0pXT/ojXj/qpswpyqWenNv3y5rcUPT++80zZPBVNFfwPUI5Crps5nHZP55FNPtBE337KYZ6KYoMEVQD6xqnouf5i1Jm5KwB1IfQdr8fvRQs2oqBIMMTkVyfv6yRRNWVPz+7xwxw="',
        }

        app(env, dumb_start_response)
        app(env, dumb_start_response)
예제 #17
0
 def __init__(self, config):
     """Set up the WMS capability cache from *config*, or leave caching
     disabled when 'wmscapabilitycache.enable' is not 'true'."""
     self.enableCache = config.get('wmscapabilitycache.enable', 'True').lower() == 'true'
     if self.enableCache:
         options = {
             'cache.expire': config.get('wmscapabilitycache.expire', None),
             'cache.type': config.get('wmscapabilitycache.type', 'file'),
             'cache.data_dir': config.get('wmscapabilitycache.data_dir', '/tmp/ecomaps/wmscapabilitycache/data'),
             'cache.lock_dir': config.get('wmscapabilitycache.lock_dir', None),
         }
         manager = CacheManager(**parse_cache_config_options(options))
         self.cache = manager.get_cache('getWmsCapabilities')
     state = "enabled" if self.enableCache else "disabled"
     log.info("WMS capability caching %s" % state)
예제 #18
0
    def __init__(self, app,
                 cookie_cache=True,
                 cache_opts=None,
                 debug=False,
                 cookie_max_age=0,
                 classifiers=None):
        """Configure the device-detection middleware.

        :param app: the wrapped WSGI application.
        :param cookie_cache: whether detection results go into a cookie.
        :param cache_opts: Beaker cache config; falls back to
            ``self.DEFAULT_CACHE_OPTIONS`` when None/empty.
        :param debug: log a startup line when True.
        :param cookie_max_age: cookie max age in seconds (coerced to int).
        :param classifiers: a classifier or list of classifiers.
        """
        # A `classifiers=[]` default would be one shared list across all
        # calls (mutable default argument); use None as the sentinel.
        if classifiers is None:
            classifiers = []
        self.debug = debug
        self.cookie_cache = cookie_cache
        cache_manager = CacheManager(
            **parse_cache_config_options(cache_opts or
                                         self.DEFAULT_CACHE_OPTIONS))
        self.cache = cache_manager.get_cache('mobi.devices')

        if self.debug:
            logger.info('MobiDeviceMiddleware start in debug mode.')
        self.app = app
        self.set_cookie_max_age(int(cookie_max_age))
        # Normalize a single classifier to a one-element list.
        self.classifiers = classifiers if isinstance(classifiers, list) \
                else [classifiers]
예제 #19
0
def init_cache(config):
    """
    Called by environment.py after most of the Pylons setup is done.
    """
    if not config['beaker.cache.enabled']:
        return

    from beaker.cache import CacheManager
    from beaker.util import parse_cache_config_options
    manager = CacheManager(**parse_cache_config_options(config))

    # One cache per bucket, managed by redis; the module-level _cache dict
    # keeps references so the constructed caches can be imported elsewhere.
    buckets = (
        'members', 'contents', 'contents_index', 'members_index',
        'messages_index', 'content_show', 'members_show',
    )
    for bucket in buckets:
        _cache[bucket] = manager.get_cache(bucket)
        # Only clear in development: rebuilding the whole cache on every
        # production server update would put the server under heavy load.
        if config['development_mode']:
            _cache[bucket].clear()
예제 #20
0
    def __init__(self):
        """Bind the per-user console message cache and the app logger."""
        options = {
            'cache.type': 'memory',
            'cache.data_dir': './applications/chimitheque/cache/',
            'cache.lock_dir': './applications/chimitheque/cache/'
        }
        manager = CacheManager(**parse_cache_config_options(options))

        # Each authenticated user gets a unique key to store the MESSAGE
        # in cache; unauthenticated sessions fall back to the empty key.
        try:
            uid = 'cache_console_%s' % str(current.auth.user.id)
        except AttributeError:
            uid = ''
        self.tmpl_cache = manager.get_cache(uid, expire=None)

        self.logger = logging.getLogger('web2py.app.chimitheque')
예제 #21
0
    def prepareData(self, controller, container):
        """Fill *container* with board/tag/user statistics for the home
        page, serving the expensive aggregates from a short-lived memory
        cache when ``g.OPT.statsCacheTime`` is positive."""
        container.totalPostsCount = 0
        mstat = False
        vts = False
        userStats = (0, 0)
        chTime = g.OPT.statsCacheTime

        if chTime > 0:
            cm = CacheManager(type = 'memory')
            cch = cm.get_cache('home_stats')
            container.totalPostsCount = cch.get_value(key = "totalPosts", createfunc = Post.getPostsCount, expiretime = chTime)
            mstat = cch.get_value(key = "mainStats", createfunc = Tag.getStats, expiretime = chTime)
            userStats = cch.get_value(key = "userStats", createfunc = User.getStats, expiretime = chTime)
            vts = cch.get_value(key = "vitalSigns", createfunc = Post.vitalSigns, expiretime = chTime)
        else:
            container.totalPostsCount = Post.getPostsCount()
            userStats = User.getStats()
            mstat = Tag.getStats()
            vts = Post.vitalSigns()

        # Sort descending by post count, then ascending by board tag.
        # The original used the builtin cmp() and the `sorted(seq, cmpfunc)`
        # form, both Python 2 only; emulate cmp() and adapt the comparator
        # with functools.cmp_to_key so the ordering is unchanged on Python 3.
        from functools import cmp_to_key

        def _cmp(x, y):
            return (x > y) - (x < y)

        def taglistcmp(a, b):
            return _cmp(b.count, a.count) or _cmp(a.board.tag, b.board.tag)

        tag_order = cmp_to_key(taglistcmp)

        container.totalUsersCount = userStats[0]
        container.bannedUsersCount = userStats[1]

        container.boards = sorted(mstat.boards, key=tag_order)
        container.tags = sorted(mstat.tags, key=tag_order)
        container.stags = sorted(mstat.stags, key=tag_order)
        container.totalBoardsThreads = mstat.totalBoardsThreads
        container.totalBoardsPosts = mstat.totalBoardsPosts
        container.totalTagsThreads = mstat.totalTagsThreads
        container.totalTagsPosts = mstat.totalTagsPosts
        container.totalSTagsThreads = mstat.totalSTagsThreads
        container.totalSTagsPosts = mstat.totalSTagsPosts

        container.last1KUsersCount = vts.last1KUsersCount
        container.prev1KUsersCount = vts.prev1KUsersCount
        container.lastWeekMessages = vts.lastWeekMessages
        container.prevWeekMessages = vts.prevWeekMessages
예제 #22
0
    def __init__(self,
                 app,
                 cookie_cache=True,
                 cache_opts=None,
                 debug=False,
                 cookie_max_age=0,
                 classifiers=None):
        """Configure the device-detection middleware.

        :param app: the wrapped WSGI application.
        :param cookie_cache: whether detection results go into a cookie.
        :param cache_opts: Beaker cache config; falls back to
            ``self.DEFAULT_CACHE_OPTIONS`` when None/empty.
        :param debug: log a startup line when True.
        :param cookie_max_age: cookie max age in seconds (coerced to int).
        :param classifiers: a classifier or list of classifiers.
        """
        # `classifiers=[]` as a default would be shared between every call
        # (mutable default argument); use a None sentinel instead.
        if classifiers is None:
            classifiers = []
        self.debug = debug
        self.cookie_cache = cookie_cache
        cache_manager = CacheManager(**parse_cache_config_options(
            cache_opts or self.DEFAULT_CACHE_OPTIONS))
        self.cache = cache_manager.get_cache('mobi.devices')

        if self.debug:
            logger.info(u'MobiDeviceMiddleware start in debug mode.')
        self.app = app
        self.set_cookie_max_age(int(cookie_max_age))

        # Normalize a single classifier to a one-element list.
        if not isinstance(classifiers, list):
            classifiers = [classifiers]

        self.classifiers = classifiers
예제 #23
0
    def checkCond(self, userInst):
        """Decide whether *userInst* qualifies for handing out an invite.

        A registered user must satisfy minimum account age, total post
        count and the invite-cooldown check; if so, the user's share of
        last week's messages decides the result. Returns None when the
        obligatory conditions fail.
        """
        eligible = (not userInst.Anonymous
                    and self.__getUserAge(userInst) >= g.OPT.minimalAge
                    and self.__getAllPostsCount(userInst) >= g.OPT.minimalPostsCount
                    and self.__checkLastGivenInvite(userInst))
        if not eligible:
            return None

        recentPostsCount = self.__getRecentPostsCount(userInst)

        # Weekly message totals come from the shared 'home_stats' cache.
        cm = CacheManager(type = 'memory')
        cch = cm.get_cache('home_stats')
        cacheTime = getattr(g.OPT, 'statsCacheTime', 30)
        vts = cch.get_value(key = "vitalSigns", createfunc = Post.vitalSigns, expiretime = cacheTime)

        # Short-circuit keeps the division from running when the count
        # check already failed (matches the original evaluation order).
        return (recentPostsCount >= g.OPT.minimalRecentPostsCount and
                (100.0 * recentPostsCount) / vts.lastWeekMessages
                >= g.OPT.recentPostsPercentage)
예제 #24
0
def make_app(global_conf,
             pub_key,
             key_type='RSA',
             cookie_name=None,
             hdr_prefix=None,
             log_name=None,
             **app_conf):
    """Paste application factory.

    Loads the ticket-verification public key (RSA by default, DSA
    otherwise), collects the optional overrides, wires up the Beaker
    ticket cache when enabled, and returns an AuthRequestApp.
    """
    # Load the verification key with the configured algorithm.
    if key_type == 'RSA':
        pub_key = RSA.load_pub_key(pub_key)
    else:
        pub_key = DSA.load_pub_key(pub_key)

    params = {}
    if cookie_name is not None:
        params['cookie_name'] = cookie_name
    if hdr_prefix is not None:
        params['hdr_prefix'] = hdr_prefix
    if log_name is not None:
        params['log_name'] = log_name

    cache_opts = parse_cache_config_options(app_conf)
    # parse_cache_config_options() coerces 'enabled' to a bool, so a
    # plain truth test replaces the old `== True` comparison (PEP 8 E712).
    if cache_opts.get('enabled'):
        cache_mgr = CacheManager(**cache_opts)
        params['cache'] = cache_mgr.get_cache('tickets_cache')

    return AuthRequestApp(pub_key, **params)
예제 #25
0
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from oslo_log import log as logging

from deckhand.conf import config
from deckhand.engine import layering

CONF = config.CONF
LOG = logging.getLogger(__name__)

# Beaker configuration for the rendered-document cache: in-memory backend
# with the expiry taken from the engine config at import time.
_CACHE_OPTS = {
    'cache.type': 'memory',
    'expire': CONF.engine.cache_timeout,
}
# Module-level manager and cache shared by all rendering lookups.
_CACHE = CacheManager(**parse_cache_config_options(_CACHE_OPTS))
_DOCUMENT_RENDERING_CACHE = _CACHE.get_cache('rendered_documents_cache')


def lookup_by_revision_id(revision_id, documents, **kwargs):
    """Look up rendered documents by ``revision_id``."""
    def _render():
        # Re-render the revision's documents from scratch.
        document_layering = layering.DocumentLayering(documents, **kwargs)
        return document_layering.render()

    if not CONF.engine.enable_cache:
        return _render()
    # Cached per revision id; _render runs only on a miss.
    return _DOCUMENT_RENDERING_CACHE.get(key=revision_id, createfunc=_render)
예제 #26
0
import time
from datetime import datetime

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

from app import app
from ..base import Sensor
from prod_config import ENDOMONDO_EMAIL, ENDOMONDO_PASSWORD
from .client import MobileApi

log = logging.getLogger(__name__)

# File-based Beaker cache under .cache/ holding fetched Endomondo data
# for 24 hours.
opts = {'cache.type': 'file', 'cache.file_dir': '.cache', 'cache.data_dir': '.cache'}
cache_manager = CacheManager(**parse_cache_config_options(opts))
cache = cache_manager.get_cache('endomondo_data', type='file', expire=3600*24)


def datetime_to_integer_unixtime(dt):
    """
    Converting from datetime type to unixtime
    """
    # mktime interprets the timetuple in local time; non-datetime input
    # surfaces as AttributeError (no .timetuple) and becomes a TypeError.
    try:
        stamp = time.mktime(dt.timetuple())
    except AttributeError:  # pragma: no cover
        raise TypeError(
            'datetime_to_unixtime expects datetime object, got %s instead' % type(dt),
        )
    return int(stamp)


class EndomondoSensor(Sensor):
예제 #27
0
import time
from datetime import datetime

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

from app import app
from ..base import Sensor
from prod_config import ENDOMONDO_EMAIL, ENDOMONDO_PASSWORD
from .client import MobileApi

log = logging.getLogger(__name__)

# File-based Beaker cache under .cache/ holding fetched Endomondo data
# for 24 hours.
opts = {"cache.type": "file", "cache.file_dir": ".cache", "cache.data_dir": ".cache"}
cache_manager = CacheManager(**parse_cache_config_options(opts))
cache = cache_manager.get_cache("endomondo_data", type="file", expire=3600 * 24)


def datetime_to_integer_unixtime(dt):
    """
    Converting from datetime type to unixtime
    """
    # Non-datetime input has no .timetuple(): the AttributeError is
    # translated into a TypeError for the caller.
    try:
        timetuple = dt.timetuple()
    except AttributeError:  # pragma: no cover
        raise TypeError("datetime_to_unixtime expects datetime object, got %s instead" % type(dt))
    return int(time.mktime(timetuple))


class EndomondoSensor(Sensor):
    # NOTE(review): presumably the delay between sensor loop iterations,
    # in seconds — confirm against the loop body (not visible here).
    LOOP_DELAY = 600
    # NOTE(review): presumably the number of consecutive errors tolerated
    # before the sensor gives up — confirm against the error handling.
    ERRORS_THRESHOLD = 2
예제 #28
0
파일: drifter.py 프로젝트: larsks/drifter
class Drifter(object):
    """Drive creation and teardown of the OpenStack instances, security
    groups and lookup caches described by a user-level and a project-level
    YAML configuration file."""

    def __init__(self, user_config_file=None, project_config_file=None):
        """Load configuration and initialise logging, caches, locks and
        the nova client.

        :param user_config_file: path to the per-user YAML config
            (defaults to DEFAULT_USER_CONFIG).
        :param project_config_file: path to the project YAML config
            (defaults to DEFAULT_PROJECT_CONFIG).
        """
        self.user_config_file = user_config_file \
                if user_config_file \
                else DEFAULT_USER_CONFIG
        self.project_config_file = project_config_file \
                if project_config_file \
                else DEFAULT_PROJECT_CONFIG

        self.setup_logging()
        self.load_user_config()
        self.load_project_config()
        self.setup_cache()
        self.setup_locks()
        self.create_client()

    def setup_locks(self):
        '''Create locks used for synchronizing parallel
        execution (e.g. when creating instances).'''
        self.net_lock = Lock()

    def setup_logging(self):
        '''Create a logger for this Drifter instance.'''
        self.log = logging.getLogger('drifter')

    def setup_cache(self):
        '''Set up a beaker cache manager plus the image and flavor
        lookup caches.'''
        self.cachemgr = CacheManager(**parse_cache_config_options(cache_opts))
        # beaker's get_cache() takes `expire` (seconds); the previous
        # `expires=` spelling was a typo and never applied an expiry.
        self.image_cache = self.cachemgr.get_cache('images', expire=1800)
        self.flavor_cache = self.cachemgr.get_cache('flavors', expire=1800)

    def qualify(self, name):
        '''Return <name>.<project_name>.<os_username> given <name>.'''
        return '%s.%s.%s' % (
            name,
            self.config['project_name'],
            self.config['os_username'],
        )

    def instances(self):
        '''Generator yielding drifter.instance.Instance objects
        for each instance in your drifter configuration.'''

        defaults = self.config['instances'].get('default', {})
        for name, config in self.config['instances'].items():
            # The 'default' entry holds shared settings, not an instance.
            if name == 'default':
                continue
            if config is None:
                config = {}

            yield Instance(self, name, config, defaults)

    def create_client(self):
        '''Creates a novaclient.v1_1.client.Client for this
        Drifter instance.'''

        self.client = novaclient.v1_1.client.Client(
            self.config['os_username'],
            self.config['os_password'],
            self.config['os_tenant_name'],
            self.config['os_auth_url'],
            service_type="compute")

    def load_config(self, path):
        '''Load a YAML configuration file.  The file is first rendered
        by the Jinja2 template engine and then passed to the YAML
        parser.

        Returns the result of yaml.load()'''

        self.log.info('loading configuration from %s', path)
        with open(path) as fd:
            tmpl = jinja2.Template(fd.read())

        # NOTE(security): yaml.load() without an explicit Loader can
        # construct arbitrary Python objects; switch to yaml.safe_load()
        # if these config files are not fully trusted.
        return yaml.load(tmpl.render())

    def load_user_config(self):
        '''Load configuration from user_config_file (generally
        ~/.drifter.yml).'''

        self.config = self.load_config(self.user_config_file)['drifter']

    def load_project_config(self):
        '''Load configuration from project_config_file (generally
        ./project.yml).'''

        self.config.update(
            self.load_config(self.project_config_file)['project'])

    @ratelimit
    def create_security_group(self, name):
        '''Given <name>, either create and return a new security group
        named <name> or return the existing security group with the same
        name.'''

        self.log.info('creating security group %s', self.qualify(name))
        try:
            group = self.client.security_groups.find(name=self.qualify(name))
        except novaclient.exceptions.NotFound:
            group = self.client.security_groups.create(
                self.qualify(name), '%s security group' % name)

        return group

    @ratelimit
    def create_security_group_rule(self, group, rule):
        '''Add a single <rule> to security group <group>, ignoring
        duplicates.'''
        try:
            self.client.security_group_rules.create(
                group.id,
                ip_protocol=rule['protocol'],
                from_port=rule['from port'],
                to_port=rule['to port'])
        except novaclient.exceptions.BadRequest:
            # This probably means that the rule already exists.
            pass

    def create_security_group_rules(self, group, rules):
        '''Provision security group <group> with rules from <rules>'''

        self.log.info('adding rules to security group %s', group.name)
        for rule in rules:
            rule = Rule(rule)
            self.create_security_group_rule(group, rule)

    def create_security_groups(self):
        '''Create and provision all security groups defined in the
        configuration.'''

        for name, rules in self.config['security groups'].items():
            group = self.create_security_group(name)
            self.create_security_group_rules(group, rules)

    def delete_security_group(self, name):
        '''Delete the named security group.  If it does not exist, ignore
        the error.'''
        self.log.info('deleting security group %s', self.qualify(name))
        try:
            group = self.client.security_groups.find(name=self.qualify(name))
            self.client.security_groups.delete(group.id)
        except novaclient.exceptions.NotFound:
            pass

    def delete_security_groups(self):
        '''Delete all security groups defined in the configuration.'''
        for name, rules in self.config['security groups'].items():
            self.delete_security_group(name)

    def find_instance(self, name):
        '''Return the Instance object for a named instance.  Raises
        KeyError if no matching instance can be found.'''
        for i in self.instances():
            if i['name'] == name:
                return i

        raise KeyError(name)

    def create_instance(self, instance):
        '''Create an instance and assign an ip address.'''

        # Runs in a worker process (see create_instances); each worker
        # needs its own nova client.
        self.create_client()

        # Don't try to create instances that
        # have already booted.
        if instance.status != 'down':
            # logging.warn() is a deprecated alias for warning().
            self.log.warning(
                'ignore create request -- this instance is not down')
            return

        instance.create()
        instance.assign_ip()

    def create_instances(self):
        '''Create all instances defined in the configuration, one worker
        process per instance, and wait for them all to finish.'''
        tasks = []
        for instance in self.instances():
            t = Process(target=self.create_instance,
                        args=(instance, ),
                        name='create-%(name)s' % instance)
            t.start()
            tasks.append(t)

        self.log.debug('waiting for tasks')
        while tasks:
            tasks[0].join()
            t = tasks.pop(0)
            self.log.debug('task %s completed', t)

    def delete_instance(self, instance):
        '''Delete a single instance.'''
        instance.delete()

    def delete_instances(self):
        '''Delete all instances defined in the configuration.'''
        for instance in self.instances():
            self.delete_instance(instance)

    def find_image(self, image):
        '''Resolve an image name to an image object, caching the id.'''
        def _find_image():
            return self.client.images.find(name=image).id

        image_id = self.image_cache.get(key=image, createfunc=_find_image)
        self.log.debug('got id=%s for image=%s', image_id, image)
        return self.client.images.get(image_id)

    def find_flavor(self, flavor):
        '''Resolve a flavor name to a flavor object, caching the id.'''
        def _find_flavor():
            return self.client.flavors.find(name=flavor).id

        flavor_id = self.flavor_cache.get(key=flavor, createfunc=_find_flavor)
        self.log.debug('got id=%s for flavor=%s', flavor_id, flavor)
        return self.client.flavors.get(flavor_id)

    def all_up(self):
        '''Return True if all instances are active.'''
        return all(i.status == 'active' for i in self.instances())

    def all_down(self):
        '''Return True if all instances are down.'''
        return all(i.status == 'down' for i in self.instances())

    def wait_for_up(self):
        '''Wait for all instances to become active.'''
        self.log.info('waiting for instances to start')
        while not self.all_up():
            time.sleep(1)
        self.log.info('instances are started')

    def wait_for_down(self):
        '''Wait for all instances to become down.'''
        self.log.info('waiting for instances to stop')
        while not self.all_down():
            time.sleep(1)
        self.log.info('instances are down')

    def check(self):
        '''Return a (name, status) tuple for all instances defined
        in the configuration.'''
        return [(x['name'], x.status) for x in self.instances()]
예제 #29
0
class CacheManagerExt(Component):
    """Component wrapping a beaker CacheManager for resource caching."""

    def __init__(self):
        """Build the beaker cache manager and the template cache.

        See http://beaker.readthedocs.org/en/latest/caching.html#about
        for the full option reference.
        """
        # Basic beaker configuration; the type can be "memory" or "file".
        self.cache_opts = {
            'cache.type': 'memory',
            # can be "memory" or "file"
            'cache.data_dir': '/tmp/cache/data',
            'cache.lock_dir': '/tmp/cache/lock'
        }
        self.cache = CacheManager(
            **parse_cache_config_options(self.cache_opts))
        # Per-namespace options (type, expire) override the manager
        # defaults for this particular cache.
        self.tmpl_cache = self.cache.get_cache('mytemplate',
                                               type='file',
                                               expire=3600)

    def get_cache(self, key, createfunc):
        """Return the cached result of *createfunc* for *key*.

        On a miss the zero-argument *createfunc* is invoked and its return
        value cached under *key*; on a hit the stored value is returned.

        :type key: String
        :param key: unique cache key

        :type createfunc: function object
        :param createfunc: zero-argument callable; its return value may be
            any basic object (String, int, tuple, list, dict, ...)

        :return: the value mapped to the key

        :example:
            CacheManager.get_cache(key="abc", createfunc=func)
        """
        return self.tmpl_cache.get(key=key, createfunc=createfunc)

    def invalidate(self, key):
        """Remove the key-value pair from the cache.

        :type key: String
        :param key: unique cache key

        :rtype: bool
        :return: True if the pair was removed, False otherwise
        """
        try:
            self.tmpl_cache.remove_value(key=key)
        except Exception as e:
            self.log.error(e)
            return False
        return True

    def clear(self):
        """Remove every entry from the cache.

        :rtype: bool
        :return: True if the cache was cleared, False otherwise
        """
        try:
            self.tmpl_cache.clear()
        except Exception as e:
            self.log.error(e)
            return False
        return True
예제 #30
0
class CacheManagerExt(Component):
    """Component wrapping a file-backed beaker CacheManager for resource caching."""
    def get_cache(self, key, createfunc):
        """Return the cached result of *createfunc* for *key*.

        If the key is already cached, the stored value is returned;
        otherwise *createfunc* is called and its return value cached.

        :type key: String
        :param key: unique cache key

        :type createfunc: function object
        :param createfunc: zero-argument callable; its return value may be
            any basic object (String, int, tuple, list, dict, ...)

        :return: the value mapped to the key

        :example:
            CacheManager.get_cache(key="abc", createfunc=func)

        """
        results = self.tmpl_cache.get(key=key, createfunc=createfunc)
        return results

    def invalidate(self, key):
        """Remove the key-value pair from the cache.

        :type key: String
        :param key: unique cache key

        :rtype: bool
        :return: True if the pair was removed, False otherwise

        """
        try:
            self.tmpl_cache.remove_value(key=key)
            return True
        except Exception as e:
            self.log.error(e)
            return False

    def clear(self):
        """Remove every entry from the cache.

        :rtype: bool
        :return: True if the cache was cleared, False otherwise
        """
        try:
            self.tmpl_cache.clear()
            return True
        except Exception as e:
            self.log.error(e)
            return False

    def __init__(self):
        """Initialize the cache manager.

        More configuration: http://beaker.readthedocs.org/en/latest/caching.html#about
        """
        # store the basic configuration
        self.cache_opts = {
            'cache.type': 'file',
            # can be "memory" or "file"
            'cache.data_dir': '/tmp/cache/data',
            'cache.lock_dir': '/tmp/cache/lock'
        }
        # create CacheManager instance with cache_opts
        self.cache = CacheManager(**parse_cache_config_options(self.cache_opts))
        # In addition to the defaults supplied to the CacheManager instance,
        # any of the Cache options can be changed on a per-namespace basis,
        # by setting a type, and expire option.
        self.tmpl_cache = self.cache.get_cache('mytemplate', type='file', expire=3600)
예제 #31
0
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options, func_namespace
from beakerfiddling.addone import addone
# BUG FIX: ConfigParser was used below but never imported (NameError at
# runtime).  This is the Python 2 module name; on Python 3 it is
# ``configparser``.
import ConfigParser
import os

# Read beaker options from development.ini next to this script.
here = os.path.dirname(os.path.abspath(__file__))
c = ConfigParser.ConfigParser(defaults={'here': here})
c.read(os.path.join(here, 'development.ini'))
cache_manager = CacheManager(**parse_cache_config_options(dict(c.items('app:main'))))

# Warm the cache: the second call of each pair should be served from cache.
print(addone(0))
print(addone(0))

print(addone(9))
print(addone(9))

# Invalidate the region entry for input 0 only; 9 stays cached.
cache_manager.region_invalidate(addone, 'region1', 0)
print(addone(0))
print(addone(0))

print(addone(9))
print(addone(9))

# Now interrogate the cache -- see if it has values for 0 and 9 (it
# should, of course).

c = cache_manager.get_cache(func_namespace(addone.original_function))

for n in range(10):
    print("{}: {}".format(n, "Present" if str(n) in c else "absent"))
예제 #32
0
class CacheManager(grok.GlobalUtility):
    """A cache manager that wraps beaker.

    Region options are read from the Zope product configuration under the
    ``silva.core.cache`` section; keys take the form ``<region>.<param>``.
    """
    grok.implements(ICacheManager)
    grok.provides(ICacheManager)

    # Fallback options used for any region with no explicit configuration.
    default_region_options = {
        'lock_dir': '/tmp/beaker',
        'type': 'memorylru',
        'max_items': '1000',
    }

    def __init__(self):
        # BUG FIX: _parse_config() was previously called twice, the first
        # result being discarded; a single call is sufficient (it also
        # updates default_region_options as a side effect).
        self.bcm = BCM(**self._parse_config())
        self.regions = self.bcm.regions

    def get_cache_from_region(self, namespace, region):
        """Return the cache for *namespace* in *region*.

        If beaker does not know the region yet, it is created on the fly
        from the default region options.
        """
        try:
            return self.bcm.get_cache_region(namespace, region)
        except BeakerException as e:
            logger.warn('no specific configuration for region %s'
                        ' using defaults (%s) : %s',
                        region, repr(self.default_region_options), e)
            self._create_region_from_default(region)
            return self.bcm.get_cache_region(namespace, region)

    def get_cache(self, namespace, **options):
        """Return a cache configured directly from *options*.

        Doesn't rely on region for configuration.
        """
        return self.bcm.get_cache(namespace, **options)

    def _parse_config(self):
        """Build beaker options from the Zope product configuration.

        A ``default`` region, if present, replaces the class-level
        default_region_options; remaining regions are collected under the
        ``cache_regions`` key of the returned options.
        """
        zconf = getattr(getConfiguration(), 'product_config', {})
        cache_config = zconf.get('silva.core.cache', {})
        regions = {}
        for key, value in cache_config.iteritems():
            if '.' in key:
                try:
                    region, param = key.split('.', 1)
                except ValueError:
                    continue

                if region not in regions:
                    regions[region] = {}
                regions[region][param] = value

        # ``dict.has_key`` is deprecated (and gone in Python 3); a plain
        # membership test behaves identically.
        if 'default' in regions:
            self.default_region_options = regions['default']
            del regions['default']

        options = self.default_region_options.copy()
        options['cache_regions'] = regions
        return options

    def _create_region_from_default(self, region):
        """Register *region* everywhere beaker looks, using the defaults."""
        options = self.default_region_options.copy()
        self.regions[region] = options
        self.bcm.regions.update({region: options})
        beaker.cache.cache_regions.update({region: options})
예제 #33
0
from flask import Blueprint, request
import os
import json
import requests
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from utils.yt import get_popular_video_youtube, search_video_youtube

# set up cache system: dbm-backed responses kept for 30 minutes
cache_opts = {
    'cache.type': 'dbm',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}
cache = CacheManager(**parse_cache_config_options(cache_opts))
tmpl_cache = cache.get_cache('facebook_bot_cache', type='dbm', expire=1800)

# setup encoding and absolute root path
ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '.'))

fb_bot = Blueprint('fb_bot', __name__, None)
fb_bot.config = {}


@fb_bot.record
def get_fb_config(setup_state):
    """Blueprint registration hook: copy the app's config onto fb_bot."""
    global FB_GRAPH_API, FB_MSG_API_URL, VERIFY_TOKEN, PAGE_ACCESS_TOKEN
    app = setup_state.app
    # dict() over items() replaces the redundant identity comprehension.
    fb_bot.config = dict(app.config.items())
    # FB App settings
예제 #34
0
파일: drifter.py 프로젝트: larsks/drifter
class Drifter (object):
    """Drives OpenStack instance and security-group lifecycle for a
    drifter project, with beaker-cached image/flavor name lookups."""

    def __init__ (self,
            user_config_file=None,
            project_config_file=None):
        """Load user and project configuration, then initialize logging,
        caching, locks and the nova client.

        :param user_config_file: per-user YAML config
            (defaults to DEFAULT_USER_CONFIG)
        :param project_config_file: per-project YAML config
            (defaults to DEFAULT_PROJECT_CONFIG)
        """
        self.user_config_file = user_config_file \
                if user_config_file \
                else DEFAULT_USER_CONFIG
        self.project_config_file = project_config_file \
                if project_config_file \
                else DEFAULT_PROJECT_CONFIG

        self.setup_logging()
        self.load_user_config()
        self.load_project_config()
        self.setup_cache()
        self.setup_locks()
        self.create_client()

    def setup_locks(self):
        '''Create locks used for synchronizing parallel
        execution (e.g. when creating instances).'''
        self.net_lock = Lock()

    def setup_logging(self):
        '''Create a logger for this Drifter instance.'''
        self.log = logging.getLogger('drifter')

    def setup_cache(self):
        '''Set up a beaker cache manager for image/flavor lookups.'''
        self.cachemgr = CacheManager(**parse_cache_config_options(cache_opts))
        # BUG FIX: beaker's get_cache keyword is ``expire``; the previous
        # ``expires`` spelling is not a recognized option, so the intended
        # 30-minute TTL was not applied.
        self.image_cache = self.cachemgr.get_cache('images', expire=1800)
        self.flavor_cache = self.cachemgr.get_cache('flavors', expire=1800)

    def qualify(self, name):
        '''Return <name>.<project_name>.<os_username> given <name>.'''
        return '%s.%s.%s' % (
                name,
                self.config['project_name'],
                self.config['os_username'],
                )

    def instances(self):
        '''Generator that yields a drifter.instance.Instance object for
        each non-default instance in your drifter configuration.'''
        defaults = self.config['instances'].get('default', {})
        for name, config in self.config['instances'].items():
            if name == 'default':
                continue
            if config is None:
                config = {}

            yield Instance(self, name, config, defaults)

    def create_client(self):
        '''Creates a novaclient.v1_1.client.Client for this
        Drifter instance.'''
        self.client = novaclient.v1_1.client.Client(
                self.config['os_username'],
                self.config['os_password'],
                self.config['os_tenant_name'],
                self.config['os_auth_url'],
                service_type="compute")

    def load_config(self, path):
        '''Load a YAML configuration file.  The file is first rendered
        by the Jinja2 template engine and then passed to the YAML
        parser.

        Returns the result of yaml.load()'''
        self.log.info('loading configuration from %s', path)
        with open(path) as fd:
            tmpl = jinja2.Template(fd.read())

        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; the config files are assumed trusted here.
        return yaml.load(tmpl.render())

    def load_user_config(self):
        '''Load configuration from user_config_file (generally
        ~/.drifter.yml).'''
        self.config = self.load_config(self.user_config_file)['drifter']

    def load_project_config(self):
        '''Load configuration from project_config_file (generally
        ./project.yml).'''
        self.config.update(self.load_config(self.project_config_file)['project'])

    @ratelimit
    def create_security_group(self, name):
        '''Given <name>, either create and return a new security group
        named <name> or return the existing security group with the same
        name.'''
        self.log.info('creating security group %s', self.qualify(name))
        try:
            group = self.client.security_groups.find(name=self.qualify(name))
        except novaclient.exceptions.NotFound:
            group = self.client.security_groups.create(
                    self.qualify(name),
                    '%s security group' % name)

        return group

    @ratelimit
    def create_security_group_rule(self, group, rule):
        '''Add a single rule to <group>, silently ignoring duplicates.'''
        # (Body indentation normalized; it was double-indented, and the
        # result was bound to an unused local.)
        try:
            self.client.security_group_rules.create(
                    group.id,
                    ip_protocol=rule['protocol'],
                    from_port=rule['from port'],
                    to_port=rule['to port'])
        except novaclient.exceptions.BadRequest:
            # This probably means that the rule already exists.
            pass

    def create_security_group_rules(self, group, rules):
        '''Provision security group <group> with rules from <rules>'''
        self.log.info('adding rules to security group %s', group.name)
        for rule in rules:
            rule = Rule(rule)
            self.create_security_group_rule(group, rule)

    def create_security_groups(self):
        '''Create and provision all security groups defined in the
        configuration.'''
        for name, rules in self.config['security groups'].items():
            group = self.create_security_group(name)
            self.create_security_group_rules(group, rules)

    def delete_security_group(self, name):
        '''Delete the named security group.  If it does not exist, ignore
        the error.'''
        self.log.info('deleting security group %s', self.qualify(name))
        try:
            group = self.client.security_groups.find(name=self.qualify(name))
            self.client.security_groups.delete(group.id)
        except novaclient.exceptions.NotFound:
            pass

    def delete_security_groups(self):
        '''Delete all security groups defined in the configuration.'''
        for name, rules in self.config['security groups'].items():
            self.delete_security_group(name)

    def find_instance(self, name):
        '''Return the Instance object for a named instance.  Raises
        KeyError if no matching instance can be found.'''
        for i in self.instances():
            if i['name'] == name:
                return i

        raise KeyError(name)

    def create_instance(self, instance):
        '''Create an instance and assign an ip address.'''
        # Re-create the nova client -- this method runs inside a worker
        # process, which presumably needs its own connection (TODO confirm).
        self.create_client()

        # Don't try to create instances that
        # have already booted.
        if instance.status != 'down':
            self.log.warn('ignore create request -- this instance is not down')
            return

        instance.create()
        instance.assign_ip()

    def create_instances(self):
        '''Create all instances defined in the configuration, using one
        worker process per instance.'''
        # (An unused 'defaults' local was removed here.)
        tasks = []
        for instance in self.instances():
            t = Process(
                    target=self.create_instance,
                    args=(instance,),
                    name='create-%(name)s' % instance)
            t.start()
            tasks.append(t)

        self.log.debug('waiting for tasks')
        while tasks:
            tasks[0].join()
            t = tasks.pop(0)
            self.log.debug('task %s completed', t)

    def delete_instance(self, instance):
        '''Delete a single instance.'''
        instance.delete()

    def delete_instances(self):
        '''Delete all instances defined in the configuration.'''
        # (An unused 'defaults' local was removed here.)
        for instance in self.instances():
            self.delete_instance(instance)

    def find_image(self, image):
        '''Return the nova image object for the named image, memoizing
        the name-to-id lookup in the beaker image cache.'''
        def _find_image():
            return self.client.images.find(name=image).id

        # Renamed from ``id`` to avoid shadowing the builtin.
        image_id = self.image_cache.get(key=image, createfunc=_find_image)
        self.log.debug('got id=%s for image=%s', image_id, image)
        return self.client.images.get(image_id)

    def find_flavor(self, flavor):
        '''Return the nova flavor object for the named flavor, memoizing
        the name-to-id lookup in the beaker flavor cache.'''
        def _find_flavor():
            return self.client.flavors.find(name=flavor).id

        flavor_id = self.flavor_cache.get(key=flavor, createfunc=_find_flavor)
        self.log.debug('got id=%s for flavor=%s', flavor_id, flavor)
        return self.client.flavors.get(flavor_id)

    def all_up(self):
        '''Return True if all instances are active.'''
        return all(i.status == 'active' for i in self.instances())

    def all_down(self):
        '''Return True if all instances are down.'''
        return all(i.status == 'down' for i in self.instances())

    def wait_for_up(self):
        '''Wait for all instances to become active, polling once a second.'''
        self.log.info('waiting for instances to start')
        while not self.all_up():
            time.sleep(1)
        self.log.info('instances are started')

    def wait_for_down(self):
        '''Wait for all instances to become down, polling once a second.'''
        self.log.info('waiting for instances to stop')
        while not self.all_down():
            time.sleep(1)
        self.log.info('instances are down')

    def check(self):
        '''Return a (name, status) tuple for all instances defined
        in the configuration.'''
        return [(x['name'], x.status) for x in self.instances()]
예제 #35
0
import os
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# Directory containing this module; cache directories live alongside it.
dot = os.path.dirname(__file__)

cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': os.path.join(dot,'url_shortener_data'),
    'cache.lock_dir': os.path.join(dot,'url_shortener_lock'),
}

# Cache of shortened URLs (dbm namespace; expiry left disabled).
cache = CacheManager(**parse_cache_config_options(cache_opts))
tcache = cache.get_cache("stuff", type='dbm') #, expire=3600)


# Separate manager/namespace used for abuse tracking.
abuse_opts = {
    'cache.type': 'file',
    'cache.data_dir': os.path.join(dot,'abuser_data'),
    'cache.lock_dir': os.path.join(dot,'abuser_lock'),
}

abuse = CacheManager(**parse_cache_config_options(abuse_opts))
tabuse = abuse.get_cache("stuff", type='dbm') #, expire=5)

예제 #36
0
from .parsing import get_as_bs_tree, parse_categories

import appdirs
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options


# APP_NAME / APP_AUTHOR / CACHE_EXPIRY are presumably defined earlier in
# this module (not visible here) -- verify before reuse.
CACHE_DIR = appdirs.user_cache_dir(APP_NAME, APP_AUTHOR)
CACHE_OPTS = {
    'cache.type': 'file',
    'cache.data_dir': CACHE_DIR + '/data',
    'cache.lock_dir': CACHE_DIR + '/lock',
    'cache.expire': CACHE_EXPIRY
}
CACHE_MANAGER = CacheManager(**parse_cache_config_options(CACHE_OPTS))
CATEGORY_CACHE = CACHE_MANAGER.get_cache('category')


def clear_cache():
    """Drop all cached category data."""
    CATEGORY_CACHE.clear()


def _all_categories():
    """Cache helper for all_categories: scrape and parse the category tree."""
    cats_tree = get_as_bs_tree('http://www.digikey.com/product-search/en')
    categories = parse_categories(cats_tree)
    return categories


def all_categories():
    """Get all Digi-Key categories."""
예제 #37
0
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from config import CACHE


# Module-level caches built from the project-wide CACHE settings.
cache = CacheManager(**parse_cache_config_options(CACHE.CACHE_OPTS))
branch_cache = cache.get_cache('branch')
attribute_cache = cache.get_cache('attribute')
예제 #38
0
	def createCacheFile():
        cache = CacheManager(**parse_cache_config_options(cache_opts))
		tmpl_cache = cache.get_cache('mytemplate', type='file', expire=5)
예제 #39
0
import random
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# APP_PWD must point at the application root; used to build cache paths.
# (``os`` is presumably imported earlier in this module -- verify.)
APP_PATH = os.environ.get('APP_PWD')
CACHE_FILE = APP_PATH + '/cache/popular_youtube.json'
YT_KEY = str(os.environ.get('YOUTUBE_API_KEY'))
YT_API_ENDPOINT = 'https://www.googleapis.com/youtube/v3/'
# set up cache system
cache_opts = {
    'cache.type': 'dbm',
    'cache.data_dir': APP_PATH + '/cache/data',
    'cache.lock_dir': APP_PATH + '/cache/lock'
}
cache = CacheManager(**parse_cache_config_options(cache_opts))
# 30-minute cache for YouTube API responses.
tmpl_cache = cache.get_cache('template', type='dbm', expire=1800)


def get_popular_video_youtube(limit=30, random_videos=False, country='US'):
    '''
    Get a number of popular youtube video by country

    Args:

        limit (int, optional): The number of video to receive. Defaults to 30.
        random_videos (bool, optional): Random or not. Defaults to False.
        country (str, optional): Defaults to 'US'.

    Returns:

        json: a list with: id, url, title, thumb
예제 #40
0
파일: theme.py 프로젝트: larsks/dropblog
#!/usr/bin/python

import os
import sys

from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# File-backed beaker cache rooted in ./data (resolved to an absolute path).
cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': os.path.join(os.path.abspath('data'), 'cache'),
    'cache.lock_dir': os.path.join(os.path.abspath('data'), 'lock'),
}

cache = CacheManager(**parse_cache_config_options(cache_opts))
theme_cache = cache.get_cache('themes')

def get_theme_html(theme):
    """Return the HTML for *theme* (not yet implemented)."""
    pass

def get_theme_css(theme):
    """Return the CSS for *theme* (not yet implemented)."""
    pass

예제 #41
0
 def __init__(self, name, config):
     """Build a beaker cache named *name* from the given config mapping."""
     cacheMgr = CacheManager(**parse_cache_config_options(config))
     self.cache = cacheMgr.get_cache(name)
예제 #42
0
 def init_app(self, app):
     """Flask extension hook: build a beaker cache from app.config['CACHE']
     and expose it as app.extensions['cache']."""
     self.logger.info("Initializing Cache %s", pformat(app.config['CACHE']))
     cachemanager = CacheManager(
         **parse_cache_config_options(app.config['CACHE']))
     cache = cachemanager.get_cache('cache')
     app.extensions['cache'] = cache
예제 #43
0
"""
The cache settings have to come from the plugged application...

Or will the cache simply be standalone?
After all, is there any real need for this
to be defined in the plugged app itself
"""


from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

cache_mgr = CacheManager(**parse_cache_config_options(cache_opts))
cache = cache_mgr.get_cache('elfinder')


예제 #44
0
# List of Valid file extensions to check against, if any of these don't match, this means our regex didn't quite
# parse the link correctly to pull out the real file link.
# Note: check for presence in list is also lowercased
VALID_FILE_EXTENSIONS = ['jpg', 'jpeg', 'gif', 'png', 'ico', 'bmp', 'svg']

# 6x gives us a decent boost in speed, and should hopefully offset some of the delays via sleeps
threadLimiter = threading.BoundedSemaphore(6)

# lock for printing to console
printing_lock = threading.Lock()

# create an in memory cache for storing hostnames, so that we can throttle requests on recently seen
# hostnames, to avoid potentially getting flagged for flooding servers. 15 seconds seems a reasonable amount of time
# to make sure we throttle on
cache_manager = CacheManager(**parse_cache_config_options({'cache.type': 'memory'}))
hostname_cache = cache_manager.get_cache('hostnames', type='memory', expire=15)

# Global verbosity switch for extra diagnostics.
DEBUG_MODE = False


# defines the downloader threads so that we can download images in a concurrent way
class DownloadThread(threading.Thread):
    """Worker thread that downloads one image link for a word.

    The constructor only records its arguments; the actual download is
    performed by the run() method defined elsewhere.
    """

    def __init__(self, word_downloader, href_attribute, link_index, base_path_for_word, user_agent, verbose_mode):
        threading.Thread.__init__(self)
        self.word_downloader = word_downloader
        self.href_attribute = href_attribute
        self.link_index = link_index
        self.base_path_for_word = base_path_for_word
        self.user_agent = user_agent
        self.verbose_mode = verbose_mode
예제 #45
0
파일: orm.py 프로젝트: ecostadaluz/core
import uuid
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

from decimal import Decimal  # TODO: delete this once to_decimal is moved out of this module
import traceback
import ujson

# In-memory beaker caches; data/lock dirs are unused for type "memory".
cache_opts = {
    'cache.type': 'memory',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

cache = CacheManager(**parse_cache_config_options(cache_opts))
# Two namespaces with a short 10-second TTL.
erp_cache = cache.get_cache('erp_cache', type='memory', expire=10)
short_cache = cache.get_cache('short_cache', type='memory', expire=10)


def get_context(window_id):
    """Read and deserialize the saved context dict for *window_id*.

    The context is expected at ../tmp/<window_id>ctx.json relative to the
    current working directory.
    """
    path = '../tmp/{window_id}ctx.json'.format(window_id=window_id)
    with open(path, mode='r', encoding='utf-8') as json_file:
        json_string = json_file.read()
    return ujson.loads(json_string)


def set_context(window_id, ctx_dict):
    #print('Im on set_context', window_id)
    #print (ctx_dict)
예제 #46
0
import bobo
import webob
from random import  sample
from string import digits, ascii_letters
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

# dbm-backed store of short-id -> url mappings, kept for one hour.
cache = CacheManager(**parse_cache_config_options(cache_opts))
tcache = cache.get_cache("stuff", type='dbm', expire=3600)


def short_id(num):
    """Return a random identifier of *num* distinct alphanumeric characters."""
    alphabet = digits + ascii_letters
    return "".join(sample(alphabet, num))


@bobo.post('/')
def post(url):
    """Shorten *url*: store it under a fresh 5-character id and return the id.

    The id-to-url mapping lives in the module-level ``tcache``.
    """
    # Renamed the local from ``id`` to avoid shadowing the builtin.
    short = short_id(5)
    tcache.put(short, url)
    return short


@bobo.query('/:short', method="GET")
def query(short):
    if short in tcache:
예제 #47
0
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options
from oslo_log import log as logging

from deckhand.conf import config

CONF = config.CONF
LOG = logging.getLogger(__name__)

# In-memory cache for Barbican secrets; the TTL comes from configuration.
_CACHE_OPTS = {
    'cache.type': 'memory',
    'expire': CONF.barbican.cache_timeout,
}
_CACHE = CacheManager(**parse_cache_config_options(_CACHE_OPTS))
_BARBICAN_CACHE = _CACHE.get_cache('barbican_cache')

# NOTE(felipemonteiro): The functions below realize a lookup and reverse-lookup
# to allow for much faster retrieval of encrypted data from Barbican, which
# doesn't currently support batched requests in its Secrets API. This behavior
# is necessary since Deckhand has to potentially retrieve and store up to
# dozens of secrets per request. Note that data for both lookup functions
# below are invalidated together, as they are tied to the same cache.


def lookup_by_ref(barbicanclient, secret_ref):
    """Look up secret object using secret reference.

    Allows for quick lookup of secret payloads using ``secret_ref`` via
    caching.
    """
예제 #48
0
파일: search.py 프로젝트: aknott/scearch
# Pool of 40 greenlets for concurrent SoundCloud API calls.
pool = gevent.pool.Pool(40)

import soundcloud
from time import time, strptime, mktime,clock
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': 'cache/data',
    'cache.lock_dir': 'cache/lock'
}

#Cache stores search results for 24 hours
cm = CacheManager(**parse_cache_config_options(cache_opts))
cache = cm.get_cache('trackcache', type='dbm', expire=3600*24)


# NOTE(review): the client id is hard-coded; consider moving to configuration.
client = soundcloud.Client(client_id='af912f440f0d027065e7351089b08a52')

def getPlaysPer(track):
    """Average plays per day since the track was created.

    NOTE(review): assumes ``created_at`` ends with a 6-character timezone
    suffix (e.g. ' +0000') that is stripped before parsing -- confirm.
    """
    created = strptime(track.created_at[:-6], "%Y/%m/%d %H:%M:%S")
    days_alive = (time() - mktime(created)) / (3600 * 24)
    return track.playback_count / days_alive

def getHype(track):
    """Heuristic 'hype' score for a track.

    Computed as (plays * plays-per-day) raised to the favourites/plays
    ratio.  Only defined for tracks with more than 500 plays; otherwise
    None is returned.
    """
    if track.playback_count <= 500:
        return None
    ratio = float(track.favoritings_count) / float(track.playback_count)
    plays_per_day = getPlaysPer(track)
    return (track.playback_count * plays_per_day) ** ratio
예제 #49
0
파일: source.py 프로젝트: jedahu/flog
class Source:
  def __init__(self, path, expire):
    cache_opts = {
        'cache.type': 'dbm',
        'cache.data_dir': path,
        'cache.expire': expire
        }
    self.cache_manager = CacheManager(**parse_cache_config_options(cache_opts))
    self.etag_cache = self.cache_manager.get_cache('etags', expire=365*24*60*60)
    self.fn_cache = self.cache_manager.get_cache('processed')
    self.url_cache = self.cache_manager.get_cache('urls')
    self.id_cache = self.cache_manager.get_cache('id')

  def url_cache_get_or_abort(self, url, code):
    try:
      val = self.url_cache._get_value(url)
      if val.has_value():
        ret = val._get_value()
        if ret:
          return ret
      raise SourceError(code)
    except Exception:
      raise SourceError(code)

  def url_cache_get_or_raise(self, url, error):
    val = self.url_cache._get_value(url)
    if val.has_value():
      val.namespace.acquire_read_lock()
      try:
        _stored, _expired, ret = val._get_value()
        if ret:
          return ret
      except Exception:
        raise error
      finally:
        val.namespace.release_read_lock()
    raise error

  def cache(self, *args, **kwargs):
    return self.cache_manager.cache(*args, **kwargs)

  def cache_with_id(self, key):
    def decorate(fn):
      def wrapper(*args, **kwargs):
        def create_id_cache_value():
          return fn(*args, **kwargs)
        return self.id_cache.get(key=key, createfunc=create_id_cache_value)

  def source(self, url):
    def decorate(fn):
      def wrapper(*args, **kwargs):
        def create_url_cache_value():
          headers = {}
          stored_etag = self.etag_cache.get(key=url, createfunc=lambda:None)
          if stored_etag:
            headers = {'If-None-Match': stored_etag}
          request = urllib2.Request(url, headers=headers)
          error = None
          error_code = None
          try:
            response = urllib2.urlopen(request)
          except urllib2.HTTPError, e:
            error_code = e.code
            error = e
          except urllib2.URLError, e:
            error = e
          if error_code == 304:
            return self.url_cache_get_or_raise(url, error)
          if error_code in (404, 410, 451):
            return flask.abort(e.code)
          if error:
            return self.url_cache_get_or_raise(url, error)
          etag = response.info().getheader('ETag', None)
          if etag:
            self.etag_cache.put(key=url, value=etag)
          return response.read()

        def create_fn_cache_value():
          if url:
            val = self.url_cache.get(key=url, createfunc=create_url_cache_value)
            return fn(val, *args, **kwargs)
          else:
            return fn(*args, **kwargs)

        try:
          return self.fn_cache.get(key=fn.__name__+url, createfunc=create_fn_cache_value)
        except SourceError, e:
          return flask.abort(e.code)
예제 #50
0
import uuid
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

from decimal import Decimal  # TODO: delete this once to_decimal is moved out of this module
import traceback
import ujson

# In-memory beaker caches; data/lock dirs are unused for type "memory".
cache_opts = {
    'cache.type': 'memory',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

cache = CacheManager(**parse_cache_config_options(cache_opts))
# Two namespaces with a short 10-second TTL.
erp_cache = cache.get_cache('erp_cache', type='memory', expire=10)
short_cache = cache.get_cache('short_cache', type='memory', expire=10)


def get_context(window_id):
    """Read and deserialize the saved context dict for *window_id*
    from ../tmp/<window_id>ctx.json."""
    #print('Im on get_context', window_id)
    with open('../tmp/{window_id}ctx.json'.format(window_id=window_id), mode='r' , encoding='utf-8') as json_file:
        json_string = json_file.read()
        #print (json_string)
        ctx_dict = ujson.loads(json_string)
    return ctx_dict


def set_context(window_id, ctx_dict):
    #print('Im on set_context', window_id)
    #print (ctx_dict)
예제 #51
0
f.close()

redirect_uri = dns + '/redirect'
# NOTE(review): OAuth client credentials are hard-coded here; they should
# be moved to configuration or environment variables.
client_id = '569529233074-a9eq4l7argkbjfv1opcp2kdbf2b2hc2b.apps.googleusercontent.com'
client_secret = 'ugqrZlVwM814f9Rmc5_3UGPZ'

#cache
cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': '/tmp/cache/data',
    'cache.lock_dir': '/tmp/cache/lock'
}

cache = CacheManager(**parse_cache_config_options(cache_opts))

# One dbm namespace per redirect URI, cleared at startup.
tmpl_cache = cache.get_cache(redirect_uri, type='dbm', expire = 3600)
tmpl_cache.clear()


#configure middleware
session_opts = {
    'session.type': 'file',
    'session.cookie_expires': 300,
    'session.data_dir': './data',
    'session.auto': True
}

wsgi_app = SessionMiddleware(bottle.app(), session_opts)

@route('/')
def home():
예제 #52
0
from datetime import datetime
import numpy as np
import pandas as pd
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# Log level comes from the environment; an unrecognized value falls back
# to INFO via getattr's default argument.
DASH_LOG_LEVEL = os.getenv("DASH_LOG_LEVEL", default="info")
logging.basicConfig(
    level=getattr(logging, DASH_LOG_LEVEL.upper(), logging.INFO))

# Set up cache. Default TTL is 43200 s (12 h), overridable via env var.
CACHE_EXPIRE = int(os.getenv("DASH_CACHE_EXPIRE", default="43200"))
logging.info("Cache expire set to %s seconds", CACHE_EXPIRE)
cache_opts = {"cache.type": "memory"}
cache = CacheManager(**parse_cache_config_options(cache_opts))
data_cache = cache.get_cache("api_data", type="memory", expire=CACHE_EXPIRE)

# Bypass SSL certification check for the AICC server
# Remove if/when they address that configuration
# TODO -- verify if this is still needed after AICC update,
# unfortunately still true as of 4/20
# SECURITY(review): this disables certificate verification process-wide,
# not just for the AICC host.
try:
    _create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
    # Legacy Python that doesn't verify HTTPS certificates by default
    pass
else:
    # Handle target environment that doesn't support HTTPS verification
    ssl._create_default_https_context = _create_unverified_https_context

if os.getenv("FLASK_DEBUG") == "True":
예제 #53
0
파일: search.py 프로젝트: aknott/scearch
pool = gevent.pool.Pool(40)  # cap concurrency at 40 greenlets

import soundcloud
from time import time, strptime, mktime, clock
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# File-backed Beaker cache; directories are relative to the working dir.
cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': 'cache/data',
    'cache.lock_dir': 'cache/lock'
}

# Cache stores search results for 24 hours (expire is in seconds).
cm = CacheManager(**parse_cache_config_options(cache_opts))
cache = cm.get_cache('trackcache', type='dbm', expire=3600 * 24)

client = soundcloud.Client(client_id='af912f440f0d027065e7351089b08a52')


def getPlaysPer(track):
    """Return the track's average playback count per day since creation.

    NOTE(review): the trailing 6 characters of ``created_at`` (assumed to
    be a " +0000"-style offset) are dropped, and mktime then interprets
    the timestamp as *local* time -- confirm the feed really is local.
    """
    # Seconds since the epoch at which the track was created.
    born = mktime(strptime(track.created_at[:-6], "%Y/%m/%d %H:%M:%S"))
    # Age in days; divide total plays by it to get plays per day.
    age_days = (time() - born) / (3600 * 24)
    return track.playback_count / age_days


def getHype(track):
    if (track.playback_count > 500):
        hyperatio = float(track.favoritings_count) / float(
            track.playback_count)
예제 #54
0
# logout button -- HTML fragment appended to generated pages; baseURL is
# defined elsewhere in this file.
logoutButton = '''<FORM METHOD="LINK" ACTION="''' + baseURL + '''/logout" ALIGN = "right">
<INPUT TYPE="submit" VALUE="Logout">
</FORM></body></html>'''

# cache -- note get_cache() below overrides the configured 'file'
# backend with type='dbm' for this region.
cache_opts = {
	'cache.type': 'file',
	'cache.data_dir': '/tmp/cache/data',
	'cache.lock_dir': '/tmp/cache/lock'
}

cache = CacheManager(**parse_cache_config_options(cache_opts))

# One-hour cache region for search results; cleared on import so each
# process start begins empty.
tmpl_cache = cache.get_cache(baseURL + '/search', type='dbm', expire = 3600)
tmpl_cache.clear()

# no browser back -- JavaScript prologue that pushes the user forward
# again whenever they navigate back to a cached page.
disableBack = """<html><head><SCRIPT type="text/javascript">
    window.history.forward();
    function noBack() { window.history.forward(); }
</SCRIPT>
</HEAD>
<BODY onload="noBack();"
    onpageshow="if (event.persisted) noBack();" onunload="">"""


#function connecting to google API
def googleAPI():
	#google api set up
예제 #55
0
파일: next.py 프로젝트: kiddzero/williebot
import willie
from willie.module import commands
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

# File-backed Beaker cache holding queued "next" messages; paths are
# relative to the bot's working directory.
cache_opts = {
    'cache.type': 'file',
    'cache.data_dir': './next_data',
    'cache.lock_dir': './next_lock'
}

cache = CacheManager(**parse_cache_config_options(cache_opts))
# No expire argument: queued messages persist until explicitly removed.
tcache = cache.get_cache("learn", type='dbm')

@commands("next")
def next(bot, trigger):
  tmp = trigger.group(2).split()
  person = tmp[0]
  msg = ' '.join(tmp[1:])
  key = "__%s" % person
  tcache.put(key, msg)
  bot.say("Next'd %s: %s" % (person, msg))

def join_msg(bot, trigger):
  """On JOIN, deliver and remove any message queued for the joining nick."""
  key = "__%s" % trigger.nick
  # 'in' replaces the Python-2-only has_key() idiom; Beaker's Cache
  # aliases __contains__ to has_key, so behavior is unchanged while the
  # code stays valid on Python 3.
  if key in tcache:
    bot.reply(tcache.get(key))
    tcache.remove(key)

join_msg.event = "JOIN"
join_msg.rule = r'.*'